column        type            min    max
lang          stringclasses   1 value
license       stringclasses   13 values
stderr        stringlengths   0      350
commit        stringlengths   40     40
returncode    int64           0      128
repos         stringlengths   7      45.1k
new_contents  stringlengths   0      1.87M
new_file      stringlengths   6      292
old_contents  stringlengths   0      1.87M
message       stringlengths   6      9.26k
old_file      stringlengths   6      292
subject       stringlengths   0      4.45k
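The rows that follow are sample records in this schema: per-commit metadata (language, license, commit hash, return code, repository) paired with the pre- and post-commit file contents, file paths, commit message, and subject. As a rough illustration only, the sketch below shows how a dataset with this column layout could be loaded and filtered with the Hugging Face `datasets` library; the repository id and the split name are placeholders, not part of this dump.

```python
# Minimal sketch, assuming the dump corresponds to a Hub-hosted dataset with
# the columns listed above. The dataset id below is a placeholder.
from datasets import load_dataset

ds = load_dataset("your-namespace/java-commit-corpus", split="train")

# Each record pairs the old and new file contents with the commit metadata
# shown in the sample rows below.
row = ds[0]
print(row["repos"], row["commit"], row["license"])
print(row["message"])                 # full commit message
print(row["new_file"])                # path of the changed file
print(len(row["old_contents"]), len(row["new_contents"]))

# Keep only records whose extraction succeeded (returncode == 0) and whose
# subject line is non-empty.
clean = ds.filter(lambda r: r["returncode"] == 0 and len(r["subject"]) > 0)
print(len(clean))
```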
Java
isc
093004d7ce644eb5d65f3858eafd78b1aa114c4b
0
steve-goldman/volksempfaenger
package net.x4a42.volksempfaenger.ui; import java.io.File; import java.util.ArrayList; import java.util.List; import net.x4a42.volksempfaenger.R; import net.x4a42.volksempfaenger.Utils; import net.x4a42.volksempfaenger.data.Columns.Enclosure; import net.x4a42.volksempfaenger.data.Columns.Episode; import net.x4a42.volksempfaenger.data.Constants; import net.x4a42.volksempfaenger.data.EpisodeCursor; import net.x4a42.volksempfaenger.data.VolksempfaengerContentProvider; import net.x4a42.volksempfaenger.net.DescriptionImageDownloader; import net.x4a42.volksempfaenger.service.DownloadService; import net.x4a42.volksempfaenger.service.PlaybackService; import android.app.ActionBar; import android.app.AlertDialog; import android.content.ContentUris; import android.content.ContentValues; import android.content.DialogInterface; import android.content.Intent; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.support.v4.app.FragmentActivity; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.text.Html; import android.text.Spannable; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.method.LinkMovementMethod; import android.text.style.CharacterStyle; import android.text.style.ImageSpan; import android.util.DisplayMetrics; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.TextView; import android.widget.Toast; public class ViewEpisodeActivity extends FragmentActivity implements LoaderManager.LoaderCallbacks<Cursor> { private static final String TAG = "ViewEpisodeActivity"; private static final String WHERE_EPISODE_ID = Enclosure.EPISODE_ID + "=?"; private Uri uri; private long id; private EpisodeCursor episodeCursor; private Bitmap podcastLogoBitmap; private Button playButton; private PodcastLogoView podcastLogo; private TextView podcastTitle; private TextView podcastDescription; private TextView episodeTitle; private TextView episodeDescription; private View contentContainer; private AsyncTask<Void, ImageSpan, Void> lastImageLoadTask; private SpannableStringBuilder descriptionSpanned; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.view_episode); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { ActionBar actionBar = getActionBar(); actionBar.setDisplayHomeAsUpEnabled(true); } playButton = (Button) findViewById(R.id.play); podcastLogo = (PodcastLogoView) findViewById(R.id.logo); podcastTitle = (TextView) findViewById(R.id.podcast_title); podcastDescription = (TextView) findViewById(R.id.podcast_description); episodeTitle = (TextView) findViewById(R.id.episode_title); episodeDescription = (TextView) findViewById(R.id.episode_description); contentContainer = findViewById(R.id.contentContainer); episodeDescription.setMovementMethod(LinkMovementMethod.getInstance()); Intent intent = new Intent(this, PlaybackService.class); startService(intent); onNewIntent(getIntent()); } @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); setIntent(intent); uri = intent.getData(); if (uri == null) { id = 
intent.getLongExtra("id", -1); if (id == -1) { finish(); return; } uri = ContentUris.withAppendedId( VolksempfaengerContentProvider.EPISODE_URI, id); } else { id = ContentUris.parseId(uri); } LoaderManager lm = getSupportLoaderManager(); if (lm.getLoader(0) == null) { lm.initLoader(0, null, this); } else { lm.restartLoader(0, null, this); } } @Override protected void onDestroy() { super.onDestroy(); if (podcastLogoBitmap != null) { podcastLogoBitmap.recycle(); } } @Override public boolean onCreateOptionsMenu(Menu menu) { MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.view_episode, menu); ActivityHelper.addGlobalMenu(this, menu); return true; } List<EnclosureSimple> enclosures; @Override public boolean onOptionsItemSelected(MenuItem item) { ContentValues values = new ContentValues(); switch (item.getItemId()) { case android.R.id.home: finish(); return true; case R.id.item_download: if (episodeCursor.getEnclosureId() != 0) { // there is an preferred enclosure downloadEnclosure(); } else { enclosures = getEnclosures(); switch (enclosures.size()) { case 0: // no enclosures Toast.makeText(this, R.string.message_episode_without_enclosure, Toast.LENGTH_SHORT).show(); break; case 1: // exactly one enclosure downloadEnclosure(enclosures.get(0).id); break; default: // multiple enclosures (they suck) AlertDialog dialog = getEnclosureChooserDialog( getString(R.string.dialog_choose_download_enclosure), enclosures, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { downloadEnclosure(enclosures.get(which).id); } }); dialog.show(); break; } } return true; case R.id.item_mark_listened: values.put(Episode.STATUS, Constants.EPISODE_STATE_LISTENED); getContentResolver().update( ContentUris.withAppendedId( VolksempfaengerContentProvider.EPISODE_URI, id), values, null, null); return true; case R.id.item_delete: // TODO: confirmation dialog, AsyncTask Uri uri = episodeCursor.getDownloadUri(); if (uri != null) { File file = new File(uri.getPath()); if (file != null && file.isFile()) { file.delete(); } } values.put(Episode.DOWNLOAD_ID, 0); values.put(Episode.STATUS, Constants.EPISODE_STATE_LISTENED); getContentResolver().update( ContentUris.withAppendedId( VolksempfaengerContentProvider.EPISODE_URI, episodeCursor.getId()), values, null, null); // TODO remove from DownloadManager return true; default: return ActivityHelper.handleGlobalMenu(this, item); } } private void downloadEnclosure(long... 
v) { if (v == null || v.length == 0) { v = new long[] { episodeCursor.getEnclosureId() }; } else if (v.length == 1) { ContentValues values = new ContentValues(); values.put(Episode.ENCLOSURE_ID, v[0]); getContentResolver().update( ContentUris.withAppendedId( VolksempfaengerContentProvider.EPISODE_URI, episodeCursor.getId()), values, null, null); } Intent intent = new Intent(this, DownloadService.class); intent.putExtra("id", new long[] { episodeCursor.getId() }); startService(intent); // the service will send a Toast as user feedback } private class EnclosureSimple { public long id; public String url; } private List<EnclosureSimple> getEnclosures() { Cursor cursor; { String[] projection = { Enclosure._ID, Enclosure.URL }; cursor = getContentResolver() .query(VolksempfaengerContentProvider.ENCLOSURE_URI, projection, WHERE_EPISODE_ID, new String[] { String.valueOf(id) }, null); } List<EnclosureSimple> enclosures = new ArrayList<EnclosureSimple>(); while (cursor.moveToNext()) { EnclosureSimple enclosure = new EnclosureSimple(); enclosure.id = cursor.getLong(cursor.getColumnIndex(Enclosure._ID)); enclosure.url = cursor.getString(cursor .getColumnIndex(Enclosure.URL)); enclosures.add(enclosure); } cursor.close(); return enclosures; } private AlertDialog getEnclosureChooserDialog(String title, List<EnclosureSimple> enclosures, DialogInterface.OnClickListener listener) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(title); CharSequence items[] = new String[enclosures.size()]; for (int i = 0; i < enclosures.size(); i++) { items[i] = enclosures.get(i).url; } builder.setItems(items, listener); return builder.create(); } private class ImageLoadTask extends AsyncTask<Void, ImageSpan, Void> { private DescriptionImageDownloader imageDownloader; private int viewWidth; DisplayMetrics metrics = new DisplayMetrics(); @Override protected void onPreExecute() { imageDownloader = new DescriptionImageDownloader( ViewEpisodeActivity.this); getWindowManager().getDefaultDisplay().getMetrics(metrics); viewWidth = contentContainer.getMeasuredWidth(); } @Override protected Void doInBackground(Void... params) { for (ImageSpan img : descriptionSpanned.getSpans(0, descriptionSpanned.length(), ImageSpan.class)) { if (isCancelled()) { return null; } if (!getImageFile(img).isFile()) { try { imageDownloader.fetchImage(img.getSource()); } catch (Exception e) { // Who cares? Log.d(getClass().getSimpleName(), "Exception handled", e); } } if (isCancelled()) { return null; } publishProgress(img); } return null; } @Override protected void onProgressUpdate(ImageSpan... 
values) { ImageSpan img = values[0]; File cache = getImageFile(img); String src = img.getSource(); if (cache.isFile()) { Drawable d = new BitmapDrawable(getResources(), cache.getAbsolutePath()); int width, height; int originalWidthScaled = (int) (d.getIntrinsicWidth() * metrics.density); int originalHeightScaled = (int) (d.getIntrinsicHeight() * metrics.density); if (originalWidthScaled > viewWidth) { height = d.getIntrinsicHeight() * viewWidth / d.getIntrinsicWidth(); width = viewWidth; } else { height = originalHeightScaled; width = originalWidthScaled; } d.setBounds(0, 0, width, height); ImageSpan newImg = new ImageSpan(d, src); int start = descriptionSpanned.getSpanStart(img); int end = descriptionSpanned.getSpanEnd(img); if (start == -1 || end == -1) { return; } descriptionSpanned.removeSpan(img); descriptionSpanned.setSpan(newImg, start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); // explicitly update description episodeDescription.setText(descriptionSpanned); } } @Override protected void onPostExecute(Void result) { lastImageLoadTask = null; } @Override protected void onCancelled(Void result) { lastImageLoadTask = null; } private File getImageFile(ImageSpan img) { return getImageFile(img.getSource()); } private File getImageFile(String url) { return Utils.getDescriptionImageFile(ViewEpisodeActivity.this, url); } } @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { String[] projection = { Episode._ID, Episode.TITLE, Episode.DESCRIPTION, Episode.STATUS, Episode.DATE, Episode.DURATION_TOTAL, Episode.DURATION_LISTENED, Episode.PODCAST_ID, Episode.PODCAST_TITLE, Episode.PODCAST_DESCRIPTION, Episode.DOWNLOAD_ID, Episode.DOWNLOAD_DONE, Episode.DOWNLOAD_URI, Episode.DOWNLOAD_STATUS, Episode.DOWNLOAD_TOTAL, Episode.ENCLOSURE_ID }; return new CursorLoader(this, uri, projection, null, null, null); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) { episodeCursor = new EpisodeCursor(cursor); if (!episodeCursor.moveToFirst()) { // the episode does not exist (any more) finish(); return; } onEpisodeCursorChanged(); } @Override public void onLoaderReset(Loader<Cursor> loader) { episodeCursor = null; onEpisodeCursorChanged(); } public void onEpisodeCursorChanged() { if (episodeCursor == null) return; setTitle(episodeCursor.getPodcastTitle()); playButton.setText("Play"); // TODO resource and toggle podcastTitle.setText(episodeCursor.getPodcastTitle()); podcastLogo.setPodcastId(episodeCursor.getPodcastId()); podcastDescription.setText(episodeCursor.getPodcastDescription()); episodeTitle.setText(episodeCursor.getTitle()); if (lastImageLoadTask != null) { lastImageLoadTask.cancel(true); } Spanned s = Html.fromHtml(episodeCursor.getDescription()); descriptionSpanned = s instanceof SpannableStringBuilder ? (SpannableStringBuilder) s : new SpannableStringBuilder(s); if (descriptionSpanned.getSpans(0, descriptionSpanned.length(), CharacterStyle.class).length == 0) { // use the normal text as there is no html episodeDescription.setText(episodeCursor.getDescription()); } else { episodeDescription.setText(descriptionSpanned); lastImageLoadTask = new ImageLoadTask().execute(); } } public void onClickPlay(View v) { Intent intent = new Intent(this, PlaybackService.class); intent.setAction(PlaybackService.ACTION_PLAY); intent.setData(uri); startService(intent); } public Uri getUri() { return uri; } }
src/net/x4a42/volksempfaenger/ui/ViewEpisodeActivity.java
package net.x4a42.volksempfaenger.ui; import java.io.File; import java.util.ArrayList; import java.util.List; import net.x4a42.volksempfaenger.R; import net.x4a42.volksempfaenger.Utils; import net.x4a42.volksempfaenger.data.Columns.Enclosure; import net.x4a42.volksempfaenger.data.Columns.Episode; import net.x4a42.volksempfaenger.data.Constants; import net.x4a42.volksempfaenger.data.EpisodeCursor; import net.x4a42.volksempfaenger.data.VolksempfaengerContentProvider; import net.x4a42.volksempfaenger.net.DescriptionImageDownloader; import net.x4a42.volksempfaenger.service.DownloadService; import net.x4a42.volksempfaenger.service.PlaybackService; import android.app.ActionBar; import android.app.AlertDialog; import android.content.ContentUris; import android.content.ContentValues; import android.content.DialogInterface; import android.content.Intent; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.support.v4.app.FragmentActivity; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.text.Html; import android.text.Spannable; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.method.LinkMovementMethod; import android.text.style.CharacterStyle; import android.text.style.ImageSpan; import android.util.DisplayMetrics; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.TextView; import android.widget.Toast; public class ViewEpisodeActivity extends FragmentActivity implements LoaderManager.LoaderCallbacks<Cursor> { private static final String TAG = "ViewEpisodeActivity"; private static final String WHERE_EPISODE_ID = Enclosure.EPISODE_ID + "=?"; private Uri uri; private long id; private EpisodeCursor episodeCursor; private Bitmap podcastLogoBitmap; private Button playButton; private PodcastLogoView podcastLogo; private TextView podcastTitle; private TextView podcastDescription; private TextView episodeTitle; private TextView episodeDescription; private View contentContainer; private SpannableStringBuilder descriptionSpanned; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.view_episode); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { ActionBar actionBar = getActionBar(); actionBar.setDisplayHomeAsUpEnabled(true); } playButton = (Button) findViewById(R.id.play); podcastLogo = (PodcastLogoView) findViewById(R.id.logo); podcastTitle = (TextView) findViewById(R.id.podcast_title); podcastDescription = (TextView) findViewById(R.id.podcast_description); episodeTitle = (TextView) findViewById(R.id.episode_title); episodeDescription = (TextView) findViewById(R.id.episode_description); contentContainer = findViewById(R.id.contentContainer); episodeDescription.setMovementMethod(LinkMovementMethod.getInstance()); Intent intent = new Intent(this, PlaybackService.class); startService(intent); onNewIntent(getIntent()); getSupportLoaderManager().initLoader(0, null, this); } @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); setIntent(intent); uri = intent.getData(); if (uri == null) { id = 
intent.getLongExtra("id", -1); if (id == -1) { finish(); return; } uri = ContentUris.withAppendedId( VolksempfaengerContentProvider.EPISODE_URI, id); } else { id = ContentUris.parseId(uri); } } @Override protected void onDestroy() { super.onDestroy(); if (podcastLogoBitmap != null) { podcastLogoBitmap.recycle(); } } @Override public boolean onCreateOptionsMenu(Menu menu) { MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.view_episode, menu); ActivityHelper.addGlobalMenu(this, menu); return true; } List<EnclosureSimple> enclosures; @Override public boolean onOptionsItemSelected(MenuItem item) { ContentValues values = new ContentValues(); switch (item.getItemId()) { case android.R.id.home: finish(); return true; case R.id.item_download: if (episodeCursor.getEnclosureId() != 0) { // there is an preferred enclosure downloadEnclosure(); } else { enclosures = getEnclosures(); switch (enclosures.size()) { case 0: // no enclosures Toast.makeText(this, R.string.message_episode_without_enclosure, Toast.LENGTH_SHORT).show(); break; case 1: // exactly one enclosure downloadEnclosure(enclosures.get(0).id); break; default: // multiple enclosures (they suck) AlertDialog dialog = getEnclosureChooserDialog( getString(R.string.dialog_choose_download_enclosure), enclosures, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { downloadEnclosure(enclosures.get(which).id); } }); dialog.show(); break; } } return true; case R.id.item_mark_listened: values.put(Episode.STATUS, Constants.EPISODE_STATE_LISTENED); getContentResolver().update( ContentUris.withAppendedId( VolksempfaengerContentProvider.EPISODE_URI, id), values, null, null); return true; case R.id.item_delete: // TODO: confirmation dialog, AsyncTask Uri uri = episodeCursor.getDownloadUri(); if (uri != null) { File file = new File(uri.getPath()); if (file != null && file.isFile()) { file.delete(); } } values.put(Episode.DOWNLOAD_ID, 0); values.put(Episode.STATUS, Constants.EPISODE_STATE_LISTENED); getContentResolver().update( ContentUris.withAppendedId( VolksempfaengerContentProvider.EPISODE_URI, episodeCursor.getId()), values, null, null); // TODO remove from DownloadManager return true; default: return ActivityHelper.handleGlobalMenu(this, item); } } private void downloadEnclosure(long... 
v) { if (v == null || v.length == 0) { v = new long[] { episodeCursor.getEnclosureId() }; } else if (v.length == 1) { ContentValues values = new ContentValues(); values.put(Episode.ENCLOSURE_ID, v[0]); getContentResolver().update( ContentUris.withAppendedId( VolksempfaengerContentProvider.EPISODE_URI, episodeCursor.getId()), values, null, null); } Intent intent = new Intent(this, DownloadService.class); intent.putExtra("id", new long[] { episodeCursor.getId() }); startService(intent); // the service will send a Toast as user feedback } private class EnclosureSimple { public long id; public String url; } private List<EnclosureSimple> getEnclosures() { Cursor cursor; { String[] projection = { Enclosure._ID, Enclosure.URL }; cursor = getContentResolver() .query(VolksempfaengerContentProvider.ENCLOSURE_URI, projection, WHERE_EPISODE_ID, new String[] { String.valueOf(id) }, null); } List<EnclosureSimple> enclosures = new ArrayList<EnclosureSimple>(); while (cursor.moveToNext()) { EnclosureSimple enclosure = new EnclosureSimple(); enclosure.id = cursor.getLong(cursor.getColumnIndex(Enclosure._ID)); enclosure.url = cursor.getString(cursor .getColumnIndex(Enclosure.URL)); enclosures.add(enclosure); } cursor.close(); return enclosures; } private AlertDialog getEnclosureChooserDialog(String title, List<EnclosureSimple> enclosures, DialogInterface.OnClickListener listener) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(title); CharSequence items[] = new String[enclosures.size()]; for (int i = 0; i < enclosures.size(); i++) { items[i] = enclosures.get(i).url; } builder.setItems(items, listener); return builder.create(); } private class ImageLoadTask extends AsyncTask<Void, ImageSpan, Void> { private DescriptionImageDownloader imageDownloader; private int viewWidth; DisplayMetrics metrics = new DisplayMetrics(); @Override protected void onPreExecute() { imageDownloader = new DescriptionImageDownloader( ViewEpisodeActivity.this); getWindowManager().getDefaultDisplay().getMetrics(metrics); viewWidth = contentContainer.getMeasuredWidth(); } @Override protected Void doInBackground(Void... params) { for (ImageSpan img : descriptionSpanned.getSpans(0, descriptionSpanned.length(), ImageSpan.class)) { if (!getImageFile(img).isFile()) { try { imageDownloader.fetchImage(img.getSource()); } catch (Exception e) { // Who cares? Log.d(getClass().getSimpleName(), "Exception handled", e); } } publishProgress(img); } return null; } @Override protected void onProgressUpdate(ImageSpan... 
values) { ImageSpan img = values[0]; File cache = getImageFile(img); String src = img.getSource(); if (cache.isFile()) { Drawable d = new BitmapDrawable(getResources(), cache.getAbsolutePath()); int width, height; int originalWidthScaled = (int) (d.getIntrinsicWidth() * metrics.density); int originalHeightScaled = (int) (d.getIntrinsicHeight() * metrics.density); if (originalWidthScaled > viewWidth) { height = d.getIntrinsicHeight() * viewWidth / d.getIntrinsicWidth(); width = viewWidth; } else { height = originalHeightScaled; width = originalWidthScaled; } d.setBounds(0, 0, width, height); ImageSpan newImg = new ImageSpan(d, src); int start = descriptionSpanned.getSpanStart(img); int end = descriptionSpanned.getSpanEnd(img); descriptionSpanned.removeSpan(img); descriptionSpanned.setSpan(newImg, start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); // explicitly update description episodeDescription.setText(descriptionSpanned); } } private File getImageFile(ImageSpan img) { return getImageFile(img.getSource()); } private File getImageFile(String url) { return Utils.getDescriptionImageFile(ViewEpisodeActivity.this, url); } } @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { String[] projection = { Episode._ID, Episode.TITLE, Episode.DESCRIPTION, Episode.STATUS, Episode.DATE, Episode.DURATION_TOTAL, Episode.DURATION_LISTENED, Episode.PODCAST_ID, Episode.PODCAST_TITLE, Episode.PODCAST_DESCRIPTION, Episode.DOWNLOAD_ID, Episode.DOWNLOAD_DONE, Episode.DOWNLOAD_URI, Episode.DOWNLOAD_STATUS, Episode.DOWNLOAD_TOTAL, Episode.ENCLOSURE_ID }; return new CursorLoader(this, uri, projection, null, null, null); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) { episodeCursor = new EpisodeCursor(cursor); if (!episodeCursor.moveToFirst()) { // the episode does not exist (any more) finish(); return; } onEpisodeCursorChanged(); } @Override public void onLoaderReset(Loader<Cursor> loader) { episodeCursor = null; onEpisodeCursorChanged(); } public void onEpisodeCursorChanged() { if (episodeCursor == null) return; setTitle(episodeCursor.getPodcastTitle()); playButton.setText("Play"); // TODO resource and toggle podcastTitle.setText(episodeCursor.getPodcastTitle()); podcastLogo.setPodcastId(episodeCursor.getPodcastId()); podcastDescription.setText(episodeCursor.getPodcastDescription()); episodeTitle.setText(episodeCursor.getTitle()); Spanned s = Html.fromHtml(episodeCursor.getDescription()); descriptionSpanned = s instanceof SpannableStringBuilder ? (SpannableStringBuilder) s : new SpannableStringBuilder(s); if (descriptionSpanned.getSpans(0, descriptionSpanned.length(), CharacterStyle.class).length == 0) { // use the normal text as there is no html episodeDescription.setText(episodeCursor.getDescription()); } else { episodeDescription.setText(descriptionSpanned); new ImageLoadTask().execute(); } } public void onClickPlay(View v) { Intent intent = new Intent(this, PlaybackService.class); intent.setAction(PlaybackService.ACTION_PLAY); intent.setData(uri); startService(intent); } public Uri getUri() { return uri; } }
Properly handle new Intents in ViewEpisodeActivity and cancel old ImageLoadTasks.
src/net/x4a42/volksempfaenger/ui/ViewEpisodeActivity.java
Properly handle new Intents in ViewEpisodeActivity and cancel old ImageLoadTasks.
Java
mit
038b196903d3614a33b26f432d440d2bf123e849
0
jklingsporn/vertx-jooq
package io.github.jklingsporn.vertx.jooq.shared.reactive; import io.github.jklingsporn.vertx.jooq.shared.internal.AbstractQueryExecutor; import io.vertx.core.buffer.Buffer; import io.vertx.core.logging.Logger; import io.vertx.core.logging.LoggerFactory; import io.vertx.sqlclient.Tuple; import io.vertx.sqlclient.impl.ArrayTuple; import org.jooq.Configuration; import org.jooq.Param; import org.jooq.Query; import org.jooq.SQLDialect; import org.jooq.conf.ParamType; /** * @author jensklingsporn */ public abstract class AbstractReactiveQueryExecutor extends AbstractQueryExecutor { private static final Logger logger = LoggerFactory.getLogger(AbstractReactiveQueryExecutor.class); /** * Replace ':' but not '::' */ private static final String pattern = "(?<!:):(?!:)"; protected AbstractReactiveQueryExecutor(Configuration configuration) { super(configuration); } protected Tuple getBindValues(Query query) { ArrayTuple bindValues = new ArrayTuple(query.getParams().size()); for (Param<?> param : query.getParams().values()) { if (!param.isInline()) { Object value = convertToDatabaseType(param); bindValues.add(value); } } return bindValues; } protected <U> Object convertToDatabaseType(Param<U> param) { /* * https://github.com/reactiverse/reactive-pg-client/issues/191 enum types are treated as unknown * DataTypes. Workaround is to convert them to string before adding to the Tuple. */ if (Enum.class.isAssignableFrom(param.getBinding().converter().toType())) { if (param.getValue() == null) { return null; } return param.getValue().toString(); } if (byte[].class.isAssignableFrom(param.getBinding().converter().fromType())) { // jooq treats BINARY types as byte[] but the reactive client expects a Buffer to write to blobs byte[] bytes = (byte[]) param.getBinding().converter().to(param.getValue()); if (bytes == null) { return null; } return Buffer.buffer(bytes); } return param.getBinding().converter().to(param.getValue()); } protected void log(Query query) { if (logger.isDebugEnabled()) { logger.debug("Executing {}", query.getSQL(ParamType.INLINED)); } } protected String toPreparedQuery(Query query) { if (SQLDialect.POSTGRES.supports(configuration().dialect())) { String namedQuery = query.getSQL(ParamType.NAMED); return namedQuery.replaceAll(pattern, "\\$"); } // mysql works with the standard string return query.getSQL(); } }
vertx-jooq-shared-reactive/src/main/java/io/github/jklingsporn/vertx/jooq/shared/reactive/AbstractReactiveQueryExecutor.java
package io.github.jklingsporn.vertx.jooq.shared.reactive; import io.github.jklingsporn.vertx.jooq.shared.internal.AbstractQueryExecutor; import io.vertx.core.buffer.Buffer; import io.vertx.core.logging.Logger; import io.vertx.core.logging.LoggerFactory; import io.vertx.sqlclient.Tuple; import io.vertx.sqlclient.impl.ArrayTuple; import org.jooq.Configuration; import org.jooq.Param; import org.jooq.Query; import org.jooq.SQLDialect; import org.jooq.conf.ParamType; /** * @author jensklingsporn */ public abstract class AbstractReactiveQueryExecutor extends AbstractQueryExecutor { private static final Logger logger = LoggerFactory.getLogger(AbstractReactiveQueryExecutor.class); /** * Replace ':' but not '::' */ private static final String pattern = "(?<!:):(?!:)"; protected AbstractReactiveQueryExecutor(Configuration configuration) { super(configuration); } protected Tuple getBindValues(Query query) { ArrayTuple bindValues = new ArrayTuple(query.getParams().size()); for (Param<?> param : query.getParams().values()) { if (!param.isInline()) { Object value = convertToDatabaseType(param); bindValues.add(value); } } return bindValues; } protected <U> Object convertToDatabaseType(Param<U> param) { /* * https://github.com/reactiverse/reactive-pg-client/issues/191 enum types are treated as unknown * DataTypes. Workaround is to convert them to string before adding to the Tuple. */ if (Enum.class.isAssignableFrom(param.getBinding().converter().toType())) { return param.getValue().toString(); } if (byte[].class.isAssignableFrom(param.getBinding().converter().fromType())) { // jooq treats BINARY types as byte[] but the reactive client expects a Buffer to write to blobs byte[] bytes = (byte[]) param.getBinding().converter().to(param.getValue()); if (bytes == null) { return null; } return Buffer.buffer(bytes); } return param.getBinding().converter().to(param.getValue()); } protected void log(Query query) { if (logger.isDebugEnabled()) { logger.debug("Executing {}", query.getSQL(ParamType.INLINED)); } } protected String toPreparedQuery(Query query) { if (SQLDialect.POSTGRES.supports(configuration().dialect())) { String namedQuery = query.getSQL(ParamType.NAMED); return namedQuery.replaceAll(pattern, "\\$"); } // mysql works with the standard string return query.getSQL(); } }
fix(enum): Fix NPE if we try to parse an enum with a null value
vertx-jooq-shared-reactive/src/main/java/io/github/jklingsporn/vertx/jooq/shared/reactive/AbstractReactiveQueryExecutor.java
fix(enum): Fix NPE if we try to parse an enum with a null value
Java
mit
8b42095d848258af5c364c656bdece5d5e5f1ad9
0
980f/ezjava,980f/ezjava
package pers.hal42.lang; import pers.hal42.logging.ErrorLogStream; import java.text.MessageFormat; import java.util.Iterator; import java.util.Map; import java.util.function.Predicate; /** * Created by andyh on 7/19/17. * * generates a map which is a description of the difference of two maps. */ public class MapX<K, V> { private static ErrorLogStream dbg = ErrorLogStream.getForClass(MapX.class); /** * remove diff records where 'thatValue' is null and 'thisValue' does NOT meet criterion @param keep */ public static <K, V> int DropNullIfNot(Map<K, Diff<V>> diffs, Predicate<V> keep) { int startingSize = diffs.size(); Iterator<K> it = diffs.keySet().iterator(); while (it.hasNext()) { K k = it.next(); Diff<V> dif = diffs.get(k); if ((dif.thatValue == null) && !keep.test(dif.thisValue)) { //todo:1 add complementary case wrt who is null it.remove(); } } return startingSize - diffs.size(); } @SuppressWarnings("unchecked") public static <K, V> Map<K, Diff<V>> Diff(Map<K, V> thiss, Map<K, V> that) { try { Map<K, Diff<V>> diffs = thiss.getClass().newInstance(); thiss.forEach((k, v) -> { V probate = that.get(k); if (probate != null ? !probate.equals(v):v !=null) { diffs.put(k, new Diff(v,probate)); } }); that.forEach((k, v)->{ if(!thiss.containsKey(k)){ diffs.put(k, new Diff(null,v)); } }); return diffs; } catch (InstantiationException | IllegalAccessException e) { dbg.Caught(e,"Diffing maps {0}-{1}",thiss,that); return null; } } public static class Diff<V> { public V thisValue; public V thatValue; public Diff(V thiss, V that) { thisValue = thiss; thatValue = that; } @Override public String toString() { if (thisValue == null) { return "New:" + thatValue; } else if (thatValue == null) { return "Lost:" + thisValue; } else { return MessageFormat.format("Change:{0}->{1}", thisValue, thatValue); } } } }
src/pers/hal42/lang/MapX.java
package pers.hal42.lang; import pers.hal42.logging.ErrorLogStream; import java.util.Map; /** * Created by andyh on 7/19/17. * * generates a map which is a description of the difference of two maps. */ public class MapX<K, V> { private static ErrorLogStream dbg = ErrorLogStream.getForClass(MapX.class); public static class Diff<V> { public V thisValue; public V thatValue; public Diff(V thiss,V that){ thisValue=thiss; thatValue=that; } } @SuppressWarnings("unchecked") public static <K, V> Map<K, Diff<V>> Diff(Map<K, V> thiss, Map<K, V> that) { try { Map<K, Diff<V>> diffs = thiss.getClass().newInstance(); thiss.forEach((k, v) -> { V probate = that.get(k); if (probate != null ? !probate.equals(v):v !=null) { diffs.put(k, new Diff(v,probate)); } }); that.forEach((k,v)->{ if(!thiss.containsKey(k)){ diffs.put(k, new Diff(null,v)); } }); return diffs; } catch (InstantiationException | IllegalAccessException e) { dbg.Caught(e,"Diffing maps {0}-{1}",thiss,that); return null; } } }
database diff seems to be running well.
src/pers/hal42/lang/MapX.java
database diff seems to be running well.
Java
mit
49e7ad174a50db8537b97799abadd5936d2b59be
0
ixfan/wechat-kit
/* * MIT License * * Copyright (c) 2016 Warren Fan * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package me.ixfan.wechatkit.message.out.json; import me.ixfan.wechatkit.message.out.OutMessageType; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.util.Arrays; import static org.junit.Assert.assertEquals; /** * @author Warren Fan */ @RunWith(JUnit4.class) public class MassMessageJsonSerializeTest { private final String NEWS_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"mpnews\",\"mpnews\":{\"media_id\":\"123dsdajkasd231jhksad\"},\"send_ignore_reprint\":0}"; private final String TEXT_TO_Tag = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"text\",\"text\":{\"content\":\"CONTENT\"}}"; private final String IMG_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"image\",\"image\":{\"media_id\":\"123dsdajkasd231jhksad\"}}"; private final String VOICE_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"voice\",\"voice\":{\"media_id\":\"123dsdajkasd231jhksad\"}}"; private final String VIDEO_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"mpvideo\",\"mpvideo\":{\"media_id\":\"IhdaAQXuvJtGzwwc0abfXnzeezfO0NgPK6AQYShD8RQYMTtfzbLdBIQkQziv2XJc\"}}"; private final String CARD_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":\"t2\"},\"msgtype\":\"wxcard\",\"wxcard\":{\"card_id\":\"123dsdajkasd231jhksad\"}}"; private final String NEWS_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"mpnews\",\"mpnews\":{\"media_id\":\"123dsdajkasd231jhksad\"},\"send_ignore_reprint\":0}"; private final String TEXT_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"text\",\"text\":{\"content\":\"hello from boxer.\"}}"; private final String IMG_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"image\",\"image\":{\"media_id\":\"BTgN0opcW3Y5zV_ZebbsD3NFKRWf6cb7OPswPi9Q83fOJHK2P67dzxn11Cp7THat\"}}"; private final String VOICE_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"voice\",\"voice\":{\"media_id\":\"mLxl6paC7z2Tl-NJT64yzJve8T9c8u9K2x-Ai6Ujd4lIH9IBuF6-2r66mamn_gIT\"}}"; private final String VIDEO_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"video\",\"video\":{\"media_id\":\"123dsdajkasd231jhksad\",\"title\":\"TITLE\",\"description\":\"DESCRIPTION\"}}"; private final String CARD_TO_USERS = 
"{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"wxcard\",\"wxcard\":{\"card_id\":\"123dsdajkasd231jhksad\"}}"; @Test public void successfullySerializeMassTextMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.TEXT, "CONTENT", "2", false); assertEquals(TEXT_TO_Tag, msg.toJsonString()); } @Test public void successfullySerializeMassNewsMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.MP_NEWS, "123dsdajkasd231jhksad", "2", false); msg.setSendIgnoreReprint(0); assertEquals(NEWS_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassImgMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.IMAGE, "123dsdajkasd231jhksad", "2", false); assertEquals(IMG_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassVoiceMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.VOICE, "123dsdajkasd231jhksad", "2", false); assertEquals(VOICE_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassVideoMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.MP_VIDEO, "IhdaAQXuvJtGzwwc0abfXnzeezfO0NgPK6AQYShD8RQYMTtfzbLdBIQkQziv2XJc", "2", false); assertEquals(VIDEO_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassCardMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.WX_CARD, "123dsdajkasd231jhksad", "t2", false); assertEquals(CARD_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassTextMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.TEXT, "hello from boxer.", Arrays.asList("OPENID1", "OPENID2")); assertEquals(TEXT_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassNewsMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.MP_NEWS, "123dsdajkasd231jhksad", Arrays.asList("OPENID1", "OPENID2")); msg.setSendIgnoreReprint(0); assertEquals(NEWS_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassImgMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.IMAGE, "BTgN0opcW3Y5zV_ZebbsD3NFKRWf6cb7OPswPi9Q83fOJHK2P67dzxn11Cp7THat", Arrays.asList("OPENID1", "OPENID2")); assertEquals(IMG_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassVoiceMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.VOICE, "mLxl6paC7z2Tl-NJT64yzJve8T9c8u9K2x-Ai6Ujd4lIH9IBuF6-2r66mamn_gIT", Arrays.asList("OPENID1", "OPENID2")); assertEquals(VOICE_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassVideoMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.VIDEO, "123dsdajkasd231jhksad", Arrays.asList("OPENID1", "OPENID2")); msg.setTitle("TITLE"); msg.setDescription("DESCRIPTION"); assertEquals(VIDEO_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassCardMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.WX_CARD, "123dsdajkasd231jhksad", Arrays.asList("OPENID1", "OPENID2")); assertEquals(CARD_TO_USERS, msg.toJsonString()); } }
wechatkit/src/test/java/me/ixfan/wechatkit/message/out/json/MassMessageJsonSerializeTest.java
/* * MIT License * * Copyright (c) 2016 Warren Fan * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package me.ixfan.wechatkit.message.out.json; import me.ixfan.wechatkit.message.out.OutMessageType; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.util.Arrays; import static org.junit.Assert.assertEquals; /** * @author Warren Fan */ @RunWith(JUnit4.class) public class MassMessageJsonSerializeTest { private final String NEWS_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"mpnews\",\"mpnews\":{\"media_id\":\"123dsdajkasd231jhksad\"}}"; private final String TEXT_TO_Tag = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"text\",\"text\":{\"content\":\"CONTENT\"}}"; private final String IMG_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"image\",\"image\":{\"media_id\":\"123dsdajkasd231jhksad\"}}"; private final String VOICE_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"voice\",\"voice\":{\"media_id\":\"123dsdajkasd231jhksad\"}}"; private final String VIDEO_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":2},\"msgtype\":\"mpvideo\",\"mpvideo\":{\"media_id\":\"IhdaAQXuvJtGzwwc0abfXnzeezfO0NgPK6AQYShD8RQYMTtfzbLdBIQkQziv2XJc\"}}"; private final String CARD_TO_TAG = "{\"filter\":{\"is_to_all\":false,\"tag_id\":\"t2\"},\"msgtype\":\"wxcard\",\"wxcard\":{\"card_id\":\"123dsdajkasd231jhksad\"}}"; private final String NEWS_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"mpnews\",\"mpnews\":{\"media_id\":\"123dsdajkasd231jhksad\"}}"; private final String TEXT_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"text\",\"text\":{\"content\":\"hello from boxer.\"}}"; private final String IMG_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"image\",\"image\":{\"media_id\":\"BTgN0opcW3Y5zV_ZebbsD3NFKRWf6cb7OPswPi9Q83fOJHK2P67dzxn11Cp7THat\"}}"; private final String VOICE_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"voice\",\"voice\":{\"media_id\":\"mLxl6paC7z2Tl-NJT64yzJve8T9c8u9K2x-Ai6Ujd4lIH9IBuF6-2r66mamn_gIT\"}}"; private final String VIDEO_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"video\",\"video\":{\"media_id\":\"123dsdajkasd231jhksad\",\"title\":\"TITLE\",\"description\":\"DESCRIPTION\"}}"; private final String CARD_TO_USERS = "{\"touser\":[\"OPENID1\",\"OPENID2\"],\"msgtype\":\"wxcard\",\"wxcard\":{\"card_id\":\"123dsdajkasd231jhksad\"}}"; @Test public void 
successfullySerializeMassTextMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.TEXT, "CONTENT", "2", false); assertEquals(TEXT_TO_Tag, msg.toJsonString()); } @Test public void successfullySerializeMassNewsMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.MP_NEWS, "123dsdajkasd231jhksad", "2", false); assertEquals(NEWS_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassImgMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.IMAGE, "123dsdajkasd231jhksad", "2", false); assertEquals(IMG_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassVoiceMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.VOICE, "123dsdajkasd231jhksad", "2", false); assertEquals(VOICE_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassVideoMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.MP_VIDEO, "IhdaAQXuvJtGzwwc0abfXnzeezfO0NgPK6AQYShD8RQYMTtfzbLdBIQkQziv2XJc", "2", false); assertEquals(VIDEO_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassCardMsgToTag() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.WX_CARD, "123dsdajkasd231jhksad", "t2", false); assertEquals(CARD_TO_TAG, msg.toJsonString()); } @Test public void successfullySerializeMassTextMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.TEXT, "hello from boxer.", Arrays.asList("OPENID1", "OPENID2")); assertEquals(TEXT_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassNewsMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.MP_NEWS, "123dsdajkasd231jhksad", Arrays.asList("OPENID1", "OPENID2")); assertEquals(NEWS_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassImgMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.IMAGE, "BTgN0opcW3Y5zV_ZebbsD3NFKRWf6cb7OPswPi9Q83fOJHK2P67dzxn11Cp7THat", Arrays.asList("OPENID1", "OPENID2")); assertEquals(IMG_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassVoiceMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.VOICE, "mLxl6paC7z2Tl-NJT64yzJve8T9c8u9K2x-Ai6Ujd4lIH9IBuF6-2r66mamn_gIT", Arrays.asList("OPENID1", "OPENID2")); assertEquals(VOICE_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassVideoMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.VIDEO, "123dsdajkasd231jhksad", Arrays.asList("OPENID1", "OPENID2")); msg.setTitle("TITLE"); msg.setDescription("DESCRIPTION"); assertEquals(VIDEO_TO_USERS, msg.toJsonString()); } @Test public void successfullySerializeMassCardMsgToUsers() { MessageForMassSend msg = new MessageForMassSend(OutMessageType.WX_CARD, "123dsdajkasd231jhksad", Arrays.asList("OPENID1", "OPENID2")); assertEquals(CARD_TO_USERS, msg.toJsonString()); } }
Add the send_ignore_reprint parameter to mass-send news (mpnews) messages; JSON serialization tests
wechatkit/src/test/java/me/ixfan/wechatkit/message/out/json/MassMessageJsonSerializeTest.java
Add the send_ignore_reprint parameter to mass-send news (mpnews) messages; JSON serialization tests
Java
mit
9cf7117ea0cc02f67770644e232db8947413f6e0
0
hsyyid/GriefPrevention,MinecraftPortCentral/GriefPrevention
/* GriefPrevention Server Plugin for Minecraft Copyright (C) 2012 Ryan Hamshire This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package me.ryanhamshire.GriefPrevention; import java.util.Calendar; import java.util.Random; import java.util.Vector; import org.bukkit.Chunk; import org.bukkit.World; //FEATURE: automatically remove claims owned by inactive players which: //...aren't protecting much OR //...are a free new player claim (and the player has no other claims) OR //...because the player has been gone a REALLY long time, and that expiration has been configured in config.yml //runs every 1 minute in the main thread class CleanupUnusedClaimsTask implements Runnable { int nextClaimIndex; CleanupUnusedClaimsTask() { //start scanning in a random spot if(GriefPrevention.instance.dataStore.claims.size() == 0) { this.nextClaimIndex = 0; } else { Random randomNumberGenerator = new Random(); this.nextClaimIndex = randomNumberGenerator.nextInt(GriefPrevention.instance.dataStore.claims.size()); } } @Override public void run() { //don't do anything when there are no claims if(GriefPrevention.instance.dataStore.claims.size() == 0) return; //wrap search around to beginning if(this.nextClaimIndex >= GriefPrevention.instance.dataStore.claims.size()) this.nextClaimIndex = 0; //decide which claim to check next Claim claim = GriefPrevention.instance.dataStore.claims.get(this.nextClaimIndex++); //skip administrative claims if(claim.isAdminClaim()) return; //track whether we do any important work which would require cleanup afterward boolean cleanupChunks = false; //get data for the player, especially last login timestamp PlayerData playerData = null; //determine area of the default chest claim int areaOfDefaultClaim = 0; if(GriefPrevention.instance.config_claims_automaticClaimsForNewPlayersRadius >= 0) { areaOfDefaultClaim = (int)Math.pow(GriefPrevention.instance.config_claims_automaticClaimsForNewPlayersRadius * 2 + 1, 2); } //if this claim is a chest claim and those are set to expire if(claim.getArea() <= areaOfDefaultClaim && GriefPrevention.instance.config_claims_chestClaimExpirationDays > 0) { playerData = GriefPrevention.instance.dataStore.getPlayerData(claim.ownerID); //if the owner has been gone at least a week, and if he has ONLY the new player claim, it will be removed Calendar sevenDaysAgo = Calendar.getInstance(); sevenDaysAgo.add(Calendar.DATE, -GriefPrevention.instance.config_claims_chestClaimExpirationDays); boolean newPlayerClaimsExpired = sevenDaysAgo.getTime().after(playerData.getLastLogin()); if(newPlayerClaimsExpired && playerData.getClaims().size() == 1) { claim.removeSurfaceFluids(null); GriefPrevention.instance.dataStore.deleteClaim(claim); cleanupChunks = true; //if configured to do so, restore the land to natural if((GriefPrevention.instance.creativeRulesApply(claim.getLesserBoundaryCorner()) && GriefPrevention.instance.config_claims_creativeAutoNatureRestoration) || 
GriefPrevention.instance.config_claims_survivalAutoNatureRestoration) { GriefPrevention.instance.restoreClaim(claim, 0); } GriefPrevention.AddLogEntry(" " + claim.getOwnerName() + "'s new player claim expired."); } } //if configured to always remove claims after some inactivity period without exceptions... else if(GriefPrevention.instance.config_claims_expirationDays > 0) { if(playerData == null) playerData = GriefPrevention.instance.dataStore.getPlayerData(claim.ownerID); Calendar earliestPermissibleLastLogin = Calendar.getInstance(); earliestPermissibleLastLogin.add(Calendar.DATE, -GriefPrevention.instance.config_claims_expirationDays); if(earliestPermissibleLastLogin.getTime().after(playerData.getLastLogin())) { //make a copy of this player's claim list Vector<Claim> claims = new Vector<Claim>(); for(int i = 0; i < playerData.getClaims().size(); i++) { claims.add(playerData.getClaims().get(i)); } //delete them GriefPrevention.instance.dataStore.deleteClaimsForPlayer(claim.ownerID, true); GriefPrevention.AddLogEntry(" All of " + claim.getOwnerName() + "'s claims have expired."); for(int i = 0; i < claims.size(); i++) { //if configured to do so, restore the land to natural if((GriefPrevention.instance.creativeRulesApply(claims.get(i).getLesserBoundaryCorner()) && GriefPrevention.instance.config_claims_creativeAutoNatureRestoration) || GriefPrevention.instance.config_claims_survivalAutoNatureRestoration) { GriefPrevention.instance.restoreClaim(claims.get(i), 0); cleanupChunks = true; } } } } else if(GriefPrevention.instance.config_claims_unusedClaimExpirationDays > 0 && GriefPrevention.instance.creativeRulesApply(claim.getLesserBoundaryCorner())) { //avoid scanning large claims and administrative claims if(claim.isAdminClaim() || claim.getWidth() > 25 || claim.getHeight() > 25) return; //otherwise scan the claim content int minInvestment = 400; long investmentScore = claim.getPlayerInvestmentScore(); cleanupChunks = true; if(investmentScore < minInvestment) { GriefPrevention.instance.dataStore.deleteClaim(claim); GriefPrevention.AddLogEntry("Removed " + claim.getOwnerName() + "'s unused claim @ " + GriefPrevention.getfriendlyLocationString(claim.getLesserBoundaryCorner())); //if configured to do so, restore the claim area to natural state if((GriefPrevention.instance.creativeRulesApply(claim.getLesserBoundaryCorner()) && GriefPrevention.instance.config_claims_creativeAutoNatureRestoration) || GriefPrevention.instance.config_claims_survivalAutoNatureRestoration) { GriefPrevention.instance.restoreClaim(claim, 0); } } } if(playerData != null) GriefPrevention.instance.dataStore.clearCachedPlayerData(claim.ownerID); //since we're potentially loading a lot of chunks to scan parts of the world where there are no players currently playing, be mindful of memory usage if(cleanupChunks) { World world = claim.getLesserBoundaryCorner().getWorld(); Chunk lesserChunk = world.getChunkAt(claim.getLesserBoundaryCorner()); Chunk greaterChunk = world.getChunkAt(claim.getGreaterBoundaryCorner()); for(int x = lesserChunk.getX(); x <= greaterChunk.getX(); x++) { for(int z = lesserChunk.getZ(); z <= greaterChunk.getZ(); z++) { Chunk chunk = world.getChunkAt(x, z); if(chunk.isLoaded()) { chunk.unload(true, true); } } } } } }
src/me/ryanhamshire/GriefPrevention/CleanupUnusedClaimsTask.java
/* GriefPrevention Server Plugin for Minecraft Copyright (C) 2012 Ryan Hamshire This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package me.ryanhamshire.GriefPrevention; import java.util.Calendar; import java.util.Random; import java.util.Vector; import org.bukkit.Chunk; import org.bukkit.World; //FEATURE: automatically remove claims owned by inactive players which: //...aren't protecting much OR //...are a free new player claim (and the player has no other claims) OR //...because the player has been gone a REALLY long time, and that expiration has been configured in config.yml //runs every 1 minute in the main thread class CleanupUnusedClaimsTask implements Runnable { int nextClaimIndex; CleanupUnusedClaimsTask() { //start scanning in a random spot if(GriefPrevention.instance.dataStore.claims.size() == 0) { this.nextClaimIndex = 0; } else { Random randomNumberGenerator = new Random(); this.nextClaimIndex = randomNumberGenerator.nextInt(GriefPrevention.instance.dataStore.claims.size()); } } @Override public void run() { //don't do anything when there are no claims if(GriefPrevention.instance.dataStore.claims.size() == 0) return; //wrap search around to beginning if(this.nextClaimIndex >= GriefPrevention.instance.dataStore.claims.size()) this.nextClaimIndex = 0; //decide which claim to check next Claim claim = GriefPrevention.instance.dataStore.claims.get(this.nextClaimIndex++); //skip administrative claims if(claim.isAdminClaim()) return; //track whether we do any important work which would require cleanup afterward boolean cleanupChunks = false; //get data for the player, especially last login timestamp PlayerData playerData = GriefPrevention.instance.dataStore.getPlayerData(claim.ownerID); //determine area of the default chest claim int areaOfDefaultClaim = 0; if(GriefPrevention.instance.config_claims_automaticClaimsForNewPlayersRadius >= 0) { areaOfDefaultClaim = (int)Math.pow(GriefPrevention.instance.config_claims_automaticClaimsForNewPlayersRadius * 2 + 1, 2); } //if he's been gone at least a week, if he has ONLY the new player claim, it will be removed Calendar sevenDaysAgo = Calendar.getInstance(); sevenDaysAgo.add(Calendar.DATE, -GriefPrevention.instance.config_claims_chestClaimExpirationDays); boolean newPlayerClaimsExpired = sevenDaysAgo.getTime().after(playerData.getLastLogin()); //if only one claim, and the player hasn't played in a week if(newPlayerClaimsExpired && playerData.getClaims().size() == 1) { //if that's a chest claim and those are set to expire if(claim.getArea() <= areaOfDefaultClaim && GriefPrevention.instance.config_claims_chestClaimExpirationDays > 0) { claim.removeSurfaceFluids(null); GriefPrevention.instance.dataStore.deleteClaim(claim); cleanupChunks = true; //if configured to do so, restore the land to natural if((GriefPrevention.instance.creativeRulesApply(claim.getLesserBoundaryCorner()) && GriefPrevention.instance.config_claims_creativeAutoNatureRestoration) || 
GriefPrevention.instance.config_claims_survivalAutoNatureRestoration) { GriefPrevention.instance.restoreClaim(claim, 0); } GriefPrevention.AddLogEntry(" " + claim.getOwnerName() + "'s new player claim expired."); } } //if configured to always remove claims after some inactivity period without exceptions... else if(GriefPrevention.instance.config_claims_expirationDays > 0) { Calendar earliestPermissibleLastLogin = Calendar.getInstance(); earliestPermissibleLastLogin.add(Calendar.DATE, -GriefPrevention.instance.config_claims_expirationDays); if(earliestPermissibleLastLogin.getTime().after(playerData.getLastLogin())) { //make a copy of this player's claim list Vector<Claim> claims = new Vector<Claim>(); for(int i = 0; i < playerData.getClaims().size(); i++) { claims.add(playerData.getClaims().get(i)); } //delete them GriefPrevention.instance.dataStore.deleteClaimsForPlayer(claim.ownerID, true); GriefPrevention.AddLogEntry(" All of " + claim.getOwnerName() + "'s claims have expired."); for(int i = 0; i < claims.size(); i++) { //if configured to do so, restore the land to natural if((GriefPrevention.instance.creativeRulesApply(claims.get(i).getLesserBoundaryCorner()) && GriefPrevention.instance.config_claims_creativeAutoNatureRestoration) || GriefPrevention.instance.config_claims_survivalAutoNatureRestoration) { GriefPrevention.instance.restoreClaim(claims.get(i), 0); cleanupChunks = true; } } } } else if(GriefPrevention.instance.config_claims_unusedClaimExpirationDays > 0 && GriefPrevention.instance.creativeRulesApply(claim.getLesserBoundaryCorner())) { //avoid scanning large claims and administrative claims if(claim.isAdminClaim() || claim.getWidth() > 25 || claim.getHeight() > 25) return; //otherwise scan the claim content int minInvestment = 400; long investmentScore = claim.getPlayerInvestmentScore(); cleanupChunks = true; if(investmentScore < minInvestment) { GriefPrevention.instance.dataStore.deleteClaim(claim); GriefPrevention.AddLogEntry("Removed " + claim.getOwnerName() + "'s unused claim @ " + GriefPrevention.getfriendlyLocationString(claim.getLesserBoundaryCorner())); //if configured to do so, restore the claim area to natural state if((GriefPrevention.instance.creativeRulesApply(claim.getLesserBoundaryCorner()) && GriefPrevention.instance.config_claims_creativeAutoNatureRestoration) || GriefPrevention.instance.config_claims_survivalAutoNatureRestoration) { GriefPrevention.instance.restoreClaim(claim, 0); } } } GriefPrevention.instance.dataStore.clearCachedPlayerData(claim.ownerID); //since we're potentially loading a lot of chunks to scan parts of the world where there are no players currently playing, be mindful of memory usage if(cleanupChunks) { World world = claim.getLesserBoundaryCorner().getWorld(); Chunk lesserChunk = world.getChunkAt(claim.getLesserBoundaryCorner()); Chunk greaterChunk = world.getChunkAt(claim.getGreaterBoundaryCorner()); for(int x = lesserChunk.getX(); x <= greaterChunk.getX(); x++) { for(int z = lesserChunk.getZ(); z <= greaterChunk.getZ(); z++) { Chunk chunk = world.getChunkAt(x, z); if(chunk.isLoaded()) { chunk.unload(true, true); } } } } } }
Perf: Claim Cleanup
src/me/ryanhamshire/GriefPrevention/CleanupUnusedClaimsTask.java
Perf: Claim Cleanup
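The CleanupUnusedClaimsTask above deliberately inspects a single claim per run, starting from a random index and wrapping the cursor around when it passes the end of the claim list, so each periodic main-thread tick stays cheap. Below is a minimal sketch of that round-robin scanning pattern, with a plain List standing in for the plugin's claim store; none of the names in it are GriefPrevention API.

import java.util.List;
import java.util.Random;

// Minimal sketch of the round-robin scanning pattern used by CleanupUnusedClaimsTask;
// the "claims" list here is just strings, not the plugin's real data store.
class RoundRobinScanTask implements Runnable {

    private final List<String> claims;   // stand-in for dataStore.claims
    private int nextClaimIndex;

    RoundRobinScanTask(List<String> claims) {
        this.claims = claims;
        // Start at a random spot so restarts don't always revisit the same claims first.
        this.nextClaimIndex = claims.isEmpty() ? 0 : new Random().nextInt(claims.size());
    }

    @Override
    public void run() {
        if (claims.isEmpty()) return;            // nothing to do
        if (nextClaimIndex >= claims.size()) {   // wrap the cursor back to the start
            nextClaimIndex = 0;
        }
        String claim = claims.get(nextClaimIndex++);
        // ...expiration checks would go here; only one claim is inspected per run,
        // which keeps the work per scheduled tick small.
        System.out.println("inspected " + claim);
    }

    public static void main(String[] args) {
        RoundRobinScanTask task = new RoundRobinScanTask(List.of("a", "b", "c"));
        for (int i = 0; i < 5; i++) {
            task.run();   // visits one claim per call, wrapping around the list
        }
    }
}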
Java
mit
900bee62f463989e5d44e334352432fb0685a28d
0
piotrkot/simple-mustache,piotrkot/simple-mustache
/** * The MIT License (MIT) * * Copyright (c) 2016 piotrkot * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.github.piotrkot.mustache.tags; import com.github.piotrkot.mustache.TagIndicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.Collections; import java.util.regex.Pattern; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.Test; /** * Tests for Section. * @author Piotr Kotlicki ([email protected]) * @version $Id$ * @since 1.0 */ public final class SectionTest { /** * Should render section. * @throws Exception If fails. */ @Test public void shouldRenderSection() throws Exception { MatcherAssert.assertThat( new Section( new SquareIndicate() ).render( "1 [[#2]]X[[/2]] [[#3]]Y[[/3]]", ImmutableMap.of("2", true, "3", ImmutableList.of("12", "32")) ), Matchers.is("1 X YY") ); } /** * Should not render section. * @throws Exception If fails. */ @Test public void shouldNotRenderSection() throws Exception { MatcherAssert.assertThat( new Section( new SquareIndicate() ).render( "[[#A]]X[[/A]][[#B]]Y[[/B]][[#C]]Z[[/C]][[#D]]W[[/D]]", ImmutableMap.of( "A", false, "B", Collections.emptyList(), "C", 1 ) ), Matchers.is("") ); } /** * Should render section with variable. * @throws Exception If fails. * @checkstyle MultipleStringLiterals (10 lines) */ @Test public void shouldRenderSectionWithVariable() throws Exception { MatcherAssert.assertThat( new Section(new SquareIndicate()).render( "1 [[#a]]X [[x]][[/a]] [[#b]][[y]] [[/b]]", ImmutableMap.of( "a", Collections.singletonList(ImmutableMap.of("x", "iks")), "b", ImmutableList.of( ImmutableMap.of("y", "y1"), ImmutableMap.of("y", "y2") ) ) ), Matchers.is("1 X iks y1 y2 ") ); } /** * Should render section with subsection. * @throws Exception If fails. * @checkstyle MultipleStringLiterals (13 lines) */ @Test public void shouldRenderSectionWithSubsection() throws Exception { MatcherAssert.assertThat( new Section(new SquareIndicate()).render( "1 [[#o]]-X [[q]]+[[#i]]Y [[w]][[/i]] [[/o]]", ImmutableMap.of( "o", ImmutableList.of( ImmutableMap.of( "q", "Q1", "i", ImmutableList.of( ImmutableMap.of("w", "W1"), ImmutableMap.of("w", "W2") ) ), ImmutableMap.of( "q", "Q2", "i", ImmutableList.of( ImmutableMap.of("w", "W3"), ImmutableMap.of("w", "W4") ) ) ) ) ), Matchers.is("1 -X Q1+Y W1Y W2 -X Q2+Y W3Y W4 ") ); } /** * Should render section nested. * @throws Exception If fails. 
* @checkstyle MultipleStringLiterals (13 lines) */ @Test public void shouldRenderSectionNested() throws Exception { MatcherAssert.assertThat( new Section(new SquareIndicate()).render( "[[#out]][[v]][[#in]][[vv]][[/in]][[^in]]none[[/in]][[/out]]", ImmutableMap.of( "out", ImmutableList.of( ImmutableMap.of( "v", "V1", "in", ImmutableList.of( ImmutableMap.of("vv", "X1"), ImmutableMap.of("vv", "X2") ) ), ImmutableMap.of( "v", "V2", "in", Collections.emptyList() ) ) ) ), Matchers.is("V1X1X2V2none") ); } /** * Should render valid tags. * @throws Exception If fails. */ @Test public void shouldRenderValidTags() throws Exception { MatcherAssert.assertThat( new Section( new SquareIndicate() ).render( "[[ #aA0._ ]] [[ / aA0._ ]]", ImmutableMap.of("aA0._", true) ), Matchers.is(" ") ); } /** * Should render on new lines. * @throws Exception If fails. * @checkstyle MultipleStringLiterals (12 lines) */ @Test public void shouldRenderOnNewlines() throws Exception { MatcherAssert.assertThat( new Section( new SquareIndicate() ).render( "[[#nl]]\n[[line]]\n[[/nl]]", ImmutableMap.of( "nl", ImmutableList.of( ImmutableMap.of("line", "1-line"), ImmutableMap.of("line", "2-line") ) ) ), Matchers.is("\n1-line\n\n2-line\n") ); } /** * Tag indicate with double square parentheses. */ private class SquareIndicate implements TagIndicate { @Override public String safeStart() { return Pattern.quote("[["); } @Override public String safeEnd() { return Pattern.quote("]]"); } } }
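A small aside on the SquareIndicate helper in the test above: it wraps its "[[" and "]]" delimiters in Pattern.quote, presumably because the Section implementation drops them into a regular expression and '[' would otherwise open a character class. A stand-alone sketch of why the quoting matters; the demo class and pattern are illustrative and not part of simple-mustache.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class QuotedDelimiterDemo {
    public static void main(String[] args) {
        String template = "1 [[#2]]X[[/2]] [[#3]]Y[[/3]]";

        // Pattern.quote turns "[[" into \Q[[\E so the brackets are matched literally
        // instead of being interpreted as regex metacharacters.
        Pattern sectionStart = Pattern.compile(
                Pattern.quote("[[") + "\\s*#\\s*(\\w+)\\s*" + Pattern.quote("]]"));

        Matcher m = sectionStart.matcher(template);
        while (m.find()) {
            System.out.println("section start for: " + m.group(1));   // prints 2, then 3
        }
    }
}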
src/test/java/com/github/piotrkot/mustache/tags/SectionTest.java
/** * The MIT License (MIT) * * Copyright (c) 2016 piotrkot * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.github.piotrkot.mustache.tags; import com.github.piotrkot.mustache.TagIndicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.Collections; import java.util.regex.Pattern; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.Test; /** * Tests for Section. * @author Piotr Kotlicki ([email protected]) * @version $Id$ * @since 1.0 */ public final class SectionTest { /** * Should render section. * @throws Exception If fails. */ @Test public void shouldRenderSection() throws Exception { MatcherAssert.assertThat( new Section( new SquareIndicate() ).render( "1 [[#2]]X[[/2]] [[#3]]Y[[/3]]", ImmutableMap.of("2", true, "3", ImmutableList.of("12", "32")) ), Matchers.is("1 X YY") ); } /** * Should not render section. * @throws Exception If fails. */ @Test public void shouldNotRenderSection() throws Exception { MatcherAssert.assertThat( new Section( new SquareIndicate() ).render( "[[#A]]X[[/A]][[#B]]Y[[/B]][[#C]]Z[[/C]][[#D]]W[[/D]]", ImmutableMap.of( "A", false, "B", Collections.emptyList(), "C", 1 ) ), Matchers.is("") ); } /** * Should render section with variable. * @throws Exception If fails. * @checkstyle MultipleStringLiterals (10 lines) */ @Test public void shouldRenderSectionWithVariable() throws Exception { MatcherAssert.assertThat( new Section(new SquareIndicate()).render( "1 [[#a]]X [[x]][[/a]] [[#b]][[y]] [[/b]]", ImmutableMap.of( "a", Collections.singletonList(ImmutableMap.of("x", "iks")), "b", ImmutableList.of( ImmutableMap.of("y", "y1"), ImmutableMap.of("y", "y2") ) ) ), Matchers.is("1 X iks y1 y2 ") ); } /** * Should render section with subsection. * @throws Exception If fails. * @checkstyle MultipleStringLiterals (13 lines) */ @Test public void shouldRenderSectionWithSubsection() throws Exception { MatcherAssert.assertThat( new Section(new SquareIndicate()).render( "1 [[#o]]-X [[q]]+[[#i]]Y [[w]][[/i]] [[/o]]", ImmutableMap.of( "o", ImmutableList.of( ImmutableMap.of("q", "Q1"), ImmutableMap.of("q", "Q2") ), "i", ImmutableList.of( ImmutableMap.of("w", "W1"), ImmutableMap.of("w", "W2") ) ) ), Matchers.is("1 -X Q1+Y W1Y W2 -X Q2+Y W1Y W2 ") ); } /** * Should render valid tags. * @throws Exception If fails. 
*/ @Test public void shouldRenderValidTags() throws Exception { MatcherAssert.assertThat( new Section( new SquareIndicate() ).render( "[[ #aA0._ ]] [[ / aA0._ ]]", ImmutableMap.of("aA0._", true) ), Matchers.is(" ") ); } /** * Should render on new lines. * @throws Exception If fails. * @checkstyle MultipleStringLiterals (12 lines) */ @Test public void shouldRenderOnNewlines() throws Exception { MatcherAssert.assertThat( new Section( new SquareIndicate() ).render( "[[#nl]]\n[[line]]\n[[/nl]]", ImmutableMap.of( "nl", ImmutableList.of( ImmutableMap.of("line", "1-line"), ImmutableMap.of("line", "2-line") ) ) ), Matchers.is("\n1-line\n\n2-line\n") ); } /** * Tag indicate with double square parentheses. */ private class SquareIndicate implements TagIndicate { @Override public String safeStart() { return Pattern.quote("[["); } @Override public String safeEnd() { return Pattern.quote("]]"); } } }
Adding failing test
src/test/java/com/github/piotrkot/mustache/tags/SectionTest.java
Adding failing test
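The change to shouldRenderSectionWithSubsection above moves the inner "i" list inside each element of the outer "o" list, so every outer item carries its own subsection data and the expected output interleaves them per item. The plain-Java sketch below uses no mustache engine at all; it simply walks that nested data shape by hand to show how the asserted string "1 -X Q1+Y W1Y W2 -X Q2+Y W3Y W4 " arises.

import java.util.List;
import java.util.Map;

public class NestedSectionDemo {
    public static void main(String[] args) {
        // Same nested shape as the new test: each outer item carries its own inner list.
        List<Map<String, Object>> outer = List.of(
            Map.of("q", "Q1", "i", List.of(Map.of("w", "W1"), Map.of("w", "W2"))),
            Map.of("q", "Q2", "i", List.of(Map.of("w", "W3"), Map.of("w", "W4")))
        );

        // Hand-rolled rendering of "1 [[#o]]-X [[q]]+[[#i]]Y [[w]][[/i]] [[/o]]".
        StringBuilder out = new StringBuilder("1 ");
        for (Map<String, Object> o : outer) {
            out.append("-X ").append(o.get("q")).append('+');
            @SuppressWarnings("unchecked")
            List<Map<String, Object>> inner = (List<Map<String, Object>>) o.get("i");
            for (Map<String, Object> i : inner) {
                out.append("Y ").append(i.get("w"));
            }
            out.append(' ');
        }
        System.out.println(out);   // 1 -X Q1+Y W1Y W2 -X Q2+Y W3Y W4 
    }
}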
Java
mit
d9311b1f16bac944a3006416b58179d114b033fe
0
umesh0492/pill-logger,umesh0492/pill-logger,umesh0492/pill-logger
package uk.co.cntwo.pilllogger.fragments; import android.content.Context; import android.graphics.Color; import android.os.Bundle; import android.app.Fragment; import android.util.SparseIntArray; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.TextView; import com.echo.holographlibrary.Line; import com.echo.holographlibrary.LineGraph; import com.echo.holographlibrary.LinePoint; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.concurrent.TimeUnit; import uk.co.cntwo.pilllogger.R; import uk.co.cntwo.pilllogger.adapters.ConsumptionListAdapter; import uk.co.cntwo.pilllogger.helpers.Logger; import uk.co.cntwo.pilllogger.listeners.AddConsumptionClickListener; import uk.co.cntwo.pilllogger.models.Consumption; import uk.co.cntwo.pilllogger.models.Pill; import uk.co.cntwo.pilllogger.tasks.GetConsumptionsTask; import uk.co.cntwo.pilllogger.tasks.GetFavouritePillsTask; import uk.co.cntwo.pilllogger.tasks.GetPillsTask; import uk.co.cntwo.pilllogger.tasks.InitTestDbTask; import org.joda.time.DateTime; import org.joda.time.Days; /** * Created by nick on 23/10/13. */ public class MainFragment extends Fragment implements InitTestDbTask.ITaskComplete, GetConsumptionsTask.ITaskComplete, GetFavouritePillsTask.ITaskComplete, GetPillsTask.ITaskComplete { private static final String TAG = "MainFragment"; ListView _listView; ViewGroup _favouriteContainer; View _mainLayout; HashMap<Integer, Pill> _allPills = new HashMap<Integer, Pill>(); @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View v = inflater.inflate(R.layout.main_fragment, container, false); _mainLayout = v; Logger.v(TAG, "onCreateView Called"); //Doing this to test - will not be needed when working fully new InitTestDbTask(this.getActivity(), this).execute(); _listView = (ListView) (v != null ? v.findViewById(R.id.main_consumption_list) : null); _favouriteContainer = (ViewGroup) (v!=null ? 
v.findViewById(R.id.button_container):null); ImageView addConsumption = (ImageView) v.findViewById(R.id.main_add); addConsumption.setOnClickListener(new AddConsumptionClickListener(getActivity())); if (_listView.getAdapter() != null) //Trying this to make the list refresh after adding the new consumption ((ConsumptionListAdapter)_listView.getAdapter()).notifyDataSetChanged(); return v; } @Override public void initComplete() { new GetPillsTask(this.getActivity(), this).execute(); } @Override public void onResume() { super.onResume(); new GetPillsTask(this.getActivity(), this).execute(); new GetFavouritePillsTask(this.getActivity(), this).execute(); } @Override public void consumptionsReceived(List<Consumption> consumptions) { if(consumptions != null && consumptions.size() > 0){ _listView.setAdapter(new ConsumptionListAdapter(getActivity(), R.layout.consumption_list_item, consumptions)); HashMap<Integer, SparseIntArray> xPoints = new HashMap<Integer, SparseIntArray>(); DateTime aMonthAgo = new DateTime().minusMonths(1); for (Consumption c : consumptions) { int pillId = c.get_pill_id(); Days days = Days.daysBetween(aMonthAgo.withTimeAtStartOfDay(), new DateTime(c.get_date()).plusDays(1).withTimeAtStartOfDay()); int x = days.getDays(); SparseIntArray currentLineValues; if(xPoints.containsKey(pillId)) currentLineValues = xPoints.get(pillId); else{ currentLineValues = new SparseIntArray(); xPoints.put(pillId, currentLineValues); } int value = 1; if(currentLineValues.indexOfKey(x) >= 0) value += currentLineValues.get(x); currentLineValues.put(x, value); } Days totalDays = Days.daysBetween(aMonthAgo.withTimeAtStartOfDay(), new DateTime().plusDays(1).withTimeAtStartOfDay()); int dayCount = totalDays.getDays(); plotLineGraph(xPoints, dayCount, R.id.main_graph); } } private void plotLineGraph(HashMap<Integer, SparseIntArray> consumptionData, int days, int graphId){ LineGraph li = (LineGraph)_mainLayout.findViewById(graphId); for(int pillId : consumptionData.keySet()){ Line line = new Line(); Pill p = _allPills.get(pillId); line.setColor(p.getColour()); SparseIntArray points = consumptionData.get(pillId); for(int i = 0; i <= days; i++){ LinePoint linePoint = new LinePoint(); linePoint.setX(i); if(points.indexOfKey(i) > 0) linePoint.setY(points.get(i)); else linePoint.setY(0); line.addPoint(linePoint); } li.addLine(line); } double maxY = li.getMaxY(); li.setRangeY(0, (float)(maxY * 1.05)); } @Override public void favouritePillsReceived(List<Pill> pills) { if(_favouriteContainer == null) return; int children = _favouriteContainer.getChildCount(); int start = 1; if(pills.size() == 0) //remove customise button start = 2; _favouriteContainer.removeViews(start, children -start); for(Pill p : pills){ LayoutInflater layoutInflater = (LayoutInflater)getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE); View v = layoutInflater.inflate(R.layout.favourite_pill, null); if(p.getName().length() > 0){ TextView letter = (TextView) v.findViewById(R.id.pill_letter); letter.setText(p.getName().substring(0,1)); Logger.d(TAG, "Adding favourite for: " + p.getName()); } _favouriteContainer.addView(v, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); } } @Override public void pillsReceived(List<Pill> pills) { for(Pill p : pills){ _allPills.put(p.getId(), p); } new GetConsumptionsTask(this.getActivity(), this).execute(); } }
PillLogger/src/uk/co/cntwo/pilllogger/fragments/MainFragment.java
package uk.co.cntwo.pilllogger.fragments; import android.content.Context; import android.graphics.Color; import android.os.Bundle; import android.app.Fragment; import android.util.SparseIntArray; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.TextView; import com.echo.holographlibrary.Line; import com.echo.holographlibrary.LineGraph; import com.echo.holographlibrary.LinePoint; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.concurrent.TimeUnit; import uk.co.cntwo.pilllogger.R; import uk.co.cntwo.pilllogger.adapters.ConsumptionListAdapter; import uk.co.cntwo.pilllogger.helpers.Logger; import uk.co.cntwo.pilllogger.listeners.AddConsumptionClickListener; import uk.co.cntwo.pilllogger.models.Consumption; import uk.co.cntwo.pilllogger.models.Pill; import uk.co.cntwo.pilllogger.tasks.GetConsumptionsTask; import uk.co.cntwo.pilllogger.tasks.GetFavouritePillsTask; import uk.co.cntwo.pilllogger.tasks.GetPillsTask; import uk.co.cntwo.pilllogger.tasks.InitTestDbTask; import org.joda.time.DateTime; import org.joda.time.Days; /** * Created by nick on 23/10/13. */ public class MainFragment extends Fragment implements InitTestDbTask.ITaskComplete, GetConsumptionsTask.ITaskComplete, GetFavouritePillsTask.ITaskComplete, GetPillsTask.ITaskComplete { private static final String TAG = "MainFragment"; ListView _listView; ViewGroup _favouriteContainer; View _mainLayout; HashMap<Integer, Pill> _allPills = new HashMap<Integer, Pill>(); @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View v = inflater.inflate(R.layout.main_fragment, container, false); _mainLayout = v; Logger.v(TAG, "onCreateView Called"); //Doing this to test - will not be needed when working fully new InitTestDbTask(this.getActivity(), this).execute(); _listView = (ListView) (v != null ? v.findViewById(R.id.main_consumption_list) : null); _favouriteContainer = (ViewGroup) (v!=null ? 
v.findViewById(R.id.button_container):null); ImageView addConsumption = (ImageView) v.findViewById(R.id.main_add); addConsumption.setOnClickListener(new AddConsumptionClickListener(getActivity())); if (_listView.getAdapter() != null) //Trying this to make the list refresh after adding the new consumption ((ConsumptionListAdapter)_listView.getAdapter()).notifyDataSetChanged(); return v; } @Override public void initComplete() { new GetPillsTask(this.getActivity(), this).execute(); } @Override public void onResume() { super.onResume(); new GetPillsTask(this.getActivity(), this).execute(); new GetFavouritePillsTask(this.getActivity(), this).execute(); } @Override public void consumptionsReceived(List<Consumption> consumptions) { if(consumptions != null && consumptions.size() > 0){ _listView.setAdapter(new ConsumptionListAdapter(getActivity(), R.layout.consumption_list_item, consumptions)); HashMap<Integer, SparseIntArray> xPoints = new HashMap<Integer, SparseIntArray>(); DateTime aMonthAgo = new DateTime().minusMonths(1); for (Consumption c : consumptions) { int pillId = c.get_pill_id(); Days days = Days.daysBetween(aMonthAgo.withTimeAtStartOfDay(), new DateTime(c.get_date()).withTimeAtStartOfDay()); int x = days.getDays(); SparseIntArray currentLineValues; if(xPoints.containsKey(pillId)) currentLineValues = xPoints.get(pillId); else{ currentLineValues = new SparseIntArray(); xPoints.put(pillId, currentLineValues); } int value = 1; if(currentLineValues.indexOfKey(x) > 0) value += currentLineValues.get(x); currentLineValues.put(x, value); } Days totalDays = Days.daysBetween(aMonthAgo.withTimeAtStartOfDay(), new DateTime().withTimeAtStartOfDay()); int dayCount = totalDays.getDays(); plotLineGraph(xPoints, dayCount, R.id.main_graph); } } private void plotLineGraph(HashMap<Integer, SparseIntArray> consumptionData, int days, int graphId){ LineGraph li = (LineGraph)_mainLayout.findViewById(graphId); for(int pillId : consumptionData.keySet()){ Line line = new Line(); Pill p = _allPills.get(pillId); line.setColor(p.getColour()); SparseIntArray points = consumptionData.get(pillId); for(int i = 0; i <= days; i++){ LinePoint linePoint = new LinePoint(); linePoint.setX(i); if(points.indexOfKey(i) > 0) linePoint.setY(points.get(i)); else linePoint.setY(0); line.addPoint(linePoint); } li.addLine(line); } double maxY = li.getMaxY(); li.setRangeY(0, (float)(maxY * 1.05)); } @Override public void favouritePillsReceived(List<Pill> pills) { if(_favouriteContainer == null) return; int children = _favouriteContainer.getChildCount(); int start = 1; if(pills.size() == 0) //remove customise button start = 2; _favouriteContainer.removeViews(start, children -start); for(Pill p : pills){ LayoutInflater layoutInflater = (LayoutInflater)getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE); View v = layoutInflater.inflate(R.layout.favourite_pill, null); if(p.getName().length() > 0){ TextView letter = (TextView) v.findViewById(R.id.pill_letter); letter.setText(p.getName().substring(0,1)); Logger.d(TAG, "Adding favourite for: " + p.getName()); } _favouriteContainer.addView(v, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); } } @Override public void pillsReceived(List<Pill> pills) { for(Pill p : pills){ _allPills.put(p.getId(), p); } new GetConsumptionsTask(this.getActivity(), this).execute(); } }
Fixed the graph not showing same-day pills
PillLogger/src/uk/co/cntwo/pilllogger/fragments/MainFragment.java
Fixed the graph not showing same-day pills
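The fix above does two things: it shifts the per-consumption day index with plusDays(1) so an entry logged today still maps onto a plotted x value, and it relaxes the duplicate-count check in consumptionsReceived from indexOfKey(x) > 0 to >= 0. The second change matters because SparseIntArray.indexOfKey behaves like Arrays.binarySearch: a negative result means the key is absent, and any non-negative index, including 0 for the smallest key, means it is present. A plain-Java sketch of that pitfall, using Arrays.binarySearch as a stand-in for the Android class:

import java.util.Arrays;

public class IndexOfKeyPitfall {
    public static void main(String[] args) {
        int[] sortedKeys = {0, 3, 7};          // analogous to SparseIntArray's sorted key array

        int idx = Arrays.binarySearch(sortedKeys, 0);
        System.out.println(idx);               // 0 -> the key IS present, at position 0

        boolean presentBuggy = idx > 0;        // false: wrongly treats the first key as missing
        boolean presentFixed = idx >= 0;       // true: any non-negative index means present

        System.out.println(presentBuggy + " vs " + presentFixed);
    }
}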
Java
epl-1.0
9bff59bf2649b597b1996d6125f3c9510f158d0d
0
smarr/golo-lang,mojavelinux/golo-lang,dynamid/golo-lang-insa-citilab-historical-reference,jeffmaury/golo-lang,mojavelinux/golo-lang,titimoby/golo-lang,jeffmaury/golo-lang,smarr/golo-lang,titimoby/golo-lang,dynamid/golo-lang-insa-citilab-historical-reference,Mogztter/golo-lang,franckverrot/golo-lang,Mogztter/golo-lang,franckverrot/golo-lang,jeffmaury/golo-lang,Mogztter/golo-lang,dynamid/golo-lang-insa-citilab-historical-reference
package gololang.runtime; import java.lang.invoke.CallSite; import java.lang.invoke.ConstantCallSite; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodType; import java.lang.reflect.Method; import static java.lang.invoke.MethodHandles.Lookup; public final class InvokeDynamicSupport { public static CallSite bootstrapFunctionInvocation(Lookup caller, String name, MethodType type) throws IllegalAccessException, ClassNotFoundException { Class<?> callerClass = caller.lookupClass(); MethodHandle handle = null; Method method = findStaticMethod(callerClass, name, type.parameterArray()); if (method != null) { handle = caller.unreflect(method).asType(type); } else { int methodClassSeparatorIndex = name.lastIndexOf("."); if (methodClassSeparatorIndex == -1) { throw new NoSuchMethodError(name); } String className = name.substring(0, methodClassSeparatorIndex); String methodName = name.substring(methodClassSeparatorIndex + 1); Class<?> targetClass = Class.forName(className, true, callerClass.getClassLoader()); method = findStaticMethod(targetClass, methodName, type.parameterArray()); if (method == null) { throw new NoSuchMethodError(name); } handle = caller.unreflect(method).asType(type); } return new ConstantCallSite(handle); } private static Method findStaticMethod(Class<?> klass, String name, Class<?>[] argumentTypes) { for (Method method : klass.getDeclaredMethods()) { Class<?>[] parameterTypes = method.getParameterTypes(); if (method.getName().equals(name) && parameterTypes.length == argumentTypes.length) { return method; } } return null; } }
src/main/java/gololang/runtime/InvokeDynamicSupport.java
package gololang.runtime; import java.lang.invoke.CallSite; import java.lang.invoke.ConstantCallSite; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodType; import java.lang.reflect.Method; import static java.lang.invoke.MethodHandles.Lookup; public class InvokeDynamicSupport { public static CallSite bootstrapFunctionInvocation(Lookup caller, String name, MethodType type) throws IllegalAccessException, ClassNotFoundException { Class<?> callerClass = caller.lookupClass(); MethodHandle handle = null; Method method = findStaticMethod(callerClass, name, type.parameterArray()); if (method != null) { handle = caller.unreflect(method).asType(type); } else { int methodClassSeparatorIndex = name.lastIndexOf("."); if (methodClassSeparatorIndex == -1) { throw new NoSuchMethodError(name); } String className = name.substring(0, methodClassSeparatorIndex); String methodName = name.substring(methodClassSeparatorIndex + 1); Class<?> targetClass = Class.forName(className, true, callerClass.getClassLoader()); method = findStaticMethod(targetClass, methodName, type.parameterArray()); if (method == null) { throw new NoSuchMethodError(name); } handle = caller.unreflect(method).asType(type); } return new ConstantCallSite(handle); } private static Method findStaticMethod(Class<?> klass, String name, Class<?>[] argumentTypes) { for (Method method : klass.getDeclaredMethods()) { Class<?>[] parameterTypes = method.getParameterTypes(); if (method.getName().equals(name) && parameterTypes.length == argumentTypes.length) { return method; } } return null; } }
Made final.
src/main/java/gololang/runtime/InvokeDynamicSupport.java
Made final.
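The bootstrap above resolves a Golo function call to a static Java method, unreflects it into a MethodHandle, adapts it to the requested type with asType, and pins it into a ConstantCallSite. The sketch below exercises that unreflect/asType/ConstantCallSite chain directly, with no actual invokedynamic instruction involved; the greet target is made up purely for illustration.

import java.lang.invoke.ConstantCallSite;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.reflect.Method;

public class BootstrapSketch {

    // Hypothetical target method a call site could be linked to.
    public static String greet(String who) {
        return "Hello, " + who;
    }

    public static void main(String[] args) throws Throwable {
        MethodHandles.Lookup lookup = MethodHandles.lookup();

        // Find the target reflectively by name, as the bootstrap does.
        Method target = BootstrapSketch.class.getDeclaredMethod("greet", String.class);

        // The call site's requested type: (Object) -> Object, which asType adapts to.
        MethodType callSiteType = MethodType.methodType(Object.class, Object.class);
        MethodHandle handle = lookup.unreflect(target).asType(callSiteType);

        // A ConstantCallSite permanently binds the resolved handle.
        ConstantCallSite callSite = new ConstantCallSite(handle);
        Object result = callSite.getTarget().invokeExact((Object) "world");
        System.out.println(result);   // Hello, world
    }
}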
Java
agpl-3.0
21adbaae7a1356e9ad01bbad1e21d67249450171
0
cojen/Tupl
/* * Copyright 2021 Cojen.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.cojen.tupl.rows; import java.lang.invoke.CallSite; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.lang.ref.WeakReference; import java.util.Collection; import java.util.LinkedHashMap; import java.util.Map; import java.util.Objects; import java.util.TreeMap; import org.cojen.maker.ClassMaker; import org.cojen.maker.Field; import org.cojen.maker.Label; import org.cojen.maker.MethodMaker; import org.cojen.maker.Variable; import org.cojen.tupl.Cursor; import org.cojen.tupl.DatabaseException; import org.cojen.tupl.Index; import org.cojen.tupl.LockResult; import org.cojen.tupl.RowUpdater; import org.cojen.tupl.Table; import org.cojen.tupl.Transaction; import org.cojen.tupl.UnmodifiableViewException; import org.cojen.tupl.core.RowPredicateLock; import org.cojen.tupl.diag.QueryPlan; import org.cojen.tupl.filter.ColumnFilter; import org.cojen.tupl.views.ViewUtils; /** * Makes Table classes that extend AbstractTable. * * @author Brian S O'Neill */ public class TableMaker { private final RowStore mStore; private final Class<?> mRowType; private final RowGen mRowGen; private final RowInfo mRowInfo; private final RowGen mCodecGen; private final Class<?> mRowClass; private final byte[] mSecondaryDescriptor; private final long mIndexId; private final boolean mSupportsIndexLock; private final ColumnInfo mAutoColumn; private ClassMaker mClassMaker; /** * Constructor for primary table. * * @param store generated class is pinned to this specific instance * @param rowGen describes row encoding */ TableMaker(RowStore store, Class<?> type, RowGen rowGen, long indexId, boolean supportsIndexLock) { this(store, type, rowGen, rowGen, null, indexId, supportsIndexLock); } /** * Constructor for secondary index view table. * * @param store generated class is pinned to this specific instance * @param rowGen describes row encoding * @param codecGen describes key and value codecs (different than rowGen) * @param secondaryDesc secondary index descriptor */ TableMaker(RowStore store, Class<?> type, RowGen rowGen, RowGen codecGen, byte[] secondaryDesc, long indexId, boolean supportsIndexLock) { mStore = store; mRowType = type; mRowGen = rowGen; mRowInfo = rowGen.info; mCodecGen = codecGen; mRowClass = RowMaker.find(type); mSecondaryDescriptor = secondaryDesc; mIndexId = indexId; mSupportsIndexLock = supportsIndexLock; ColumnInfo auto = null; if (isPrimaryTable()) { for (ColumnInfo column : codecGen.info.keyColumns.values()) { if (column.isAutomatic()) { auto = column; break; } } } mAutoColumn = auto; } /** * Return a constructor which accepts a (TableManager, Index, RowPredicateLock) and returns * an AbstractTable implementation. 
*/ MethodHandle finish() { { String suffix; Class baseClass; if (isPrimaryTable()) { suffix = "Table"; baseClass = AbstractTable.class; } else { suffix = "Unjoined"; baseClass = AbstractTableView.class; } mClassMaker = mCodecGen.beginClassMaker(getClass(), mRowType, suffix).public_() .extend(baseClass).implement(TableBasicsMaker.find(mRowType)); } MethodType mt = MethodType.methodType (void.class, TableManager.class, Index.class, RowPredicateLock.class); MethodMaker ctor = mClassMaker.addConstructor(mt); ctor.invokeSuperConstructor(ctor.param(0), ctor.param(1), ctor.param(2)); // Add encode/decode methods. { ColumnCodec[] keyCodecs = mCodecGen.keyCodecs(); addEncodeColumnsMethod("encodePrimaryKey", keyCodecs); addDecodeColumnsMethod("decodePrimaryKey", keyCodecs); if (isPrimaryTable()) { addDynamicEncodeValueColumns(); addDynamicDecodeValueColumns(); } else { // The encodeValue method is only used for storing rows into the table. By // making it always fail, there's no backdoor to permit modifications. mClassMaker.addMethod(byte[].class, "encodeValue", mRowClass) .static_().new_(UnmodifiableViewException.class).throw_(); addDecodeColumnsMethod("decodeValue", mCodecGen.valueCodecs()); } addDecodePartialHandle(); } // Add code to support an automatic column (if defined). if (mAutoColumn != null) { Class autoGenClass, autoGenApplierClass; Object minVal, maxVal; if (mAutoColumn.type == int.class) { if (mAutoColumn.isUnsigned()) { autoGenClass = AutomaticKeyGenerator.OfUInt.class; } else { autoGenClass = AutomaticKeyGenerator.OfInt.class; } autoGenApplierClass = AutomaticKeyGenerator.OfInt.Applier.class; minVal = (int) Math.max(mAutoColumn.autoMin, Integer.MIN_VALUE); maxVal = (int) Math.min(mAutoColumn.autoMax, Integer.MAX_VALUE); } else { if (mAutoColumn.isUnsigned()) { autoGenClass = AutomaticKeyGenerator.OfULong.class; } else { autoGenClass = AutomaticKeyGenerator.OfLong.class; } autoGenApplierClass = AutomaticKeyGenerator.OfLong.Applier.class; minVal = mAutoColumn.autoMin; maxVal = mAutoColumn.autoMax; } mClassMaker.implement(autoGenApplierClass); mClassMaker.addField(autoGenClass, "autogen").private_().final_(); ctor.field("autogen").set (ctor.new_(autoGenClass, ctor.param(1), minVal, maxVal, ctor.this_())); MethodMaker mm = mClassMaker.addMethod (RowPredicateLock.Closer.class, "applyToRow", Transaction.class, Object.class, mAutoColumn.type); mm.public_(); var rowVar = mm.param(1).cast(mRowClass); rowVar.field(mAutoColumn.name).set(mm.param(2)); if (!mSupportsIndexLock) { mm.return_(mm.var(RowPredicateLock.NonCloser.class).field("THE")); } else { mm.return_(mm.field("mIndexLock").invoke("tryOpenAcquire", mm.param(0), rowVar)); } var allButAuto = new TreeMap<>(mCodecGen.info.allColumns); allButAuto.remove(mAutoColumn.name); addCheckSet("checkAllButAutoSet", allButAuto); addStoreAutoMethod(); } // Add private methods which check that required columns are set. 
{ addCheckSet("checkPrimaryKeySet", mCodecGen.info.keyColumns); //addCheckSet("checkValue", mCodecGen.info.valueColumns); if (isPrimaryTable()) { addCheckSet("checkAllSet", mCodecGen.info.allColumns); addRequireSet("requireAllSet", mCodecGen.info.allColumns); } int i = 0; for (ColumnSet altKey : mCodecGen.info.alternateKeys) { addCheckSet("checkAltKeySet$" + i, altKey.keyColumns); i++; } if (isPrimaryTable() && !mCodecGen.info.valueColumns.isEmpty()) { addCheckAllDirty("checkValueAllDirty", mCodecGen.info.valueColumns); } addCheckAnyDirty("checkPrimaryKeyAnyDirty", mCodecGen.info.keyColumns); } // Add the public load/store methods, etc. addByKeyMethod("load"); addByKeyMethod("exists"); if (isPrimaryTable()) { addByKeyMethod("delete"); addStoreMethod("store", null); addStoreMethod("exchange", mRowType); addStoreMethod("insert", boolean.class); addStoreMethod("replace", boolean.class); addDoUpdateMethod(); addUpdateMethod("update", false); addUpdateMethod("merge", true); } addMarkAllCleanMethod(); addToRowMethod(); addToKeyMethod(); addRowStoreRefMethod(); addUnfilteredMethod(); if (!isPrimaryTable()) { addSecondaryDescriptorMethod(); } return doFinish(mt); } /** * Return a constructor which accepts a (Index, RowPredicateLock, TableImpl primary, * TableImpl unjoined) and returns an AbstractTable implementation. * * @param primaryTableClass the primary table implementation class * @param unjoinedClass the table implementation which is passed as the last constructor * parameter */ MethodHandle finishJoined(Class<?> primaryTableClass, Class<?> unjoinedClass) { Objects.requireNonNull(primaryTableClass); mClassMaker = mCodecGen.beginClassMaker(getClass(), mRowType, "Joined").public_() .extend(unjoinedClass); { MethodMaker mm = mClassMaker.addMethod (Class.class, "joinedPrimaryTableClass").protected_(); mm.return_(primaryTableClass); } MethodType mt = MethodType.methodType (void.class, Index.class, RowPredicateLock.class, primaryTableClass, unjoinedClass); MethodMaker ctor = mClassMaker.addConstructor(mt); var indexVar = ctor.param(0); var lockVar = ctor.param(1); var primaryVar = ctor.param(2); var unjoinedVar = ctor.param(3); var managerVar = primaryVar.invoke("tableManager"); ctor.invokeSuperConstructor(managerVar, indexVar, lockVar); mClassMaker.addField(primaryTableClass, "primaryTable").private_().final_(); ctor.field("primaryTable").set(primaryVar); mClassMaker.addField(Index.class, "primaryIndex").private_().final_(); ctor.field("primaryIndex").set(managerVar.invoke("primaryIndex")); mClassMaker.addField(unjoinedClass, "unjoined").private_().final_(); ctor.field("unjoined").set(unjoinedVar); { MethodMaker mm = mClassMaker.addMethod(AbstractTable.class, "viewUnjoined").public_(); mm.return_(mm.field("unjoined")); } addToPrimaryKeyMethod(mClassMaker, false, true); addToPrimaryKeyMethod(mClassMaker, true, true); addJoinedLoadMethod(primaryTableClass); // Define the class that implements the unfiltered JoinedScanController and construct a // singleton instance. var scanControllerClass = makeUnfilteredJoinedScanControllerClass(primaryTableClass); mClassMaker.addField(scanControllerClass, "unfiltered").private_().final_(); ctor.field("unfiltered").set (ctor.new_(scanControllerClass, null, false, null, false, ctor.field("primaryIndex"))); // Override the method inherited from the unjoined class as defined in AbstractTable. 
MethodMaker mm = mClassMaker.addMethod (SingleScanController.class, "unfiltered").protected_(); mm.return_(mm.field("unfiltered")); // Override the method inherited from AbstractTableView. mm = mClassMaker.addMethod(RowUpdater.class, "newRowUpdater", Transaction.class, ScanController.class).protected_(); mm.return_(mm.invoke("newJoinedRowUpdater", mm.param(0), mm.param(1), mm.field("primaryTable"))); return doFinish(mt); } private void addJoinedLoadMethod(Class<?> primaryTableClass) { MethodMaker mm = mClassMaker.addMethod (boolean.class, "load", Transaction.class, Object.class).public_(); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1).cast(mRowClass); { Label ready = mm.label(); mm.invoke("checkPrimaryKeySet", rowVar).ifTrue(ready); mm.new_(IllegalStateException.class, "Primary key isn't fully specified").throw_(); ready.here(); } Variable valueVar = null; Variable repeatableVar = null; Variable joinedPkVar; Label notFound = mm.label(); if (mCodecGen.info.isAltKey()) { var keyVar = mm.invoke("encodePrimaryKey", rowVar); valueVar = mm.field("mSource").invoke("load", txnVar, keyVar); valueVar.ifEq(null, notFound); joinedPkVar = mm.invoke("toPrimaryKey", rowVar, valueVar); } else { repeatableVar = mm.var(RowUtils.class).invoke("isRepeatable", txnVar); Label ready = mm.label(); repeatableVar.ifFalse(ready); var keyVar = mm.invoke("encodePrimaryKey", rowVar); // Calling exists is necessary for proper lock acquisition order. var resultVar = mm.field("mSource").invoke("exists", txnVar, keyVar); resultVar.ifFalse(notFound); ready.here(); joinedPkVar = mm.invoke("toPrimaryKey", rowVar); } var joinedValueVar = mm.field("primaryIndex").invoke("load", txnVar, joinedPkVar); Label notNull = mm.label(); joinedValueVar.ifNe(null, notNull); notFound.here(); markValuesUnset(rowVar); mm.return_(false); notNull.here(); if (repeatableVar == null) { repeatableVar = mm.var(RowUtils.class).invoke("isRepeatable", txnVar); } Label checked = mm.label(); repeatableVar.ifFalse(checked); if (valueVar != null) { // Decode the primary key columns (required by alt key only). mm.invoke("decodeValue", rowVar, valueVar); } mm.var(primaryTableClass).invoke("decodeValue", rowVar, joinedValueVar); Label success = mm.label().here(); markAllClean(rowVar, mRowGen, mRowGen); mm.return_(true); // This point is reached for double checking that the joined row matches to the // secondary row, which is required when a lock isn't held. checked.here(); // Copy of all the columns which will be modified by decodeValue. Map<String, ColumnInfo> copiedColumns; if (valueVar != null) { // For alt key, the primary key columns will be modified too. copiedColumns = mRowInfo.allColumns; } else { copiedColumns = mRowInfo.valueColumns; } Map<String, Variable> copiedVars = new LinkedHashMap<>(copiedColumns.size()); for (String name : copiedColumns.keySet()) { copiedVars.put(name, rowVar.field(name).get()); } if (valueVar != null) { // For alt key, decode the primary key columns too. mm.invoke("decodeValue", rowVar, valueVar); } mm.var(primaryTableClass).invoke("decodeValue", rowVar, joinedValueVar); // Check all the secondary columns, except those that refer to the primary key, which // won't have changed. 
Label fail = mm.label(); Map<String, ColumnInfo> pkColumns = mRowInfo.keyColumns; for (ColumnInfo column : mCodecGen.info.allColumns.values()) { String name = column.name; if (pkColumns.containsKey(name)) { continue; } Label pass = mm.label(); // Note that the secondary columns are passed as the compare arguments, because // that's what they effectively are -- a type of filter expression. This is // important because the comparison isn't necessarily symmetrical. See // BigDecimalUtils.matches. CompareUtils.compare(mm, column, rowVar.field(name), copiedColumns.get(name), copiedVars.get(name), ColumnFilter.OP_EQ, pass, fail); pass.here(); } mm.goto_(success); fail.here(); // Restore all the columns back to their original values, preventing any side-effects. // When the load method returns false, it's not supposed to modify any columns, // regardless of their state. for (Map.Entry<String, Variable> e : copiedVars.entrySet()) { rowVar.field(e.getKey()).set(e.getValue()); } mm.goto_(notFound); } private MethodHandle doFinish(MethodType mt) { try { var lookup = mClassMaker.finishLookup(); return lookup.findConstructor(lookup.lookupClass(), mt); } catch (Throwable e) { throw RowUtils.rethrow(e); } } private boolean isPrimaryTable() { return mRowGen == mCodecGen; } private boolean supportsTriggers() { return isPrimaryTable(); } /** * Defines a static method which accepts a row and returns boolean. When it returns true, * all of the given columns are set. * * @param name method name */ private void addCheckSet(String name, Map<String, ColumnInfo> columns) { MethodMaker mm = mClassMaker.addMethod(boolean.class, name, mRowClass).static_(); if (columns.isEmpty()) { mm.return_(true); return; } if (columns.size() == 1) { int num = mRowGen.columnNumbers().get(columns.values().iterator().next().name); Label cont = mm.label(); stateField(mm.param(0), num).and(RowGen.stateFieldMask(num)).ifNe(0, cont); mm.return_(false); cont.here(); mm.return_(true); return; } int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? mRowGen.keyCodecs() : mRowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { ColumnInfo info = codec.mInfo; if (columns.containsKey(info.name)) { mask |= RowGen.stateFieldMask(num); } if (isMaskReady(++num, mask)) { // Convert all states of value 0b01 (clean) into value 0b11 (dirty). All // other states stay the same. var state = stateField(mm.param(0), num - 1).get(); state = state.or(state.and(0x5555_5555).shl(1)); // Flip all column state bits. If final result is non-zero, then some // columns were unset. state = state.xor(mask); mask = maskRemainder(num, mask); if (mask != 0xffff_ffff) { state = state.and(mask); } Label cont = mm.label(); state.ifEq(0, cont); mm.return_(false); cont.here(); mask = 0; } } } mm.return_(true); } /** * Defines a static method which accepts a row and returns boolean. When it returns true, * all of the given columns are dirty. * * @param name method name */ private void addCheckAllDirty(String name, Map<String, ColumnInfo> columns) { MethodMaker mm = mClassMaker.addMethod(boolean.class, name, mRowClass).static_(); int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? 
mRowGen.keyCodecs() : mRowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { ColumnInfo info = codec.mInfo; if (columns.containsKey(info.name)) { mask |= RowGen.stateFieldMask(num); } if (isMaskReady(++num, mask)) { Label cont = mm.label(); stateField(mm.param(0), num - 1).and(mask).ifEq(mask, cont); mm.return_(false); cont.here(); mask = 0; } } } mm.return_(true); } /** * Defines a static method which accepts a row and returns boolean. When it returns true, * at least one of the given columns are dirty. * * @param name method name */ private void addCheckAnyDirty(String name, Map<String, ColumnInfo> columns) { MethodMaker mm = mClassMaker.addMethod(boolean.class, name, mRowClass).static_(); int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? mRowGen.keyCodecs() : mRowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { ColumnInfo info = codec.mInfo; if (columns.containsKey(info.name)) { mask |= RowGen.stateFieldMask(num, 0b10); } if (isMaskReady(++num, mask)) { Label cont = mm.label(); stateField(mm.param(0), num - 1).and(mask).ifEq(0, cont); mm.return_(true); cont.here(); mask = 0; } } } mm.return_(false); } /** * Called when building state field masks for columns, when iterating them in order. * * @param num column number pre-incremented to the next one * @param mask current group; must be non-zero to have any effect */ private boolean isMaskReady(int num, int mask) { return mask != 0 && ((num & 0b1111) == 0 || num >= mRowInfo.allColumns.size()); } /** * When building a mask for the highest state field, sets the high unused bits on the * mask. This can eliminate an unnecessary 'and' operation. * * @param num column number pre-incremented to the next one * @param mask current group * @return updated mask */ private int maskRemainder(int num, int mask) { if (num >= mRowInfo.allColumns.size()) { int shift = (num & 0b1111) << 1; if (shift != 0) { mask |= 0xffff_ffff << shift; } } return mask; } /** * Defines a static method which accepts a row and always throws a detailed exception * describing the required columns which aren't set. A check method should have been * invoked first. * * @param name method name */ private void addRequireSet(String name, Map<String, ColumnInfo> columns) { MethodMaker mm = mClassMaker.addMethod(null, name, mRowClass).static_(); String initMessage = "Some required columns are unset"; if (columns.isEmpty()) { mm.new_(IllegalStateException.class, initMessage).throw_(); return; } int initLength = initMessage.length() + 2; var bob = mm.new_(StringBuilder.class, initLength << 1) .invoke("append", initMessage).invoke("append", ": "); boolean first = true; for (ColumnInfo info : columns.values()) { int num = mRowGen.columnNumbers().get(info.name); Label isSet = mm.label(); stateField(mm.param(0), num).and(RowGen.stateFieldMask(num)).ifNe(0, isSet); if (!first) { Label sep = mm.label(); bob.invoke("length").ifEq(initLength, sep); bob.invoke("append", ", "); sep.here(); } bob.invoke("append", info.name); isSet.here(); first = false; } mm.new_(IllegalStateException.class, bob.invoke("toString")).throw_(); } /** * @return null if no field is defined for the column (probably SchemaVersionColumnCodec) */ private static Field findField(Variable row, ColumnCodec codec) { ColumnInfo info = codec.mInfo; return info == null ? 
null : row.field(info.name); } /** * Defines a static method which returns a new composite byte[] key or value. Caller must * check that the columns are set. * * @param name method name */ private void addEncodeColumnsMethod(String name, ColumnCodec[] codecs) { MethodMaker mm = mClassMaker.addMethod(byte[].class, name, mRowClass).static_(); addEncodeColumns(mm, ColumnCodec.bind(codecs, mm)); } /** * @param mm param(0): Row object, return: byte[] * @param codecs must be bound to the MethodMaker */ private static void addEncodeColumns(MethodMaker mm, ColumnCodec[] codecs) { if (codecs.length == 0) { mm.return_(mm.var(RowUtils.class).field("EMPTY_BYTES")); return; } // Determine the minimum byte array size and prepare the encoders. int minSize = 0; for (ColumnCodec codec : codecs) { minSize += codec.minSize(); codec.encodePrepare(); } // Generate code which determines the additional runtime length. Variable totalVar = null; for (ColumnCodec codec : codecs) { Field srcVar = findField(mm.param(0), codec); totalVar = codec.encodeSize(srcVar, totalVar); } // Generate code which allocates the destination byte array. Variable dstVar; if (totalVar == null) { dstVar = mm.new_(byte[].class, minSize); } else { if (minSize != 0) { totalVar = totalVar.add(minSize); } dstVar = mm.new_(byte[].class, totalVar); } // Generate code which fills in the byte array. var offsetVar = mm.var(int.class).set(0); for (ColumnCodec codec : codecs) { codec.encode(findField(mm.param(0), codec), dstVar, offsetVar); } mm.return_(dstVar); } /** * Method isn't implemented until needed, delaying acquisition/creation of the current * schema version. This allows replicas to decode existing rows even when the class * definition has changed, but encoding will still fail. */ private void addDynamicEncodeValueColumns() { MethodMaker mm = mClassMaker.addMethod(byte[].class, "encodeValue", mRowClass).static_(); var indy = mm.var(TableMaker.class).indy ("indyEncodeValueColumns", mStore.ref(), mRowType, mIndexId); mm.return_(indy.invoke(byte[].class, "encodeValue", null, mm.param(0))); } public static CallSite indyEncodeValueColumns (MethodHandles.Lookup lookup, String name, MethodType mt, WeakReference<RowStore> storeRef, Class<?> rowType, long indexId) { return doIndyEncode (lookup, name, mt, storeRef, rowType, indexId, (mm, info, schemaVersion) -> { ColumnCodec[] codecs = info.rowGen().valueCodecs(); addEncodeColumns(mm, ColumnCodec.bind(schemaVersion, codecs, mm)); }); } @FunctionalInterface static interface EncodeFinisher { void finish(MethodMaker mm, RowInfo info, int schemaVersion); } /** * Does the work to obtain the current schema version, handling any exceptions. The given * finisher completes the definition of the encode method when no exception was thrown when * trying to obtain the schema version. If an exception was thrown, the finisher might be * called at a later time. 
*/ private static CallSite doIndyEncode(MethodHandles.Lookup lookup, String name, MethodType mt, WeakReference<RowStore> storeRef, Class<?> rowType, long indexId, EncodeFinisher finisher) { return ExceptionCallSite.make(() -> { MethodMaker mm = MethodMaker.begin(lookup, name, mt); RowStore store = storeRef.get(); if (store == null) { mm.new_(DatabaseException.class, "Closed").throw_(); } else { RowInfo info = RowInfo.find(rowType); int schemaVersion; try { schemaVersion = store.schemaVersion(info, false, indexId, true); } catch (Exception e) { return new ExceptionCallSite.Failed(mt, mm, e); } finisher.finish(mm, info, schemaVersion); } return mm.finish(); }); } /** * Defines a static method which decodes columns from a composite byte[] parameter. * * @param name method name */ private void addDecodeColumnsMethod(String name, ColumnCodec[] codecs) { MethodMaker mm = mClassMaker.addMethod(null, name, mRowClass, byte[].class) .static_().public_(); addDecodeColumns(mm, mRowInfo, codecs, 0); } /** * @param mm param(0): Row object, param(1): byte[], return: void * @param fixedOffset must be after the schema version (when applicable) */ private static void addDecodeColumns(MethodMaker mm, RowInfo dstRowInfo, ColumnCodec[] srcCodecs, int fixedOffset) { srcCodecs = ColumnCodec.bind(srcCodecs, mm); Variable srcVar = mm.param(1); Variable offsetVar = mm.var(int.class).set(fixedOffset); for (ColumnCodec srcCodec : srcCodecs) { String name = srcCodec.mInfo.name; ColumnInfo dstInfo = dstRowInfo.allColumns.get(name); if (dstInfo == null) { srcCodec.decodeSkip(srcVar, offsetVar, null); } else { var rowVar = mm.param(0); Field dstVar = rowVar.field(name); Converter.decode(mm, srcVar, offsetVar, null, srcCodec, dstInfo, dstVar); } } } private void addDynamicDecodeValueColumns() { // First define a method which generates the SwitchCallSite. { MethodMaker mm = mClassMaker.addMethod (SwitchCallSite.class, "decodeValueSwitchCallSite").static_(); var condy = mm.var(TableMaker.class).condy ("condyDecodeValueColumns", mStore.ref(), mRowType, mRowClass, mIndexId); mm.return_(condy.invoke(SwitchCallSite.class, "_")); } // Also define a method to obtain a MethodHandle which decodes for a given schema // version. This must be defined here to ensure that the correct lookup is used. It // must always refer to this table class. { MethodMaker mm = mClassMaker.addMethod (MethodHandle.class, "decodeValueHandle", int.class).static_(); var lookup = mm.var(MethodHandles.class).invoke("lookup"); var mh = mm.invoke("decodeValueSwitchCallSite").invoke("getCase", lookup, mm.param(0)); mm.return_(mh); } MethodMaker mm = mClassMaker.addMethod (null, "decodeValue", mRowClass, byte[].class).static_().public_(); var data = mm.param(1); var schemaVersion = mm.var(RowUtils.class).invoke("decodeSchemaVersion", data); var indy = mm.var(TableMaker.class).indy("indyDecodeValueColumns"); indy.invoke(null, "decodeValue", null, schemaVersion, mm.param(0), data); } /** * Returns a SwitchCallSite instance suitable for decoding all value columns. By defining * it via a "condy" method, the SwitchCallSite instance can be shared by other methods. In * particular, filter subclasses are generated against specific schema versions, and so * they need direct access to just one of the cases. This avoids a redundant version check. 
* * MethodType is: void (int schemaVersion, RowClass row, byte[] data) */ public static SwitchCallSite condyDecodeValueColumns (MethodHandles.Lookup lookup, String name, Class<?> type, WeakReference<RowStore> storeRef, Class<?> rowType, Class<?> rowClass, long indexId) { MethodType mt = MethodType.methodType(void.class, int.class, rowClass, byte[].class); return new SwitchCallSite(lookup, mt, schemaVersion -> { MethodMaker mm = MethodMaker.begin(lookup, null, "case", rowClass, byte[].class); RowStore store = storeRef.get(); if (store == null) { mm.new_(DatabaseException.class, "Closed").throw_(); } else { RowInfo dstRowInfo = RowInfo.find(rowType); if (schemaVersion == 0) { // No columns to decode, so assign defaults. for (Map.Entry<String, ColumnInfo> e : dstRowInfo.valueColumns.entrySet()) { Converter.setDefault(mm, e.getValue(), mm.param(0).field(e.getKey())); } } else { RowInfo srcRowInfo; try { srcRowInfo = store.rowInfo(rowType, indexId, schemaVersion); } catch (Exception e) { return new ExceptionCallSite.Failed (MethodType.methodType(void.class, rowClass, byte[].class), mm, e); } ColumnCodec[] srcCodecs = srcRowInfo.rowGen().valueCodecs(); int fixedOffset = schemaVersion < 128 ? 1 : 4; addDecodeColumns(mm, dstRowInfo, srcCodecs, fixedOffset); if (dstRowInfo != srcRowInfo) { // Assign defaults for any missing columns. for (Map.Entry<String, ColumnInfo> e : dstRowInfo.valueColumns.entrySet()) { String fieldName = e.getKey(); if (!srcRowInfo.valueColumns.containsKey(fieldName)) { Converter.setDefault (mm, e.getValue(), mm.param(0).field(fieldName)); } } } } } return mm.finish(); }); } /** * This just returns the SwitchCallSite generated by condyDecodeValueColumns. */ public static SwitchCallSite indyDecodeValueColumns(MethodHandles.Lookup lookup, String name, MethodType mt) throws Throwable { MethodHandle mh = lookup.findStatic(lookup.lookupClass(), "decodeValueSwitchCallSite", MethodType.methodType(SwitchCallSite.class)); return (SwitchCallSite) mh.invokeExact(); } private void addDecodePartialHandle() { MethodMaker mm = mClassMaker.addMethod (MethodHandle.class, "makeDecodePartialHandle", byte[].class, int.class).protected_(); var spec = mm.param(0); var lookup = mm.var(MethodHandles.class).invoke("lookup"); Variable decoder; if (isPrimaryTable()) { var schemaVersion = mm.param(1); var storeRef = mm.invoke("rowStoreRef"); decoder = mm.var(DecodePartialMaker.class).invoke ("makeDecoder", lookup, storeRef, mRowType, mRowClass, mm.class_(), mIndexId, spec, schemaVersion); } else { var secondaryDescVar = mm.var(byte[].class).setExact(mSecondaryDescriptor); decoder = mm.var(DecodePartialMaker.class).invoke ("makeDecoder", lookup, mRowType, mRowClass, mm.class_(), secondaryDescVar, spec); } mm.return_(decoder); } /** * @param variant "load", "exists", or "delete" */ private void addByKeyMethod(String variant) { MethodMaker mm = mClassMaker.addMethod (boolean.class, variant, Transaction.class, Object.class).public_(); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1).cast(mRowClass); Label ready = mm.label(); mm.invoke("checkPrimaryKeySet", rowVar).ifTrue(ready); mm.new_(IllegalStateException.class, "Primary key isn't fully specified").throw_(); ready.here(); var keyVar = mm.invoke("encodePrimaryKey", rowVar); final var source = mm.field("mSource"); final Variable valueVar; if (variant != "delete" || !supportsTriggers()) { valueVar = source.invoke(variant, txnVar, keyVar); } else { var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, 
mm.this_(), triggerVar, skipLabel); Label triggerStart = mm.label().here(); // Trigger requires a non-null transaction. txnVar.set(mm.var(ViewUtils.class).invoke("enterScope", source, txnVar)); Label txnStart = mm.label().here(); var cursorVar = source.invoke("newCursor", txnVar); Label cursorStart = mm.label().here(); cursorVar.invoke("find", keyVar); var oldValueVar = cursorVar.invoke("value"); Label commit = mm.label(); oldValueVar.ifEq(null, commit); triggerVar.invoke("delete", txnVar, rowVar, keyVar, oldValueVar); commit.here(); cursorVar.invoke("commit", (Object) null); mm.return_(oldValueVar.ne(null)); mm.finally_(cursorStart, () -> cursorVar.invoke("reset")); mm.finally_(txnStart, () -> txnVar.invoke("exit")); skipLabel.here(); assert variant == "delete"; valueVar = source.invoke(variant, txnVar, keyVar); mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); } if (variant != "load") { mm.return_(valueVar); } else { Label notNull = mm.label(); valueVar.ifNe(null, notNull); markValuesUnset(rowVar); mm.return_(false); notNull.here(); mm.invoke("decodeValue", rowVar, valueVar); markAllClean(rowVar); mm.return_(true); } } /** * @param variant "store", "exchange", "insert", or "replace" */ private void addStoreMethod(String variant, Class returnType) { MethodMaker mm = mClassMaker.addMethod (returnType, variant, Transaction.class, Object.class).public_(); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1).cast(mRowClass); Label ready = mm.label(); mm.invoke("checkAllSet", rowVar).ifTrue(ready); if (variant != "replace" && mAutoColumn != null) { Label notReady = mm.label(); mm.invoke("checkAllButAutoSet", rowVar).ifFalse(notReady); mm.invoke("storeAuto", txnVar, rowVar); if (variant == "exchange") { mm.return_(null); } else if (variant == "insert") { mm.return_(true); } else { mm.return_(); } notReady.here(); } mm.invoke("requireAllSet", rowVar); ready.here(); var keyVar = mm.invoke("encodePrimaryKey", rowVar); var valueVar = mm.invoke("encodeValue", rowVar); Variable resultVar = null; if (!supportsTriggers()) { resultVar = storeNoTrigger(mm, variant, txnVar, rowVar, keyVar, valueVar); } else { Label cont = mm.label(); var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, mm.this_(), triggerVar, skipLabel); Label triggerStart = mm.label().here(); final var source = mm.field("mSource").get(); // Trigger requires a non-null transaction. txnVar.set(mm.var(ViewUtils.class).invoke("enterScope", source, txnVar)); Label txnStart = mm.label().here(); mm.invoke("redoPredicateMode", txnVar); // Always use a cursor to acquire the upgradable row lock before updating // secondaries. This prevents deadlocks with a concurrent index scan which joins // against the row. The row lock is acquired exclusively after all secondaries have // been updated. At that point, shared lock acquisition against the row is blocked. 
var cursorVar = source.invoke("newCursor", txnVar); Label cursorStart = mm.label().here(); if (variant == "replace") { cursorVar.invoke("find", keyVar); var oldValueVar = cursorVar.invoke("value"); Label passed = mm.label(); oldValueVar.ifNe(null, passed); mm.return_(false); passed.here(); triggerVar.invoke("store", txnVar, rowVar, keyVar, oldValueVar, valueVar); cursorVar.invoke("commit", valueVar); markAllClean(rowVar); mm.return_(true); } else { Variable closerVar; Label opStart; if (!mSupportsIndexLock) { closerVar = null; opStart = null; } else { closerVar = mm.field("mIndexLock").invoke("openAcquire", txnVar, rowVar); opStart = mm.label().here(); } if (variant == "insert") { cursorVar.invoke("autoload", false); cursorVar.invoke("find", keyVar); if (closerVar != null) { mm.finally_(opStart, () -> closerVar.invoke("close")); } Label passed = mm.label(); cursorVar.invoke("value").ifEq(null, passed); mm.return_(false); passed.here(); triggerVar.invoke("insert", txnVar, rowVar, keyVar, valueVar); cursorVar.invoke("commit", valueVar); markAllClean(rowVar); mm.return_(true); } else { cursorVar.invoke("find", keyVar); if (closerVar != null) { mm.finally_(opStart, () -> closerVar.invoke("close")); } var oldValueVar = cursorVar.invoke("value"); Label wasNull = mm.label(); oldValueVar.ifEq(null, wasNull); triggerVar.invoke("store", txnVar, rowVar, keyVar, oldValueVar, valueVar); Label commit = mm.label().goto_(); wasNull.here(); triggerVar.invoke("insert", txnVar, rowVar, keyVar, valueVar); commit.here(); cursorVar.invoke("commit", valueVar); if (variant == "store") { markAllClean(rowVar); mm.return_(); } else { resultVar = oldValueVar; mm.goto_(cont); } } } mm.finally_(cursorStart, () -> cursorVar.invoke("reset")); mm.finally_(txnStart, () -> txnVar.invoke("exit")); skipLabel.here(); Variable storeResultVar = storeNoTrigger(mm, variant, txnVar, rowVar, keyVar, valueVar); if (resultVar == null) { resultVar = storeResultVar; } else { resultVar.set(storeResultVar); } mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); cont.here(); } if (returnType == null) { // This case is expected only for the "store" variant. markAllClean(rowVar); return; } if (variant != "exchange") { // This case is expected for the "insert" and "replace" variants. Label failed = mm.label(); resultVar.ifFalse(failed); markAllClean(rowVar); failed.here(); mm.return_(resultVar); return; } // The rest is for implementing the "exchange" variant. markAllClean(rowVar); Label found = mm.label(); resultVar.ifNe(null, found); mm.return_(null); found.here(); var copyVar = mm.new_(mRowClass); copyFields(mm, rowVar, copyVar, mCodecGen.info.keyColumns.values()); mm.invoke("decodeValue", copyVar, resultVar); markAllClean(copyVar); mm.return_(copyVar); // Now implement the exchange bridge method. mm = mClassMaker.addMethod (Object.class, variant, Transaction.class, Object.class).public_().bridge(); mm.return_(mm.this_().invoke(returnType, variant, null, mm.param(0), mm.param(1))); } /** * @param variant "store", "exchange", "insert", or "replace" */ private Variable storeNoTrigger(MethodMaker mm, String variant, Variable txnVar, Variable rowVar, Variable keyVar, Variable valueVar) { if (variant == "replace" || !mSupportsIndexLock) { return mm.field("mSource").invoke(variant, txnVar, keyVar, valueVar); } else { // Call protected method inherited from AbstractTable. 
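            // Note (added commentary): this overload additionally receives the row object; it's
            // used on this branch because mSupportsIndexLock is true, whereas the plain source
            // methods above only take the key and value.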
return mm.invoke(variant, txnVar, rowVar, keyVar, valueVar); } } private void addStoreAutoMethod() { MethodMaker mm = mClassMaker.addMethod(null, "storeAuto", Transaction.class, mRowClass); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1); var keyVar = mm.invoke("encodePrimaryKey", rowVar); var valueVar = mm.invoke("encodeValue", rowVar); // Call enterScopex because bogus transaction doesn't work with AutomaticKeyGenerator. txnVar.set(mm.var(ViewUtils.class).invoke("enterScopex", mm.field("mSource"), txnVar)); Label txnStart = mm.label().here(); mm.invoke("redoPredicateMode", txnVar); if (!supportsTriggers()) { mm.field("autogen").invoke("store", txnVar, rowVar, keyVar, valueVar); txnVar.invoke("commit"); markAllClean(rowVar); mm.return_(); } else { var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, mm.this_(), triggerVar, skipLabel); Label triggerStart = mm.label().here(); mm.field("autogen").invoke("store", txnVar, rowVar, keyVar, valueVar); triggerVar.invoke("insert", txnVar, rowVar, keyVar, valueVar); Label commitLabel = mm.label().goto_(); skipLabel.here(); mm.field("autogen").invoke("store", txnVar, rowVar, keyVar, valueVar); commitLabel.here(); txnVar.invoke("commit"); markAllClean(rowVar); mm.return_(); mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); } mm.finally_(txnStart, () -> txnVar.invoke("exit")); } private static void copyFields(MethodMaker mm, Variable src, Variable dst, Collection<ColumnInfo> infos) { for (ColumnInfo info : infos) { Variable srcField = src.field(info.name); if (info.isArray()) { srcField = srcField.get(); Label isNull = null; if (info.isNullable()) { isNull = mm.label(); srcField.ifEq(null, isNull); } srcField.set(srcField.invoke("clone").cast(info.type)); if (isNull != null) { isNull.here(); } } dst.field(info.name).set(srcField); } } /** * Adds a method which does most of the work for the update and merge methods. The * transaction parameter must not be null, which is committed when changes are made. * * boolean doUpdate(Transaction txn, ActualRow row, boolean merge); */ private void addDoUpdateMethod() { MethodMaker mm = mClassMaker.addMethod (boolean.class, "doUpdate", Transaction.class, mRowClass, boolean.class); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1); Variable mergeVar = mm.param(2); Label ready = mm.label(); mm.invoke("checkPrimaryKeySet", rowVar).ifTrue(ready); mm.new_(IllegalStateException.class, "Primary key isn't fully specified").throw_(); ready.here(); final var keyVar = mm.invoke("encodePrimaryKey", rowVar); final var source = mm.field("mSource"); final var cursorVar = source.invoke("newCursor", txnVar); Label cursorStart = mm.label().here(); // If all value columns are dirty, replace the whole row and commit. { Label cont; if (mCodecGen.info.valueColumns.isEmpty()) { // If the checkValueAllDirty method was defined, it would always return true. 
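                // Note (added commentary): with no value columns there is nothing to merge
                // selectively, so cont stays null and only the full-replace path below is
                // generated.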
cont = null; } else { cont = mm.label(); mm.invoke("checkValueAllDirty", rowVar).ifFalse(cont); } final Variable triggerVar; final Label triggerStart; if (!supportsTriggers()) { triggerVar = null; triggerStart = null; } else { triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, mm.this_(), triggerVar, skipLabel); triggerStart = mm.label().here(); cursorVar.invoke("find", keyVar); var oldValueVar = cursorVar.invoke("value"); Label replace = mm.label(); oldValueVar.ifNe(null, replace); mm.return_(false); replace.here(); var valueVar = mm.invoke("encodeValue", rowVar); triggerVar.invoke("store", txnVar, rowVar, keyVar, oldValueVar, valueVar); cursorVar.invoke("commit", valueVar); markAllClean(rowVar); mm.return_(true); skipLabel.here(); } cursorVar.invoke("autoload", false); cursorVar.invoke("find", keyVar); Label replace = mm.label(); cursorVar.invoke("value").ifNe(null, replace); mm.return_(false); replace.here(); cursorVar.invoke("commit", mm.invoke("encodeValue", rowVar)); if (triggerStart != null) { mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); } markAllClean(rowVar); mm.return_(true); if (cont == null) { return; } cont.here(); } cursorVar.invoke("find", keyVar); Label hasValue = mm.label(); cursorVar.invoke("value").ifNe(null, hasValue); mm.return_(false); hasValue.here(); // The bulk of the method isn't implemented until needed, delaying acquisition/creation // of the current schema version. var indy = mm.var(TableMaker.class).indy ("indyDoUpdate", mStore.ref(), mRowType, mIndexId, supportsTriggers() ? 1 : 0); indy.invoke(null, "doUpdate", null, mm.this_(), rowVar, mergeVar, cursorVar); mm.return_(true); mm.finally_(cursorStart, () -> cursorVar.invoke("reset")); } /** * @param triggers 0 for false, 1 for true */ public static CallSite indyDoUpdate(MethodHandles.Lookup lookup, String name, MethodType mt, WeakReference<RowStore> storeRef, Class<?> rowType, long indexId, int triggers) { return doIndyEncode (lookup, name, mt, storeRef, rowType, indexId, (mm, info, schemaVersion) -> { finishIndyDoUpdate(mm, info, schemaVersion, triggers); }); } /** * @param triggers 0 for false, 1 for true */ private static void finishIndyDoUpdate(MethodMaker mm, RowInfo rowInfo, int schemaVersion, int triggers) { // All these variables were provided by the indy call in addDoUpdateMethod. Variable tableVar = mm.param(0); Variable rowVar = mm.param(1); Variable mergeVar = mm.param(2); Variable cursorVar = mm.param(3); Variable valueVar = cursorVar.invoke("value"); var ue = encodeUpdateEntry(mm, rowInfo, schemaVersion, tableVar, rowVar, valueVar); Variable newValueVar = ue.newEntryVar; Variable[] offsetVars = ue.offsetVars; if (triggers == 0) { cursorVar.invoke("commit", newValueVar); } Label doMerge = mm.label(); mergeVar.ifTrue(doMerge); if (triggers != 0) { var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, tableVar, triggerVar, skipLabel); Label triggerStart = mm.label().here(); var txnVar = cursorVar.invoke("link"); var keyVar = cursorVar.invoke("key"); triggerVar.invoke("update", txnVar, rowVar, keyVar, valueVar, newValueVar); cursorVar.invoke("commit", newValueVar); Label cont = mm.label().goto_(); skipLabel.here(); cursorVar.invoke("commit", newValueVar); mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); cont.here(); } markAllUndirty(rowVar, rowInfo); mm.return_(); doMerge.here(); // Decode all the original column values that weren't updated into the row. 
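        // Note (added commentary): a column is skipped when its two state bits equal the dirty
        // mask; clean or unset columns are decoded from the original encoded value using the
        // offsets captured by encodeUpdateEntry.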
RowGen rowGen = rowInfo.rowGen(); Map<String, Integer> columnNumbers = rowGen.columnNumbers(); ColumnCodec[] codecs = ColumnCodec.bind(rowGen.valueCodecs(), mm); String stateFieldName = null; Variable stateField = null; for (int i=0; i<codecs.length; i++) { ColumnCodec codec = codecs[i]; ColumnInfo info = codec.mInfo; int num = columnNumbers.get(info.name); String sfName = rowGen.stateField(num); if (!sfName.equals(stateFieldName)) { stateFieldName = sfName; stateField = rowVar.field(stateFieldName).get(); } int sfMask = RowGen.stateFieldMask(num); Label cont = mm.label(); stateField.and(sfMask).ifEq(sfMask, cont); codec.decode(rowVar.field(info.name), valueVar, offsetVars[i], null); cont.here(); } if (triggers != 0) { var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, tableVar, triggerVar, skipLabel); Label triggerStart = mm.label().here(); var txnVar = cursorVar.invoke("link"); var keyVar = cursorVar.invoke("key"); triggerVar.invoke("store", txnVar, rowVar, keyVar, valueVar, newValueVar); cursorVar.invoke("commit", newValueVar); Label cont = mm.label().goto_(); skipLabel.here(); cursorVar.invoke("commit", newValueVar); mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); cont.here(); } markAllClean(rowVar, rowGen, rowGen); } private static class UpdateEntry { Variable newEntryVar; Variable[] offsetVars; } /** * Makes code which encodes a new entry (a key or value) by comparing dirty row columns to * the original entry. Returns the new entry and the column offsets in the original entry. * * @param schemaVersion pass 0 if entry is a key instead of a value; implies that caller * must handle the case where the value must be empty * @param rowVar non-null * @param tableVar doesn't need to be initialized (is used to invoke a static method) * @param entryVar original non-null encoded key or value */ private static UpdateEntry encodeUpdateEntry (MethodMaker mm, RowInfo rowInfo, int schemaVersion, Variable tableVar, Variable rowVar, Variable entryVar) { RowGen rowGen = rowInfo.rowGen(); ColumnCodec[] codecs; int fixedOffset; if (schemaVersion == 0) { codecs = rowGen.keyCodecs(); fixedOffset = 0; } else { codecs = rowGen.valueCodecs(); Variable decodeVersion = mm.var(RowUtils.class).invoke("decodeSchemaVersion", entryVar); Label sameVersion = mm.label(); decodeVersion.ifEq(schemaVersion, sameVersion); // If different schema versions, decode and re-encode a new entry, and then go to // the next step. The simplest way to perform this conversion is to create a new // temp row object, decode the entry into it, and then create a new entry from it. var tempRowVar = mm.new_(rowVar); tableVar.invoke("decodeValue", tempRowVar, entryVar); entryVar.set(tableVar.invoke("encodeValue", tempRowVar)); sameVersion.here(); fixedOffset = schemaVersion < 128 ? 1 : 4; } // Identify the offsets to all the columns in the original entry, and calculate the // size of the new entry. 
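        // Note (added commentary): decodeSkip records each column's extent in the original
        // entry; columns that aren't dirty contribute that original length to the new size,
        // while dirty columns contribute a freshly computed encoded size. The recorded offsets
        // drive the arraycopy span copying further below.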
Map<String, Integer> columnNumbers = rowGen.columnNumbers(); codecs = ColumnCodec.bind(codecs, mm); Variable[] offsetVars = new Variable[codecs.length]; var offsetVar = mm.var(int.class).set(fixedOffset); var newSizeVar = mm.var(int.class).set(fixedOffset); // need room for schemaVersion String stateFieldName = null; Variable stateField = null; for (int i=0; i<codecs.length; i++) { ColumnCodec codec = codecs[i]; codec.encodePrepare(); offsetVars[i] = offsetVar.get(); codec.decodeSkip(entryVar, offsetVar, null); ColumnInfo info = codec.mInfo; int num = columnNumbers.get(info.name); String sfName = rowGen.stateField(num); if (!sfName.equals(stateFieldName)) { stateFieldName = sfName; stateField = rowVar.field(stateFieldName).get(); } int sfMask = RowGen.stateFieldMask(num); Label isDirty = mm.label(); stateField.and(sfMask).ifEq(sfMask, isDirty); // Add in the size of original column, which won't be updated. codec.encodeSkip(); newSizeVar.inc(offsetVar.sub(offsetVars[i])); Label cont = mm.label().goto_(); // Add in the size of the dirty column, which needs to be encoded. isDirty.here(); newSizeVar.inc(codec.minSize()); codec.encodeSize(rowVar.field(info.name), newSizeVar); cont.here(); } // Encode the new byte[] entry... var newEntryVar = mm.new_(byte[].class, newSizeVar); var srcOffsetVar = mm.var(int.class).set(0); var dstOffsetVar = mm.var(int.class).set(0); var spanLengthVar = mm.var(int.class).set(schemaVersion < 128 ? 1 : 4); var sysVar = mm.var(System.class); for (int i=0; i<codecs.length; i++) { ColumnCodec codec = codecs[i]; ColumnInfo info = codec.mInfo; int num = columnNumbers.get(info.name); Variable columnLenVar; { Variable endVar; if (i + 1 < codecs.length) { endVar = offsetVars[i + 1]; } else { endVar = entryVar.alength(); } columnLenVar = endVar.sub(offsetVars[i]); } int sfMask = RowGen.stateFieldMask(num); Label isDirty = mm.label(); stateField.and(sfMask).ifEq(sfMask, isDirty); // Increase the copy span length. Label cont = mm.label(); spanLengthVar.inc(columnLenVar); mm.goto_(cont); isDirty.here(); // Copy the current span and prepare for the next span. { Label noSpan = mm.label(); spanLengthVar.ifEq(0, noSpan); sysVar.invoke("arraycopy", entryVar, srcOffsetVar, newEntryVar, dstOffsetVar, spanLengthVar); srcOffsetVar.inc(spanLengthVar); dstOffsetVar.inc(spanLengthVar); spanLengthVar.set(0); noSpan.here(); } // Encode the dirty column, and skip over the original column value. codec.encode(rowVar.field(info.name), newEntryVar, dstOffsetVar); srcOffsetVar.inc(columnLenVar); cont.here(); } // Copy any remaining span. { Label noSpan = mm.label(); spanLengthVar.ifEq(0, noSpan); sysVar.invoke("arraycopy", entryVar, srcOffsetVar, newEntryVar, dstOffsetVar, spanLengthVar); noSpan.here(); } var ue = new UpdateEntry(); ue.newEntryVar = newEntryVar; ue.offsetVars = offsetVars; return ue; } /** * Delegates to the doUpdate method. 
*/ private void addUpdateMethod(String variant, boolean merge) { MethodMaker mm = mClassMaker.addMethod (boolean.class, variant, Transaction.class, Object.class).public_(); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1).cast(mRowClass); Variable source = mm.field("mSource"); txnVar.set(mm.var(ViewUtils.class).invoke("enterScope", source, txnVar)); Label tryStart = mm.label().here(); mm.invoke("redoPredicateMode", txnVar); mm.return_(mm.invoke("doUpdate", txnVar, rowVar, merge)); mm.finally_(tryStart, () -> txnVar.invoke("exit")); } /** * Makes code which obtains the current trigger and acquires the lock which must be held * for the duration of the operation. The lock must be held even if no trigger must be run. * * @param triggerVar type is Trigger and is assigned by the generated code * @param skipLabel label to branch when trigger shouldn't run */ private static void prepareForTrigger(MethodMaker mm, Variable tableVar, Variable triggerVar, Label skipLabel) { Label acquireTriggerLabel = mm.label().here(); triggerVar.set(tableVar.invoke("trigger")); triggerVar.invoke("acquireShared"); var modeVar = triggerVar.invoke("mode"); modeVar.ifEq(Trigger.SKIP, skipLabel); Label activeLabel = mm.label(); modeVar.ifNe(Trigger.DISABLED, activeLabel); triggerVar.invoke("releaseShared"); mm.goto_(acquireTriggerLabel); activeLabel.here(); } private void markAllClean(Variable rowVar) { markAllClean(rowVar, mRowGen, mCodecGen); } private static void markAllClean(Variable rowVar, RowGen rowGen, RowGen codecGen) { if (rowGen == codecGen) { // isPrimaryTable, so truly mark all clean int mask = 0x5555_5555; int i = 0; String[] stateFields = rowGen.stateFields(); for (; i < stateFields.length - 1; i++) { rowVar.field(stateFields[i]).set(mask); } mask >>>= (32 - ((rowGen.info.allColumns.size() & 0b1111) << 1)); rowVar.field(stateFields[i]).set(mask); } else { // Only mark columns clean that are defined by codecGen. All others are unset. markClean(rowVar, rowGen, codecGen.info.allColumns); } } /** * Mark only the given columns as CLEAN. All others are UNSET. */ private static void markClean(final Variable rowVar, final RowGen rowGen, final Map<String, ColumnInfo> columns) { final int maxNum = rowGen.info.allColumns.size(); int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? rowGen.keyCodecs() : rowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { if (columns.containsKey(codec.mInfo.name)) { mask |= RowGen.stateFieldMask(num, 0b01); // clean state } if ((++num & 0b1111) == 0 || num >= maxNum) { rowVar.field(rowGen.stateField(num - 1)).set(mask); mask = 0; } } } } private void addMarkAllCleanMethod() { // Used by filter implementations, and it must be public because filters are defined in // a different package. MethodMaker mm = mClassMaker.addMethod(null, "markAllClean", mRowClass).public_().static_(); markAllClean(mm.param(0)); } /** * Remaining states are UNSET or CLEAN. 
*/ private static void markAllUndirty(Variable rowVar, RowInfo info) { int mask = 0x5555_5555; int i = 0; String[] stateFields = info.rowGen().stateFields(); for (; i < stateFields.length - 1; i++) { var field = rowVar.field(stateFields[i]); field.set(field.and(mask)); } mask >>>= (32 - ((info.allColumns.size() & 0b1111) << 1)); var field = rowVar.field(stateFields[i]); field.set(field.and(mask)); } /** * Mark all the value columns as UNSET without modifying the key column states. */ private void markValuesUnset(Variable rowVar) { if (isPrimaryTable()) { // Clear the value column state fields. Skip the key columns, which are numbered // first. Note that the codecs are accessed, to match encoding order. int num = mRowInfo.keyColumns.size(); int mask = 0; for (ColumnCodec codec : mRowGen.valueCodecs()) { mask |= RowGen.stateFieldMask(num); if (isMaskReady(++num, mask)) { mask = maskRemainder(num, mask); Field field = stateField(rowVar, num - 1); mask = ~mask; if (mask == 0) { field.set(mask); } else { field.set(field.and(mask)); mask = 0; } } } return; } final Map<String, ColumnInfo> keyColumns = mCodecGen.info.keyColumns; final int maxNum = mRowInfo.allColumns.size(); int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? mRowGen.keyCodecs() : mRowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { if (!keyColumns.containsKey(codec.mInfo.name)) { mask |= RowGen.stateFieldMask(num); } if ((++num & 0b1111) == 0 || num >= maxNum) { Field field = rowVar.field(mRowGen.stateField(num - 1)); mask = ~mask; if (mask == 0) { field.set(mask); } else { field.set(field.and(mask)); mask = 0; } } } } } private Field stateField(Variable rowVar, int columnNum) { return rowVar.field(mRowGen.stateField(columnNum)); } private void addToRowMethod() { MethodMaker mm = mClassMaker.addMethod(mRowType, "toRow", byte[].class).protected_(); var rowVar = mm.new_(mRowClass); mm.invoke("decodePrimaryKey", rowVar, mm.param(0)); markClean(rowVar, mRowGen, mCodecGen.info.keyColumns); mm.return_(rowVar); mm = mClassMaker.addMethod(Object.class, "toRow", byte[].class).protected_().bridge(); mm.return_(mm.this_().invoke(mRowType, "toRow", null, mm.param(0))); } private void addToKeyMethod() { MethodMaker mm = mClassMaker.addMethod(byte[].class, "toKey", Object.class).protected_(); mm.return_(mm.invoke("encodePrimaryKey", mm.param(0).cast(mRowClass))); } private void addRowStoreRefMethod() { MethodMaker mm = mClassMaker.addMethod(WeakReference.class, "rowStoreRef").protected_(); mm.return_(mm.var(WeakReference.class).setExact(mStore.ref())); } private void addSecondaryDescriptorMethod() { MethodMaker mm = mClassMaker.addMethod(byte[].class, "secondaryDescriptor").protected_(); mm.return_(mm.var(byte[].class).setExact(mSecondaryDescriptor)); } /** * Defines a method which returns a singleton SingleScanController instance. 
*/ private void addUnfilteredMethod() { MethodMaker mm = mClassMaker.addMethod (SingleScanController.class, "unfiltered").protected_(); var condy = mm.var(TableMaker.class).condy ("condyDefineUnfiltered", mRowType, mRowClass, mSecondaryDescriptor); mm.return_(condy.invoke(SingleScanController.class, "unfiltered")); } /** * @param secondaryDesc pass null for primary table */ public static Object condyDefineUnfiltered(MethodHandles.Lookup lookup, String name, Class type, Class rowType, Class rowClass, byte[] secondaryDesc) throws Throwable { RowInfo rowInfo = RowInfo.find(rowType); RowGen rowGen = rowInfo.rowGen(); RowGen codecGen = rowGen; if (secondaryDesc != null) { codecGen = RowStore.indexRowInfo(rowInfo, secondaryDesc).rowGen(); } ClassMaker cm = RowGen.beginClassMaker (TableMaker.class, rowType, rowInfo, null, "Unfiltered") .extend(SingleScanController.class).public_(); // Constructor is protected, for use by filter implementation subclasses. MethodType ctorType; { ctorType = MethodType.methodType (void.class, byte[].class, boolean.class, byte[].class, boolean.class); MethodMaker mm = cm.addConstructor(ctorType).protected_(); mm.invokeSuperConstructor(mm.param(0), mm.param(1), mm.param(2), mm.param(3)); } { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod (Object.class, "decodeRow", Cursor.class, LockResult.class, Object.class).public_(); var tableVar = mm.var(lookup.lookupClass()); var rowVar = mm.param(2).cast(rowClass); Label hasRow = mm.label(); rowVar.ifNe(null, hasRow); rowVar.set(mm.new_(rowClass)); hasRow.here(); var cursorVar = mm.param(0); tableVar.invoke("decodePrimaryKey", rowVar, cursorVar.invoke("key")); tableVar.invoke("decodeValue", rowVar, cursorVar.invoke("value")); markAllClean(rowVar, rowGen, codecGen); mm.return_(rowVar); } { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod(byte[].class, "encodeKey", Object.class).public_(); var rowVar = mm.param(0).cast(rowClass); var tableVar = mm.var(lookup.lookupClass()); Label unchanged = mm.label(); tableVar.invoke("checkPrimaryKeyAnyDirty", rowVar).ifFalse(unchanged); mm.return_(tableVar.invoke("encodePrimaryKey", rowVar)); unchanged.here(); mm.return_(null); } { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod(byte[].class, "encodeValue", Object.class).public_(); var rowVar = mm.param(0).cast(rowClass); var tableVar = mm.var(lookup.lookupClass()); mm.return_(tableVar.invoke("encodeValue", rowVar)); } { // Specified by ScanController. MethodMaker mm = cm.addMethod(QueryPlan.class, "plan").public_(); var condy = mm.var(TableMaker.class).condy("condyPlan", rowType, secondaryDesc, 0); mm.return_(condy.invoke(QueryPlan.class, "plan")); } if (rowGen == codecGen) { // isPrimaryTable, so a schema must be decoded // Used by filter subclasses. The int param is the schema version. 
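            // Note (added commentary): delegates to the decodeValueHandle method defined on the
            // table class itself (see addDynamicDecodeValueColumns).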
MethodMaker mm = cm.addMethod (MethodHandle.class, "decodeValueHandle", int.class).protected_().static_(); var tableVar = mm.var(lookup.lookupClass()); mm.return_(tableVar.invoke("decodeValueHandle", mm.param(0))); } var clazz = cm.finish(); return lookup.findConstructor(clazz, ctorType).invoke(null, false, null, false); } public static QueryPlan condyPlan(MethodHandles.Lookup lookup, String name, Class type, Class rowType, byte[] secondaryDesc, int joinOption) { RowInfo primaryRowInfo = RowInfo.find(rowType); RowInfo rowInfo; String which; if (secondaryDesc == null) { rowInfo = primaryRowInfo; which = "primary key"; } else { rowInfo = RowStore.indexRowInfo(primaryRowInfo, secondaryDesc); which = rowInfo.isAltKey() ? "alternate key" : "secondary index"; } QueryPlan plan = new QueryPlan.FullScan(rowInfo.name, which, rowInfo.keySpec(), false); if (joinOption != 0) { rowInfo = primaryRowInfo; plan = new QueryPlan.NaturalJoin(rowInfo.name, "primary key", rowInfo.keySpec(), plan); } return plan; } /** * Define a static method which encodes a primary key when given an encoded secondary key. * * @param hasRow true to pass a row with a fully specified key instead of an encoded key * @param define true to actually define, false to delegate to it */ private void addToPrimaryKeyMethod(ClassMaker cm, boolean hasRow, boolean define) { RowInfo info = mCodecGen.info; Object[] params; if (info.isAltKey()) { // Needs the secondary key and value. params = new Object[] {byte[].class, byte[].class}; } else { // Only needs the secondary key. params = new Object[] {byte[].class}; } if (hasRow) { params[0] = mRowClass; } MethodMaker mm = cm.addMethod(byte[].class, "toPrimaryKey", params).static_(); Variable pkVar; if (define) { pkVar = IndexTriggerMaker.makeToPrimaryKey(mm, mRowType, mRowClass, mRowInfo, info); } else { mm.protected_(); var tableVar = mm.var(mClassMaker); if (params.length == 2) { pkVar = tableVar.invoke("toPrimaryKey", mm.param(0), mm.param(1)); } else { pkVar = tableVar.invoke("toPrimaryKey", mm.param(0)); } } mm.return_(pkVar); } /** * Returns a subclass of JoinedScanController with the same constructor. */ private Class<?> makeUnfilteredJoinedScanControllerClass(Class<?> primaryTableClass) { ClassMaker cm = RowGen.beginClassMaker (TableMaker.class, mRowType, mRowInfo, null, "Unfiltered") .extend(JoinedScanController.class).public_(); // Constructor is protected, for use by filter implementation subclasses. { MethodMaker mm = cm.addConstructor (byte[].class, boolean.class, byte[].class, boolean.class, Index.class); mm.protected_(); mm.invokeSuperConstructor (mm.param(0), mm.param(1), mm.param(2), mm.param(3), mm.param(4)); } // Provide access to the toPrimaryKey method to be accessible by filter implementation // subclasses, which are defined in a different package. addToPrimaryKeyMethod(cm, false, false); // Note regarding the RowDecoderEncoder methods: The decode methods fully resolve rows // by joining to the primary table, and the encode methods return bytes for storing // into the primary table. // Specified by RowDecoderEncoder. addJoinedDecodeRow(cm, primaryTableClass, false); addJoinedDecodeRow(cm, primaryTableClass, true); { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod(byte[].class, "encodeKey", Object.class).public_(); var rowVar = mm.param(0).cast(mRowClass); var tableVar = mm.var(primaryTableClass); mm.return_(tableVar.invoke("encodePrimaryKey", rowVar)); } { // Specified by RowDecoderEncoder. 
MethodMaker mm = cm.addMethod(byte[].class, "encodeValue", Object.class).public_(); var rowVar = mm.param(0).cast(mRowClass); var tableVar = mm.var(primaryTableClass); mm.return_(tableVar.invoke("encodeValue", rowVar)); } { // Specified by ScanController. MethodMaker mm = cm.addMethod(QueryPlan.class, "plan").public_(); var condy = mm.var(TableMaker.class).condy ("condyPlan", mRowType, mSecondaryDescriptor, 1); mm.return_(condy.invoke(QueryPlan.class, "plan")); } { // Used by filter subclasses when joining to the primary. The int param is the // schema version. MethodMaker mm = cm.addMethod (MethodHandle.class, "decodeValueHandle", int.class).protected_().static_(); var tableVar = mm.var(primaryTableClass); mm.return_(tableVar.invoke("decodeValueHandle", mm.param(0))); } return cm.finish(); } private void addJoinedDecodeRow(ClassMaker cm, Class<?> primaryTableClass, boolean withPrimaryCursor) { Object[] params; if (!withPrimaryCursor) { params = new Object[] {Cursor.class, LockResult.class, Object.class}; } else { params = new Object[] {Cursor.class, LockResult.class, Object.class, Cursor.class}; } MethodMaker mm = cm.addMethod(Object.class, "decodeRow", params).public_(); var cursorVar = mm.param(0); var resultVar = mm.param(1); var keyVar = cursorVar.invoke("key"); Variable primaryKeyVar; { var tableVar = mm.var(mClassMaker); if (mCodecGen.info.isAltKey()) { var valueVar = cursorVar.invoke("value"); primaryKeyVar = tableVar.invoke("toPrimaryKey", keyVar, valueVar); } else { primaryKeyVar = tableVar.invoke("toPrimaryKey", keyVar); } } if (!withPrimaryCursor) { params = new Object[] {cursorVar, resultVar, primaryKeyVar}; } else { params = new Object[] {cursorVar, resultVar, primaryKeyVar, mm.param(3)}; } var primaryValueVar = mm.invoke("join", params); Label hasValue = mm.label(); primaryValueVar.ifNe(null, hasValue); mm.return_(null); hasValue.here(); var rowVar = mm.param(2).cast(mRowClass); Label hasRow = mm.label(); rowVar.ifNe(null, hasRow); rowVar.set(mm.new_(mRowClass)); hasRow.here(); var tableVar = mm.var(primaryTableClass); tableVar.invoke("decodePrimaryKey", rowVar, primaryKeyVar); tableVar.invoke("decodeValue", rowVar, primaryValueVar); tableVar.invoke("markAllClean", rowVar); mm.return_(rowVar); } }
src/main/java/org/cojen/tupl/rows/TableMaker.java
/* * Copyright 2021 Cojen.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.cojen.tupl.rows; import java.lang.invoke.CallSite; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.lang.ref.WeakReference; import java.util.Collection; import java.util.LinkedHashMap; import java.util.Map; import java.util.Objects; import java.util.TreeMap; import org.cojen.maker.ClassMaker; import org.cojen.maker.Field; import org.cojen.maker.Label; import org.cojen.maker.MethodMaker; import org.cojen.maker.Variable; import org.cojen.tupl.Cursor; import org.cojen.tupl.DatabaseException; import org.cojen.tupl.Index; import org.cojen.tupl.LockResult; import org.cojen.tupl.RowUpdater; import org.cojen.tupl.Table; import org.cojen.tupl.Transaction; import org.cojen.tupl.UnmodifiableViewException; import org.cojen.tupl.core.RowPredicateLock; import org.cojen.tupl.diag.QueryPlan; import org.cojen.tupl.filter.ColumnFilter; import org.cojen.tupl.views.ViewUtils; /** * Makes Table classes that extend AbstractTable. * * @author Brian S O'Neill */ public class TableMaker { private final RowStore mStore; private final Class<?> mRowType; private final RowGen mRowGen; private final RowInfo mRowInfo; private final RowGen mCodecGen; private final Class<?> mRowClass; private final byte[] mSecondaryDescriptor; private final long mIndexId; private final boolean mSupportsIndexLock; private final ColumnInfo mAutoColumn; private ClassMaker mClassMaker; /** * Constructor for primary table. * * @param store generated class is pinned to this specific instance * @param rowGen describes row encoding */ TableMaker(RowStore store, Class<?> type, RowGen rowGen, long indexId, boolean supportsIndexLock) { this(store, type, rowGen, rowGen, null, indexId, supportsIndexLock); } /** * Constructor for secondary index view table. * * @param store generated class is pinned to this specific instance * @param rowGen describes row encoding * @param codecGen describes key and value codecs (different than rowGen) * @param secondaryDesc secondary index descriptor */ TableMaker(RowStore store, Class<?> type, RowGen rowGen, RowGen codecGen, byte[] secondaryDesc, long indexId, boolean supportsIndexLock) { mStore = store; mRowType = type; mRowGen = rowGen; mRowInfo = rowGen.info; mCodecGen = codecGen; mRowClass = RowMaker.find(type); mSecondaryDescriptor = secondaryDesc; mIndexId = indexId; mSupportsIndexLock = supportsIndexLock; ColumnInfo auto = null; if (isPrimaryTable()) { for (ColumnInfo column : codecGen.info.keyColumns.values()) { if (column.isAutomatic()) { auto = column; break; } } } mAutoColumn = auto; } /** * Return a constructor which accepts a (TableManager, Index, RowPredicateLock) and returns * an AbstractTable implementation. 
*/ MethodHandle finish() { { String suffix; Class baseClass; if (isPrimaryTable()) { suffix = "Table"; baseClass = AbstractTable.class; } else { suffix = "Unjoined"; baseClass = AbstractTableView.class; } mClassMaker = mCodecGen.beginClassMaker(getClass(), mRowType, suffix).public_() .extend(baseClass).implement(TableBasicsMaker.find(mRowType)); } MethodType mt = MethodType.methodType (void.class, TableManager.class, Index.class, RowPredicateLock.class); MethodMaker ctor = mClassMaker.addConstructor(mt); ctor.invokeSuperConstructor(ctor.param(0), ctor.param(1), ctor.param(2)); // Add encode/decode methods. { ColumnCodec[] keyCodecs = mCodecGen.keyCodecs(); addEncodeColumnsMethod("encodePrimaryKey", keyCodecs); addDecodeColumnsMethod("decodePrimaryKey", keyCodecs); if (isPrimaryTable()) { addDynamicEncodeValueColumns(); addDynamicDecodeValueColumns(); } else { // The encodeValue method is only used for storing rows into the table. By // making it always fail, there's no backdoor to permit modifications. mClassMaker.addMethod(byte[].class, "encodeValue", mRowClass) .static_().new_(UnmodifiableViewException.class).throw_(); addDecodeColumnsMethod("decodeValue", mCodecGen.valueCodecs()); } addDecodePartialHandle(); } // Add code to support an automatic column (if defined). if (mAutoColumn != null) { Class autoGenClass, autoGenApplierClass; Object minVal, maxVal; if (mAutoColumn.type == int.class) { if (mAutoColumn.isUnsigned()) { autoGenClass = AutomaticKeyGenerator.OfUInt.class; } else { autoGenClass = AutomaticKeyGenerator.OfInt.class; } autoGenApplierClass = AutomaticKeyGenerator.OfInt.Applier.class; minVal = (int) Math.max(mAutoColumn.autoMin, Integer.MIN_VALUE); maxVal = (int) Math.min(mAutoColumn.autoMax, Integer.MAX_VALUE); } else { if (mAutoColumn.isUnsigned()) { autoGenClass = AutomaticKeyGenerator.OfULong.class; } else { autoGenClass = AutomaticKeyGenerator.OfLong.class; } autoGenApplierClass = AutomaticKeyGenerator.OfLong.Applier.class; minVal = mAutoColumn.autoMin; maxVal = mAutoColumn.autoMax; } mClassMaker.implement(autoGenApplierClass); mClassMaker.addField(autoGenClass, "autogen").private_().final_(); ctor.field("autogen").set (ctor.new_(autoGenClass, ctor.param(1), minVal, maxVal, ctor.this_())); MethodMaker mm = mClassMaker.addMethod (RowPredicateLock.Closer.class, "applyToRow", Transaction.class, Object.class, mAutoColumn.type); mm.public_(); var rowVar = mm.param(1).cast(mRowClass); rowVar.field(mAutoColumn.name).set(mm.param(2)); if (!mSupportsIndexLock) { mm.return_(mm.var(RowPredicateLock.NonCloser.class).field("THE")); } else { mm.return_(mm.field("mIndexLock").invoke("tryOpenAcquire", mm.param(0), rowVar)); } var allButAuto = new TreeMap<>(mCodecGen.info.allColumns); allButAuto.remove(mAutoColumn.name); addCheckSet("checkAllButAutoSet", allButAuto); addStoreAutoMethod(); } // Add private methods which check that required columns are set. 
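        // Note (added commentary): the boolean check* methods gate the public operations;
        // requireAllSet (primary table only) throws an IllegalStateException naming the unset
        // columns; the dirty checks are used by doUpdate and by the unfiltered controller's
        // encodeKey.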
{ addCheckSet("checkPrimaryKeySet", mCodecGen.info.keyColumns); //addCheckSet("checkValue", mCodecGen.info.valueColumns); if (isPrimaryTable()) { addCheckSet("checkAllSet", mCodecGen.info.allColumns); addRequireSet("requireAllSet", mCodecGen.info.allColumns); } int i = 0; for (ColumnSet altKey : mCodecGen.info.alternateKeys) { addCheckSet("checkAltKeySet$" + i, altKey.keyColumns); i++; } if (isPrimaryTable() && !mCodecGen.info.valueColumns.isEmpty()) { addCheckAllDirty("checkValueAllDirty", mCodecGen.info.valueColumns); } addCheckAnyDirty("checkPrimaryKeyAnyDirty", mCodecGen.info.keyColumns); } // Add the public load/store methods, etc. addByKeyMethod("load"); addByKeyMethod("exists"); if (isPrimaryTable()) { addByKeyMethod("delete"); addStoreMethod("store", null); addStoreMethod("exchange", mRowType); addStoreMethod("insert", boolean.class); addStoreMethod("replace", boolean.class); addDoUpdateMethod(); addUpdateMethod("update", false); addUpdateMethod("merge", true); } addMarkAllCleanMethod(); addToRowMethod(); addToKeyMethod(); addRowStoreRefMethod(); addUnfilteredMethod(); if (!isPrimaryTable()) { addSecondaryDescriptorMethod(); } return doFinish(mt); } /** * Return a constructor which accepts a (Index, RowPredicateLock, TableImpl primary, * TableImpl unjoined) and returns an AbstractTable implementation. * * @param primaryTableClass the primary table implementation class * @param unjoinedClass the table implementation which is passed as the last constructor * parameter */ MethodHandle finishJoined(Class<?> primaryTableClass, Class<?> unjoinedClass) { Objects.requireNonNull(primaryTableClass); mClassMaker = mCodecGen.beginClassMaker(getClass(), mRowType, "Joined").public_() .extend(unjoinedClass); { MethodMaker mm = mClassMaker.addMethod (Class.class, "joinedPrimaryTableClass").protected_(); mm.return_(primaryTableClass); } MethodType mt = MethodType.methodType (void.class, Index.class, RowPredicateLock.class, primaryTableClass, unjoinedClass); MethodMaker ctor = mClassMaker.addConstructor(mt); var indexVar = ctor.param(0); var lockVar = ctor.param(1); var primaryVar = ctor.param(2); var unjoinedVar = ctor.param(3); var managerVar = primaryVar.invoke("tableManager"); ctor.invokeSuperConstructor(managerVar, indexVar, lockVar); mClassMaker.addField(primaryTableClass, "primaryTable").private_().final_(); ctor.field("primaryTable").set(primaryVar); mClassMaker.addField(Index.class, "primaryIndex").private_().final_(); ctor.field("primaryIndex").set(managerVar.invoke("primaryIndex")); mClassMaker.addField(unjoinedClass, "unjoined").private_().final_(); ctor.field("unjoined").set(unjoinedVar); { MethodMaker mm = mClassMaker.addMethod(AbstractTable.class, "viewUnjoined").public_(); mm.return_(mm.field("unjoined")); } addToPrimaryKeyMethod(mClassMaker, false, true); addToPrimaryKeyMethod(mClassMaker, true, true); addJoinedLoadMethod(primaryTableClass); // Define the class that implements the unfiltered JoinedScanController and construct a // singleton instance. var scanControllerClass = makeUnfilteredJoinedScanControllerClass(primaryTableClass); mClassMaker.addField(scanControllerClass, "unfiltered").private_().final_(); ctor.field("unfiltered").set (ctor.new_(scanControllerClass, null, false, null, false, ctor.field("primaryIndex"))); // Override the method inherited from the unjoined class as defined in AbstractTable. 
MethodMaker mm = mClassMaker.addMethod (SingleScanController.class, "unfiltered").protected_(); mm.return_(mm.field("unfiltered")); // Override the method inherited from AbstractTableView. mm = mClassMaker.addMethod(RowUpdater.class, "newRowUpdater", Transaction.class, ScanController.class).protected_(); mm.return_(mm.invoke("newJoinedRowUpdater", mm.param(0), mm.param(1), mm.field("primaryTable"))); return doFinish(mt); } private void addJoinedLoadMethod(Class<?> primaryTableClass) { MethodMaker mm = mClassMaker.addMethod (boolean.class, "load", Transaction.class, Object.class).public_(); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1).cast(mRowClass); { Label ready = mm.label(); mm.invoke("checkPrimaryKeySet", rowVar).ifTrue(ready); mm.new_(IllegalStateException.class, "Primary key isn't fully specified").throw_(); ready.here(); } Variable valueVar = null; Variable repeatableVar = null; Variable joinedPkVar; Label notFound = mm.label(); if (mCodecGen.info.isAltKey()) { var keyVar = mm.invoke("encodePrimaryKey", rowVar); valueVar = mm.field("mSource").invoke("load", txnVar, keyVar); valueVar.ifEq(null, notFound); joinedPkVar = mm.invoke("toPrimaryKey", rowVar, valueVar); } else { repeatableVar = mm.var(RowUtils.class).invoke("isRepeatable", txnVar); Label ready = mm.label(); repeatableVar.ifFalse(ready); var keyVar = mm.invoke("encodePrimaryKey", rowVar); // Calling exists is necessary for proper lock acquisition order. var resultVar = mm.field("mSource").invoke("exists", txnVar, keyVar); resultVar.ifFalse(notFound); ready.here(); joinedPkVar = mm.invoke("toPrimaryKey", rowVar); } var joinedValueVar = mm.field("primaryIndex").invoke("load", txnVar, joinedPkVar); Label notNull = mm.label(); joinedValueVar.ifNe(null, notNull); notFound.here(); markValuesUnset(rowVar); mm.return_(false); notNull.here(); if (repeatableVar == null) { repeatableVar = mm.var(RowUtils.class).invoke("isRepeatable", txnVar); } Label checked = mm.label(); repeatableVar.ifFalse(checked); if (valueVar != null) { // Decode the primary key columns (required by alt key only). mm.invoke("decodeValue", rowVar, valueVar); } mm.var(primaryTableClass).invoke("decodeValue", rowVar, joinedValueVar); Label success = mm.label().here(); markAllClean(rowVar, mRowGen, mRowGen); mm.return_(true); // This point is reached for double checking that the joined row matches to the // secondary row, which is required when a lock isn't held. checked.here(); // Copy of all the columns which will be modified by decodeValue. Map<String, ColumnInfo> copiedColumns; if (valueVar != null) { // For alt key, the primary key columns will be modified too. copiedColumns = mRowInfo.allColumns; } else { copiedColumns = mRowInfo.valueColumns; } Map<String, Variable> copiedVars = new LinkedHashMap<>(copiedColumns.size()); for (String name : copiedColumns.keySet()) { copiedVars.put(name, rowVar.field(name).get()); } if (valueVar != null) { // For alt key, decode the primary key columns too. mm.invoke("decodeValue", rowVar, valueVar); } mm.var(primaryTableClass).invoke("decodeValue", rowVar, joinedValueVar); // Check all the secondary columns, except those that refer to the primary key, which // won't have changed. 
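        // Note (added commentary): each mismatch branches to the fail label, which restores the
        // copied column values and reports not-found; a full match jumps back to the success
        // label above.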
Label fail = mm.label(); Map<String, ColumnInfo> pkColumns = mRowInfo.keyColumns; for (ColumnInfo column : mCodecGen.info.allColumns.values()) { String name = column.name; if (pkColumns.containsKey(name)) { continue; } Label pass = mm.label(); // Note that the secondary columns are passed as the compare arguments, because // that's what they effectively are -- a type of filter expression. This is // important because the comparison isn't necessarily symmetrical. See // BigDecimalUtils.matches. CompareUtils.compare(mm, column, rowVar.field(name), copiedColumns.get(name), copiedVars.get(name), ColumnFilter.OP_EQ, pass, fail); pass.here(); } mm.goto_(success); fail.here(); // Restore all the columns back to their original values, preventing any side-effects. // When the load method returns false, it's not supposed to modify any columns, // regardless of their state. for (Map.Entry<String, Variable> e : copiedVars.entrySet()) { rowVar.field(e.getKey()).set(e.getValue()); } mm.goto_(notFound); } private MethodHandle doFinish(MethodType mt) { try { var lookup = mClassMaker.finishLookup(); return lookup.findConstructor(lookup.lookupClass(), mt); } catch (Throwable e) { throw RowUtils.rethrow(e); } } private boolean isPrimaryTable() { return mRowGen == mCodecGen; } private boolean supportsTriggers() { return isPrimaryTable(); } /** * Defines a static method which accepts a row and returns boolean. When it returns true, * all of the given columns are set. * * @param name method name */ private void addCheckSet(String name, Map<String, ColumnInfo> columns) { MethodMaker mm = mClassMaker.addMethod(boolean.class, name, mRowClass).static_(); if (columns.isEmpty()) { mm.return_(true); return; } if (columns.size() == 1) { int num = mRowGen.columnNumbers().get(columns.values().iterator().next().name); Label cont = mm.label(); stateField(mm.param(0), num).and(RowGen.stateFieldMask(num)).ifNe(0, cont); mm.return_(false); cont.here(); mm.return_(true); return; } int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? mRowGen.keyCodecs() : mRowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { ColumnInfo info = codec.mInfo; if (columns.containsKey(info.name)) { mask |= RowGen.stateFieldMask(num); } if (isMaskReady(++num, mask)) { // Convert all states of value 0b01 (clean) into value 0b11 (dirty). All // other states stay the same. var state = stateField(mm.param(0), num - 1).get(); state = state.or(state.and(0x5555_5555).shl(1)); // Flip all column state bits. If final result is non-zero, then some // columns were unset. state = state.xor(mask); mask = maskRemainder(num, mask); if (mask != 0xffff_ffff) { state = state.and(mask); } Label cont = mm.label(); state.ifEq(0, cont); mm.return_(false); cont.here(); mask = 0; } } } mm.return_(true); } /** * Defines a static method which accepts a row and returns boolean. When it returns true, * all of the given columns are dirty. * * @param name method name */ private void addCheckAllDirty(String name, Map<String, ColumnInfo> columns) { MethodMaker mm = mClassMaker.addMethod(boolean.class, name, mRowClass).static_(); int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? 
mRowGen.keyCodecs() : mRowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { ColumnInfo info = codec.mInfo; if (columns.containsKey(info.name)) { mask |= RowGen.stateFieldMask(num); } if (isMaskReady(++num, mask)) { Label cont = mm.label(); stateField(mm.param(0), num - 1).and(mask).ifEq(mask, cont); mm.return_(false); cont.here(); mask = 0; } } } mm.return_(true); } /** * Defines a static method which accepts a row and returns boolean. When it returns true, * at least one of the given columns are dirty. * * @param name method name */ private void addCheckAnyDirty(String name, Map<String, ColumnInfo> columns) { MethodMaker mm = mClassMaker.addMethod(boolean.class, name, mRowClass).static_(); int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? mRowGen.keyCodecs() : mRowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { ColumnInfo info = codec.mInfo; if (columns.containsKey(info.name)) { mask |= RowGen.stateFieldMask(num, 0b10); } if (isMaskReady(++num, mask)) { Label cont = mm.label(); stateField(mm.param(0), num - 1).and(mask).ifEq(0, cont); mm.return_(true); cont.here(); mask = 0; } } } mm.return_(false); } /** * Called when building state field masks for columns, when iterating them in order. * * @param num column number pre-incremented to the next one * @param mask current group; must be non-zero to have any effect */ private boolean isMaskReady(int num, int mask) { return mask != 0 && ((num & 0b1111) == 0 || num >= mRowInfo.allColumns.size()); } /** * When building a mask for the highest state field, sets the high unused bits on the * mask. This can eliminate an unnecessary 'and' operation. * * @param num column number pre-incremented to the next one * @param mask current group * @return updated mask */ private int maskRemainder(int num, int mask) { if (num >= mRowInfo.allColumns.size()) { int shift = (num & 0b1111) << 1; if (shift != 0) { mask |= 0xffff_ffff << shift; } } return mask; } /** * Defines a static method which accepts a row and always throws a detailed exception * describing the required columns which aren't set. A check method should have been * invoked first. * * @param name method name */ private void addRequireSet(String name, Map<String, ColumnInfo> columns) { MethodMaker mm = mClassMaker.addMethod(null, name, mRowClass).static_(); String initMessage = "Some required columns are unset"; if (columns.isEmpty()) { mm.new_(IllegalStateException.class, initMessage).throw_(); return; } int initLength = initMessage.length() + 2; var bob = mm.new_(StringBuilder.class, initLength << 1) .invoke("append", initMessage).invoke("append", ": "); boolean first = true; for (ColumnInfo info : columns.values()) { int num = mRowGen.columnNumbers().get(info.name); Label isSet = mm.label(); stateField(mm.param(0), num).and(RowGen.stateFieldMask(num)).ifNe(0, isSet); if (!first) { Label sep = mm.label(); bob.invoke("length").ifEq(initLength, sep); bob.invoke("append", ", "); sep.here(); } bob.invoke("append", info.name); isSet.here(); first = false; } mm.new_(IllegalStateException.class, bob.invoke("toString")).throw_(); } /** * @return null if no field is defined for the column (probably SchemaVersionColumnCodec) */ private static Field findField(Variable row, ColumnCodec codec) { ColumnInfo info = codec.mInfo; return info == null ? 
null : row.field(info.name); } /** * Defines a static method which returns a new composite byte[] key or value. Caller must * check that the columns are set. * * @param name method name */ private void addEncodeColumnsMethod(String name, ColumnCodec[] codecs) { MethodMaker mm = mClassMaker.addMethod(byte[].class, name, mRowClass).static_(); addEncodeColumns(mm, ColumnCodec.bind(codecs, mm)); } /** * @param mm param(0): Row object, return: byte[] * @param codecs must be bound to the MethodMaker */ private static void addEncodeColumns(MethodMaker mm, ColumnCodec[] codecs) { if (codecs.length == 0) { mm.return_(mm.var(RowUtils.class).field("EMPTY_BYTES")); return; } // Determine the minimum byte array size and prepare the encoders. int minSize = 0; for (ColumnCodec codec : codecs) { minSize += codec.minSize(); codec.encodePrepare(); } // Generate code which determines the additional runtime length. Variable totalVar = null; for (ColumnCodec codec : codecs) { Field srcVar = findField(mm.param(0), codec); totalVar = codec.encodeSize(srcVar, totalVar); } // Generate code which allocates the destination byte array. Variable dstVar; if (totalVar == null) { dstVar = mm.new_(byte[].class, minSize); } else { if (minSize != 0) { totalVar = totalVar.add(minSize); } dstVar = mm.new_(byte[].class, totalVar); } // Generate code which fills in the byte array. var offsetVar = mm.var(int.class).set(0); for (ColumnCodec codec : codecs) { codec.encode(findField(mm.param(0), codec), dstVar, offsetVar); } mm.return_(dstVar); } /** * Method isn't implemented until needed, delaying acquisition/creation of the current * schema version. This allows replicas to decode existing rows even when the class * definition has changed, but encoding will still fail. */ private void addDynamicEncodeValueColumns() { MethodMaker mm = mClassMaker.addMethod(byte[].class, "encodeValue", mRowClass).static_(); var indy = mm.var(TableMaker.class).indy ("indyEncodeValueColumns", mStore.ref(), mRowType, mIndexId); mm.return_(indy.invoke(byte[].class, "encodeValue", null, mm.param(0))); } public static CallSite indyEncodeValueColumns (MethodHandles.Lookup lookup, String name, MethodType mt, WeakReference<RowStore> storeRef, Class<?> rowType, long indexId) { return doIndyEncode (lookup, name, mt, storeRef, rowType, indexId, (mm, info, schemaVersion) -> { ColumnCodec[] codecs = info.rowGen().valueCodecs(); addEncodeColumns(mm, ColumnCodec.bind(schemaVersion, codecs, mm)); }); } @FunctionalInterface static interface EncodeFinisher { void finish(MethodMaker mm, RowInfo info, int schemaVersion); } /** * Does the work to obtain the current schema version, handling any exceptions. The given * finisher completes the definition of the encode method when no exception was thrown when * trying to obtain the schema version. If an exception was thrown, the finisher might be * called at a later time. 
*/ private static CallSite doIndyEncode(MethodHandles.Lookup lookup, String name, MethodType mt, WeakReference<RowStore> storeRef, Class<?> rowType, long indexId, EncodeFinisher finisher) { return ExceptionCallSite.make(() -> { MethodMaker mm = MethodMaker.begin(lookup, name, mt); RowStore store = storeRef.get(); if (store == null) { mm.new_(DatabaseException.class, "Closed").throw_(); } else { RowInfo info = RowInfo.find(rowType); int schemaVersion; try { schemaVersion = store.schemaVersion(info, false, indexId, true); } catch (Exception e) { return new ExceptionCallSite.Failed(mt, mm, e); } finisher.finish(mm, info, schemaVersion); } return mm.finish(); }); } /** * Defines a static method which decodes columns from a composite byte[] parameter. * * @param name method name */ private void addDecodeColumnsMethod(String name, ColumnCodec[] codecs) { MethodMaker mm = mClassMaker.addMethod(null, name, mRowClass, byte[].class) .static_().public_(); addDecodeColumns(mm, mRowInfo, codecs, 0); } /** * @param mm param(0): Row object, param(1): byte[], return: void * @param fixedOffset must be after the schema version (when applicable) */ private static void addDecodeColumns(MethodMaker mm, RowInfo dstRowInfo, ColumnCodec[] srcCodecs, int fixedOffset) { srcCodecs = ColumnCodec.bind(srcCodecs, mm); Variable srcVar = mm.param(1); Variable offsetVar = mm.var(int.class).set(fixedOffset); for (ColumnCodec srcCodec : srcCodecs) { String name = srcCodec.mInfo.name; ColumnInfo dstInfo = dstRowInfo.allColumns.get(name); if (dstInfo == null) { srcCodec.decodeSkip(srcVar, offsetVar, null); } else { var rowVar = mm.param(0); Field dstVar = rowVar.field(name); Converter.decode(mm, srcVar, offsetVar, null, srcCodec, dstInfo, dstVar); } } } private void addDynamicDecodeValueColumns() { // First define a method which generates the SwitchCallSite. { MethodMaker mm = mClassMaker.addMethod (SwitchCallSite.class, "decodeValueSwitchCallSite").static_(); var condy = mm.var(TableMaker.class).condy ("condyDecodeValueColumns", mStore.ref(), mRowType, mRowClass, mIndexId); mm.return_(condy.invoke(SwitchCallSite.class, "_")); } // Also define a method to obtain a MethodHandle which decodes for a given schema // version. This must be defined here to ensure that the correct lookup is used. It // must always refer to this table class. { MethodMaker mm = mClassMaker.addMethod (MethodHandle.class, "decodeValueHandle", int.class).static_(); var lookup = mm.var(MethodHandles.class).invoke("lookup"); var mh = mm.invoke("decodeValueSwitchCallSite").invoke("getCase", lookup, mm.param(0)); mm.return_(mh); } MethodMaker mm = mClassMaker.addMethod (null, "decodeValue", mRowClass, byte[].class).static_().public_(); var data = mm.param(1); var schemaVersion = mm.var(RowUtils.class).invoke("decodeSchemaVersion", data); var indy = mm.var(TableMaker.class).indy("indyDecodeValueColumns"); indy.invoke(null, "decodeValue", null, schemaVersion, mm.param(0), data); } /** * Returns a SwitchCallSite instance suitable for decoding all value columns. By defining * it via a "condy" method, the SwitchCallSite instance can be shared by other methods. In * particular, filter subclasses are generated against specific schema versions, and so * they need direct access to just one of the cases. This avoids a redundant version check. 
* * MethodType is: void (int schemaVersion, RowClass row, byte[] data) */ public static SwitchCallSite condyDecodeValueColumns (MethodHandles.Lookup lookup, String name, Class<?> type, WeakReference<RowStore> storeRef, Class<?> rowType, Class<?> rowClass, long indexId) { MethodType mt = MethodType.methodType(void.class, int.class, rowClass, byte[].class); return new SwitchCallSite(lookup, mt, schemaVersion -> { MethodMaker mm = MethodMaker.begin(lookup, null, "case", rowClass, byte[].class); RowStore store = storeRef.get(); if (store == null) { mm.new_(DatabaseException.class, "Closed").throw_(); } else { RowInfo dstRowInfo = RowInfo.find(rowType); if (schemaVersion == 0) { // No columns to decode, so assign defaults. for (Map.Entry<String, ColumnInfo> e : dstRowInfo.valueColumns.entrySet()) { Converter.setDefault(mm, e.getValue(), mm.param(0).field(e.getKey())); } } else { RowInfo srcRowInfo; try { srcRowInfo = store.rowInfo(rowType, indexId, schemaVersion); } catch (Exception e) { return new ExceptionCallSite.Failed (MethodType.methodType(void.class, rowClass, byte[].class), mm, e); } ColumnCodec[] srcCodecs = srcRowInfo.rowGen().valueCodecs(); int fixedOffset = schemaVersion < 128 ? 1 : 4; addDecodeColumns(mm, dstRowInfo, srcCodecs, fixedOffset); if (dstRowInfo != srcRowInfo) { // Assign defaults for any missing columns. for (Map.Entry<String, ColumnInfo> e : dstRowInfo.valueColumns.entrySet()) { String fieldName = e.getKey(); if (!srcRowInfo.valueColumns.containsKey(fieldName)) { Converter.setDefault (mm, e.getValue(), mm.param(0).field(fieldName)); } } } } } return mm.finish(); }); } /** * This just returns the SwitchCallSite generated by condyDecodeValueColumns. */ public static SwitchCallSite indyDecodeValueColumns(MethodHandles.Lookup lookup, String name, MethodType mt) throws Throwable { MethodHandle mh = lookup.findStatic(lookup.lookupClass(), "decodeValueSwitchCallSite", MethodType.methodType(SwitchCallSite.class)); return (SwitchCallSite) mh.invokeExact(); } private void addDecodePartialHandle() { MethodMaker mm = mClassMaker.addMethod (MethodHandle.class, "makeDecodePartialHandle", byte[].class, int.class).protected_(); var spec = mm.param(0); var lookup = mm.var(MethodHandles.class).invoke("lookup"); Variable decoder; if (isPrimaryTable()) { var schemaVersion = mm.param(1); var storeRef = mm.invoke("rowStoreRef"); decoder = mm.var(DecodePartialMaker.class).invoke ("makeDecoder", lookup, storeRef, mRowType, mRowClass, mm.class_(), mIndexId, spec, schemaVersion); } else { var secondaryDescVar = mm.var(byte[].class).setExact(mSecondaryDescriptor); decoder = mm.var(DecodePartialMaker.class).invoke ("makeDecoder", lookup, mRowType, mRowClass, mm.class_(), secondaryDescVar, spec); } mm.return_(decoder); } /** * @param variant "load", "exists", or "delete" */ private void addByKeyMethod(String variant) { MethodMaker mm = mClassMaker.addMethod (boolean.class, variant, Transaction.class, Object.class).public_(); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1).cast(mRowClass); Label ready = mm.label(); mm.invoke("checkPrimaryKeySet", rowVar).ifTrue(ready); mm.new_(IllegalStateException.class, "Primary key isn't fully specified").throw_(); ready.here(); var keyVar = mm.invoke("encodePrimaryKey", rowVar); final var source = mm.field("mSource"); final Variable valueVar; if (variant != "delete" || !supportsTriggers()) { valueVar = source.invoke(variant, txnVar, keyVar); } else { var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, 
mm.this_(), triggerVar, skipLabel); Label triggerStart = mm.label().here(); // Trigger requires a non-null transaction. txnVar.set(mm.var(ViewUtils.class).invoke("enterScope", source, txnVar)); Label txnStart = mm.label().here(); var cursorVar = source.invoke("newCursor", txnVar); Label cursorStart = mm.label().here(); cursorVar.invoke("find", keyVar); var oldValueVar = cursorVar.invoke("value"); Label commit = mm.label(); oldValueVar.ifEq(null, commit); triggerVar.invoke("delete", txnVar, rowVar, keyVar, oldValueVar); commit.here(); cursorVar.invoke("commit", (Object) null); mm.return_(oldValueVar.ne(null)); mm.finally_(cursorStart, () -> cursorVar.invoke("reset")); mm.finally_(txnStart, () -> txnVar.invoke("exit")); skipLabel.here(); assert variant == "delete"; valueVar = source.invoke(variant, txnVar, keyVar); mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); } if (variant != "load") { mm.return_(valueVar); } else { Label notNull = mm.label(); valueVar.ifNe(null, notNull); markValuesUnset(rowVar); mm.return_(false); notNull.here(); mm.invoke("decodeValue", rowVar, valueVar); markAllClean(rowVar); mm.return_(true); } } /** * @param variant "store", "exchange", "insert", or "replace" */ private void addStoreMethod(String variant, Class returnType) { MethodMaker mm = mClassMaker.addMethod (returnType, variant, Transaction.class, Object.class).public_(); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1).cast(mRowClass); Label ready = mm.label(); mm.invoke("checkAllSet", rowVar).ifTrue(ready); if (variant != "replace" && mAutoColumn != null) { Label notReady = mm.label(); mm.invoke("checkAllButAutoSet", rowVar).ifFalse(notReady); mm.invoke("storeAuto", txnVar, rowVar); if (variant == "exchange") { mm.return_(null); } else if (variant == "insert") { mm.return_(true); } else { mm.return_(); } notReady.here(); } mm.invoke("requireAllSet", rowVar); ready.here(); var keyVar = mm.invoke("encodePrimaryKey", rowVar); var valueVar = mm.invoke("encodeValue", rowVar); Variable resultVar = null; if (!supportsTriggers()) { resultVar = storeNoTrigger(mm, variant, txnVar, rowVar, keyVar, valueVar); } else { Label cont = mm.label(); var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, mm.this_(), triggerVar, skipLabel); Label triggerStart = mm.label().here(); final var source = mm.field("mSource").get(); // Trigger requires a non-null transaction. txnVar.set(mm.var(ViewUtils.class).invoke("enterScope", source, txnVar)); Label txnStart = mm.label().here(); mm.invoke("redoPredicateMode", txnVar); // Always use a cursor to acquire the upgradable row lock before updating // secondaries. This prevents deadlocks with a concurrent index scan which joins // against the row. The row lock is acquired exclusively after all secondaries have // been updated. At that point, shared lock acquisition against the row is blocked. 
var cursorVar = source.invoke("newCursor", txnVar); Label cursorStart = mm.label().here(); if (variant == "replace") { cursorVar.invoke("find", keyVar); var oldValueVar = cursorVar.invoke("value"); Label passed = mm.label(); oldValueVar.ifNe(null, passed); mm.return_(false); passed.here(); triggerVar.invoke("store", txnVar, rowVar, keyVar, oldValueVar, valueVar); cursorVar.invoke("commit", valueVar); markAllClean(rowVar); mm.return_(true); } else { Variable closerVar; Label opStart; if (!mSupportsIndexLock) { closerVar = null; opStart = null; } else { closerVar = mm.field("mIndexLock").invoke("openAcquire", txnVar, rowVar); opStart = mm.label().here(); } if (variant == "insert") { cursorVar.invoke("autoload", false); cursorVar.invoke("find", keyVar); if (closerVar != null) { mm.finally_(opStart, () -> closerVar.invoke("close")); } Label passed = mm.label(); cursorVar.invoke("value").ifEq(null, passed); mm.return_(false); passed.here(); triggerVar.invoke("insert", txnVar, rowVar, keyVar, valueVar); cursorVar.invoke("commit", valueVar); markAllClean(rowVar); mm.return_(true); } else { cursorVar.invoke("find", keyVar); if (closerVar != null) { mm.finally_(opStart, () -> closerVar.invoke("close")); } var oldValueVar = cursorVar.invoke("value"); Label wasNull = mm.label(); oldValueVar.ifEq(null, wasNull); triggerVar.invoke("store", txnVar, rowVar, keyVar, oldValueVar, valueVar); Label commit = mm.label().goto_(); wasNull.here(); triggerVar.invoke("insert", txnVar, rowVar, keyVar, valueVar); commit.here(); cursorVar.invoke("commit", valueVar); if (variant == "store") { markAllClean(rowVar); mm.return_(); } else { resultVar = oldValueVar; mm.goto_(cont); } } } mm.finally_(cursorStart, () -> cursorVar.invoke("reset")); mm.finally_(txnStart, () -> txnVar.invoke("exit")); skipLabel.here(); Variable storeResultVar = storeNoTrigger(mm, variant, txnVar, rowVar, keyVar, valueVar); if (resultVar == null) { resultVar = storeResultVar; } else { resultVar.set(storeResultVar); } mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); cont.here(); } if (returnType == null) { // This case is expected only for the "store" variant. markAllClean(rowVar); return; } if (variant != "exchange") { // This case is expected for the "insert" and "replace" variants. Label failed = mm.label(); resultVar.ifFalse(failed); markAllClean(rowVar); failed.here(); mm.return_(resultVar); return; } // The rest is for implementing the "exchange" variant. markAllClean(rowVar); Label found = mm.label(); resultVar.ifNe(null, found); mm.return_(null); found.here(); var copyVar = mm.new_(mRowClass); copyFields(mm, rowVar, copyVar, mCodecGen.info.keyColumns.values()); mm.invoke("decodeValue", copyVar, resultVar); markAllClean(copyVar); mm.return_(copyVar); // Now implement the exchange bridge method. mm = mClassMaker.addMethod (Object.class, variant, Transaction.class, Object.class).public_().bridge(); mm.return_(mm.this_().invoke(returnType, variant, null, mm.param(0), mm.param(1))); } /** * @param variant "store", "exchange", "insert", or "replace" */ private Variable storeNoTrigger(MethodMaker mm, String variant, Variable txnVar, Variable rowVar, Variable keyVar, Variable valueVar) { if (variant == "replace" || !mSupportsIndexLock) { return mm.field("mSource").invoke(variant, txnVar, keyVar, valueVar); } else { // Call protected method inherited from AbstractTable. 
return mm.invoke(variant, txnVar, rowVar, keyVar, valueVar); } } private void addStoreAutoMethod() { MethodMaker mm = mClassMaker.addMethod(null, "storeAuto", Transaction.class, mRowClass); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1); var keyVar = mm.invoke("encodePrimaryKey", rowVar); var valueVar = mm.invoke("encodeValue", rowVar); // Call enterScopex because bogus transaction doesn't work with AutomaticKeyGenerator. txnVar.set(mm.var(ViewUtils.class).invoke("enterScopex", mm.field("mSource"), txnVar)); Label txnStart = mm.label().here(); mm.invoke("redoPredicateMode", txnVar); if (!supportsTriggers()) { mm.field("autogen").invoke("store", txnVar, rowVar, keyVar, valueVar); txnVar.invoke("commit"); markAllClean(rowVar); mm.return_(); } else { var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, mm.this_(), triggerVar, skipLabel); Label triggerStart = mm.label().here(); mm.field("autogen").invoke("store", txnVar, rowVar, keyVar, valueVar); triggerVar.invoke("insert", txnVar, rowVar, keyVar, valueVar); Label commitLabel = mm.label().goto_(); skipLabel.here(); mm.field("autogen").invoke("store", txnVar, rowVar, keyVar, valueVar); commitLabel.here(); txnVar.invoke("commit"); markAllClean(rowVar); mm.return_(); mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); } mm.finally_(txnStart, () -> txnVar.invoke("exit")); } private static void copyFields(MethodMaker mm, Variable src, Variable dst, Collection<ColumnInfo> infos) { for (ColumnInfo info : infos) { Variable srcField = src.field(info.name); if (info.isArray()) { srcField = srcField.get(); Label isNull = null; if (info.isNullable()) { isNull = mm.label(); srcField.ifEq(null, isNull); } srcField.set(srcField.invoke("clone").cast(info.type)); if (isNull != null) { isNull.here(); } } dst.field(info.name).set(srcField); } } /** * Adds a method which does most of the work for the update and merge methods. The * transaction parameter must not be null, which is committed when changes are made. * * boolean doUpdate(Transaction txn, ActualRow row, boolean merge); */ private void addDoUpdateMethod() { MethodMaker mm = mClassMaker.addMethod (boolean.class, "doUpdate", Transaction.class, mRowClass, boolean.class); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1); Variable mergeVar = mm.param(2); Label ready = mm.label(); mm.invoke("checkPrimaryKeySet", rowVar).ifTrue(ready); mm.new_(IllegalStateException.class, "Primary key isn't fully specified").throw_(); ready.here(); final var keyVar = mm.invoke("encodePrimaryKey", rowVar); final var source = mm.field("mSource"); final var cursorVar = source.invoke("newCursor", txnVar); Label cursorStart = mm.label().here(); // If all value columns are dirty, replace the whole row and commit. { Label cont; if (mCodecGen.info.valueColumns.isEmpty()) { // If the checkValueAllDirty method was defined, it would always return true. 
cont = null; } else { cont = mm.label(); mm.invoke("checkValueAllDirty", rowVar).ifFalse(cont); } final Variable triggerVar; final Label triggerStart; if (!supportsTriggers()) { triggerVar = null; triggerStart = null; } else { triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, mm.this_(), triggerVar, skipLabel); triggerStart = mm.label().here(); cursorVar.invoke("find", keyVar); var oldValueVar = cursorVar.invoke("value"); Label replace = mm.label(); oldValueVar.ifNe(null, replace); mm.return_(false); replace.here(); var valueVar = mm.invoke("encodeValue", rowVar); triggerVar.invoke("store", txnVar, rowVar, keyVar, oldValueVar, valueVar); cursorVar.invoke("commit", valueVar); markAllClean(rowVar); mm.return_(true); skipLabel.here(); } cursorVar.invoke("autoload", false); cursorVar.invoke("find", keyVar); Label replace = mm.label(); cursorVar.invoke("value").ifNe(null, replace); mm.return_(false); replace.here(); cursorVar.invoke("commit", mm.invoke("encodeValue", rowVar)); if (triggerStart != null) { mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); } markAllClean(rowVar); mm.return_(true); if (cont == null) { return; } cont.here(); } cursorVar.invoke("find", keyVar); Label hasValue = mm.label(); cursorVar.invoke("value").ifNe(null, hasValue); mm.return_(false); hasValue.here(); // The bulk of the method isn't implemented until needed, delaying acquisition/creation // of the current schema version. var indy = mm.var(TableMaker.class).indy ("indyDoUpdate", mStore.ref(), mRowType, mIndexId, supportsTriggers() ? 1 : 0); indy.invoke(null, "doUpdate", null, mm.this_(), rowVar, mergeVar, cursorVar); mm.return_(true); mm.finally_(cursorStart, () -> cursorVar.invoke("reset")); } /** * @param triggers 0 for false, 1 for true */ public static CallSite indyDoUpdate(MethodHandles.Lookup lookup, String name, MethodType mt, WeakReference<RowStore> storeRef, Class<?> rowType, long indexId, int triggers) { return doIndyEncode (lookup, name, mt, storeRef, rowType, indexId, (mm, info, schemaVersion) -> { finishIndyDoUpdate(mm, info, schemaVersion, triggers); }); } /** * @param triggers 0 for false, 1 for true */ private static void finishIndyDoUpdate(MethodMaker mm, RowInfo rowInfo, int schemaVersion, int triggers) { // All these variables were provided by the indy call in addDoUpdateMethod. Variable tableVar = mm.param(0); Variable rowVar = mm.param(1); Variable mergeVar = mm.param(2); Variable cursorVar = mm.param(3); Variable valueVar = cursorVar.invoke("value"); var ue = encodeUpdateEntry(mm, rowInfo, schemaVersion, tableVar, rowVar, valueVar); Variable newValueVar = ue.newEntryVar; Variable[] offsetVars = ue.offsetVars; if (triggers == 0) { cursorVar.invoke("commit", newValueVar); } Label doMerge = mm.label(); mergeVar.ifTrue(doMerge); if (triggers != 0) { var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, tableVar, triggerVar, skipLabel); Label triggerStart = mm.label().here(); var txnVar = cursorVar.invoke("link"); var keyVar = cursorVar.invoke("key"); triggerVar.invoke("update", txnVar, rowVar, keyVar, valueVar, newValueVar); cursorVar.invoke("commit", newValueVar); Label cont = mm.label().goto_(); skipLabel.here(); cursorVar.invoke("commit", newValueVar); mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); cont.here(); } markAllUndirty(rowVar, rowInfo); mm.return_(); doMerge.here(); // Decode all the original column values that weren't updated into the row. 
RowGen rowGen = rowInfo.rowGen(); Map<String, Integer> columnNumbers = rowGen.columnNumbers(); ColumnCodec[] codecs = ColumnCodec.bind(rowGen.valueCodecs(), mm); String stateFieldName = null; Variable stateField = null; for (int i=0; i<codecs.length; i++) { ColumnCodec codec = codecs[i]; ColumnInfo info = codec.mInfo; int num = columnNumbers.get(info.name); String sfName = rowGen.stateField(num); if (!sfName.equals(stateFieldName)) { stateFieldName = sfName; stateField = rowVar.field(stateFieldName).get(); } int sfMask = RowGen.stateFieldMask(num); Label cont = mm.label(); stateField.and(sfMask).ifEq(sfMask, cont); codec.decode(rowVar.field(info.name), valueVar, offsetVars[i], null); cont.here(); } if (triggers != 0) { var triggerVar = mm.var(Trigger.class); Label skipLabel = mm.label(); prepareForTrigger(mm, tableVar, triggerVar, skipLabel); Label triggerStart = mm.label().here(); var txnVar = cursorVar.invoke("link"); var keyVar = cursorVar.invoke("key"); triggerVar.invoke("store", txnVar, rowVar, keyVar, valueVar, newValueVar); cursorVar.invoke("commit", newValueVar); Label cont = mm.label().goto_(); skipLabel.here(); cursorVar.invoke("commit", newValueVar); mm.finally_(triggerStart, () -> triggerVar.invoke("releaseShared")); cont.here(); } markAllClean(rowVar, rowGen, rowGen); } private record UpdateEntry(Variable newEntryVar, Variable[] offsetVars) {} /** * Makes code which encodes a new entry (a key or value) by comparing dirty row columns to * the original entry. Returns the new entry and the column offsets in the original entry. * * @param schemaVersion pass 0 if entry is a key instead of a value; implies that caller * must handle the case where the value must be empty * @param rowVar non-null * @param tableVar doesn't need to be initialized (is used to invoke a static method) * @param entryVar original non-null encoded key or value */ private static UpdateEntry encodeUpdateEntry (MethodMaker mm, RowInfo rowInfo, int schemaVersion, Variable tableVar, Variable rowVar, Variable entryVar) { RowGen rowGen = rowInfo.rowGen(); ColumnCodec[] codecs; int fixedOffset; if (schemaVersion == 0) { codecs = rowGen.keyCodecs(); fixedOffset = 0; } else { codecs = rowGen.valueCodecs(); Variable decodeVersion = mm.var(RowUtils.class).invoke("decodeSchemaVersion", entryVar); Label sameVersion = mm.label(); decodeVersion.ifEq(schemaVersion, sameVersion); // If different schema versions, decode and re-encode a new entry, and then go to // the next step. The simplest way to perform this conversion is to create a new // temp row object, decode the entry into it, and then create a new entry from it. var tempRowVar = mm.new_(rowVar); tableVar.invoke("decodeValue", tempRowVar, entryVar); entryVar.set(tableVar.invoke("encodeValue", tempRowVar)); sameVersion.here(); fixedOffset = schemaVersion < 128 ? 1 : 4; } // Identify the offsets to all the columns in the original entry, and calculate the // size of the new entry. 
Map<String, Integer> columnNumbers = rowGen.columnNumbers(); codecs = ColumnCodec.bind(codecs, mm); Variable[] offsetVars = new Variable[codecs.length]; var offsetVar = mm.var(int.class).set(fixedOffset); var newSizeVar = mm.var(int.class).set(fixedOffset); // need room for schemaVersion String stateFieldName = null; Variable stateField = null; for (int i=0; i<codecs.length; i++) { ColumnCodec codec = codecs[i]; codec.encodePrepare(); offsetVars[i] = offsetVar.get(); codec.decodeSkip(entryVar, offsetVar, null); ColumnInfo info = codec.mInfo; int num = columnNumbers.get(info.name); String sfName = rowGen.stateField(num); if (!sfName.equals(stateFieldName)) { stateFieldName = sfName; stateField = rowVar.field(stateFieldName).get(); } int sfMask = RowGen.stateFieldMask(num); Label isDirty = mm.label(); stateField.and(sfMask).ifEq(sfMask, isDirty); // Add in the size of original column, which won't be updated. codec.encodeSkip(); newSizeVar.inc(offsetVar.sub(offsetVars[i])); Label cont = mm.label().goto_(); // Add in the size of the dirty column, which needs to be encoded. isDirty.here(); newSizeVar.inc(codec.minSize()); codec.encodeSize(rowVar.field(info.name), newSizeVar); cont.here(); } // Encode the new byte[] entry... var newEntryVar = mm.new_(byte[].class, newSizeVar); var srcOffsetVar = mm.var(int.class).set(0); var dstOffsetVar = mm.var(int.class).set(0); var spanLengthVar = mm.var(int.class).set(schemaVersion < 128 ? 1 : 4); var sysVar = mm.var(System.class); for (int i=0; i<codecs.length; i++) { ColumnCodec codec = codecs[i]; ColumnInfo info = codec.mInfo; int num = columnNumbers.get(info.name); Variable columnLenVar; { Variable endVar; if (i + 1 < codecs.length) { endVar = offsetVars[i + 1]; } else { endVar = entryVar.alength(); } columnLenVar = endVar.sub(offsetVars[i]); } int sfMask = RowGen.stateFieldMask(num); Label isDirty = mm.label(); stateField.and(sfMask).ifEq(sfMask, isDirty); // Increase the copy span length. Label cont = mm.label(); spanLengthVar.inc(columnLenVar); mm.goto_(cont); isDirty.here(); // Copy the current span and prepare for the next span. { Label noSpan = mm.label(); spanLengthVar.ifEq(0, noSpan); sysVar.invoke("arraycopy", entryVar, srcOffsetVar, newEntryVar, dstOffsetVar, spanLengthVar); srcOffsetVar.inc(spanLengthVar); dstOffsetVar.inc(spanLengthVar); spanLengthVar.set(0); noSpan.here(); } // Encode the dirty column, and skip over the original column value. codec.encode(rowVar.field(info.name), newEntryVar, dstOffsetVar); srcOffsetVar.inc(columnLenVar); cont.here(); } // Copy any remaining span. { Label noSpan = mm.label(); spanLengthVar.ifEq(0, noSpan); sysVar.invoke("arraycopy", entryVar, srcOffsetVar, newEntryVar, dstOffsetVar, spanLengthVar); noSpan.here(); } return new UpdateEntry(newEntryVar, offsetVars); } /** * Delegates to the doUpdate method. */ private void addUpdateMethod(String variant, boolean merge) { MethodMaker mm = mClassMaker.addMethod (boolean.class, variant, Transaction.class, Object.class).public_(); Variable txnVar = mm.param(0); Variable rowVar = mm.param(1).cast(mRowClass); Variable source = mm.field("mSource"); txnVar.set(mm.var(ViewUtils.class).invoke("enterScope", source, txnVar)); Label tryStart = mm.label().here(); mm.invoke("redoPredicateMode", txnVar); mm.return_(mm.invoke("doUpdate", txnVar, rowVar, merge)); mm.finally_(tryStart, () -> txnVar.invoke("exit")); } /** * Makes code which obtains the current trigger and acquires the lock which must be held * for the duration of the operation. 
The lock must be held even if no trigger must be run. * * @param triggerVar type is Trigger and is assigned by the generated code * @param skipLabel label to branch when trigger shouldn't run */ private static void prepareForTrigger(MethodMaker mm, Variable tableVar, Variable triggerVar, Label skipLabel) { Label acquireTriggerLabel = mm.label().here(); triggerVar.set(tableVar.invoke("trigger")); triggerVar.invoke("acquireShared"); var modeVar = triggerVar.invoke("mode"); modeVar.ifEq(Trigger.SKIP, skipLabel); Label activeLabel = mm.label(); modeVar.ifNe(Trigger.DISABLED, activeLabel); triggerVar.invoke("releaseShared"); mm.goto_(acquireTriggerLabel); activeLabel.here(); } private void markAllClean(Variable rowVar) { markAllClean(rowVar, mRowGen, mCodecGen); } private static void markAllClean(Variable rowVar, RowGen rowGen, RowGen codecGen) { if (rowGen == codecGen) { // isPrimaryTable, so truly mark all clean int mask = 0x5555_5555; int i = 0; String[] stateFields = rowGen.stateFields(); for (; i < stateFields.length - 1; i++) { rowVar.field(stateFields[i]).set(mask); } mask >>>= (32 - ((rowGen.info.allColumns.size() & 0b1111) << 1)); rowVar.field(stateFields[i]).set(mask); } else { // Only mark columns clean that are defined by codecGen. All others are unset. markClean(rowVar, rowGen, codecGen.info.allColumns); } } /** * Mark only the given columns as CLEAN. All others are UNSET. */ private static void markClean(final Variable rowVar, final RowGen rowGen, final Map<String, ColumnInfo> columns) { final int maxNum = rowGen.info.allColumns.size(); int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? rowGen.keyCodecs() : rowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { if (columns.containsKey(codec.mInfo.name)) { mask |= RowGen.stateFieldMask(num, 0b01); // clean state } if ((++num & 0b1111) == 0 || num >= maxNum) { rowVar.field(rowGen.stateField(num - 1)).set(mask); mask = 0; } } } } private void addMarkAllCleanMethod() { // Used by filter implementations, and it must be public because filters are defined in // a different package. MethodMaker mm = mClassMaker.addMethod(null, "markAllClean", mRowClass).public_().static_(); markAllClean(mm.param(0)); } /** * Remaining states are UNSET or CLEAN. */ private static void markAllUndirty(Variable rowVar, RowInfo info) { int mask = 0x5555_5555; int i = 0; String[] stateFields = info.rowGen().stateFields(); for (; i < stateFields.length - 1; i++) { var field = rowVar.field(stateFields[i]); field.set(field.and(mask)); } mask >>>= (32 - ((info.allColumns.size() & 0b1111) << 1)); var field = rowVar.field(stateFields[i]); field.set(field.and(mask)); } /** * Mark all the value columns as UNSET without modifying the key column states. */ private void markValuesUnset(Variable rowVar) { if (isPrimaryTable()) { // Clear the value column state fields. Skip the key columns, which are numbered // first. Note that the codecs are accessed, to match encoding order. 
int num = mRowInfo.keyColumns.size(); int mask = 0; for (ColumnCodec codec : mRowGen.valueCodecs()) { mask |= RowGen.stateFieldMask(num); if (isMaskReady(++num, mask)) { mask = maskRemainder(num, mask); Field field = stateField(rowVar, num - 1); mask = ~mask; if (mask == 0) { field.set(mask); } else { field.set(field.and(mask)); mask = 0; } } } return; } final Map<String, ColumnInfo> keyColumns = mCodecGen.info.keyColumns; final int maxNum = mRowInfo.allColumns.size(); int num = 0, mask = 0; for (int step = 0; step < 2; step++) { // Key columns are numbered before value columns. Add checks in two steps. // Note that the codecs are accessed, to match encoding order. var baseCodecs = step == 0 ? mRowGen.keyCodecs() : mRowGen.valueCodecs(); for (ColumnCodec codec : baseCodecs) { if (!keyColumns.containsKey(codec.mInfo.name)) { mask |= RowGen.stateFieldMask(num); } if ((++num & 0b1111) == 0 || num >= maxNum) { Field field = rowVar.field(mRowGen.stateField(num - 1)); mask = ~mask; if (mask == 0) { field.set(mask); } else { field.set(field.and(mask)); mask = 0; } } } } } private Field stateField(Variable rowVar, int columnNum) { return rowVar.field(mRowGen.stateField(columnNum)); } private void addToRowMethod() { MethodMaker mm = mClassMaker.addMethod(mRowType, "toRow", byte[].class).protected_(); var rowVar = mm.new_(mRowClass); mm.invoke("decodePrimaryKey", rowVar, mm.param(0)); markClean(rowVar, mRowGen, mCodecGen.info.keyColumns); mm.return_(rowVar); mm = mClassMaker.addMethod(Object.class, "toRow", byte[].class).protected_().bridge(); mm.return_(mm.this_().invoke(mRowType, "toRow", null, mm.param(0))); } private void addToKeyMethod() { MethodMaker mm = mClassMaker.addMethod(byte[].class, "toKey", Object.class).protected_(); mm.return_(mm.invoke("encodePrimaryKey", mm.param(0).cast(mRowClass))); } private void addRowStoreRefMethod() { MethodMaker mm = mClassMaker.addMethod(WeakReference.class, "rowStoreRef").protected_(); mm.return_(mm.var(WeakReference.class).setExact(mStore.ref())); } private void addSecondaryDescriptorMethod() { MethodMaker mm = mClassMaker.addMethod(byte[].class, "secondaryDescriptor").protected_(); mm.return_(mm.var(byte[].class).setExact(mSecondaryDescriptor)); } /** * Defines a method which returns a singleton SingleScanController instance. */ private void addUnfilteredMethod() { MethodMaker mm = mClassMaker.addMethod (SingleScanController.class, "unfiltered").protected_(); var condy = mm.var(TableMaker.class).condy ("condyDefineUnfiltered", mRowType, mRowClass, mSecondaryDescriptor); mm.return_(condy.invoke(SingleScanController.class, "unfiltered")); } /** * @param secondaryDesc pass null for primary table */ public static Object condyDefineUnfiltered(MethodHandles.Lookup lookup, String name, Class type, Class rowType, Class rowClass, byte[] secondaryDesc) throws Throwable { RowInfo rowInfo = RowInfo.find(rowType); RowGen rowGen = rowInfo.rowGen(); RowGen codecGen = rowGen; if (secondaryDesc != null) { codecGen = RowStore.indexRowInfo(rowInfo, secondaryDesc).rowGen(); } ClassMaker cm = RowGen.beginClassMaker (TableMaker.class, rowType, rowInfo, null, "Unfiltered") .extend(SingleScanController.class).public_(); // Constructor is protected, for use by filter implementation subclasses. 
MethodType ctorType; { ctorType = MethodType.methodType (void.class, byte[].class, boolean.class, byte[].class, boolean.class); MethodMaker mm = cm.addConstructor(ctorType).protected_(); mm.invokeSuperConstructor(mm.param(0), mm.param(1), mm.param(2), mm.param(3)); } { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod (Object.class, "decodeRow", Cursor.class, LockResult.class, Object.class).public_(); var tableVar = mm.var(lookup.lookupClass()); var rowVar = mm.param(2).cast(rowClass); Label hasRow = mm.label(); rowVar.ifNe(null, hasRow); rowVar.set(mm.new_(rowClass)); hasRow.here(); var cursorVar = mm.param(0); tableVar.invoke("decodePrimaryKey", rowVar, cursorVar.invoke("key")); tableVar.invoke("decodeValue", rowVar, cursorVar.invoke("value")); markAllClean(rowVar, rowGen, codecGen); mm.return_(rowVar); } { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod(byte[].class, "encodeKey", Object.class).public_(); var rowVar = mm.param(0).cast(rowClass); var tableVar = mm.var(lookup.lookupClass()); Label unchanged = mm.label(); tableVar.invoke("checkPrimaryKeyAnyDirty", rowVar).ifFalse(unchanged); mm.return_(tableVar.invoke("encodePrimaryKey", rowVar)); unchanged.here(); mm.return_(null); } { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod(byte[].class, "encodeValue", Object.class).public_(); var rowVar = mm.param(0).cast(rowClass); var tableVar = mm.var(lookup.lookupClass()); mm.return_(tableVar.invoke("encodeValue", rowVar)); } { // Specified by ScanController. MethodMaker mm = cm.addMethod(QueryPlan.class, "plan").public_(); var condy = mm.var(TableMaker.class).condy("condyPlan", rowType, secondaryDesc, 0); mm.return_(condy.invoke(QueryPlan.class, "plan")); } if (rowGen == codecGen) { // isPrimaryTable, so a schema must be decoded // Used by filter subclasses. The int param is the schema version. MethodMaker mm = cm.addMethod (MethodHandle.class, "decodeValueHandle", int.class).protected_().static_(); var tableVar = mm.var(lookup.lookupClass()); mm.return_(tableVar.invoke("decodeValueHandle", mm.param(0))); } var clazz = cm.finish(); return lookup.findConstructor(clazz, ctorType).invoke(null, false, null, false); } public static QueryPlan condyPlan(MethodHandles.Lookup lookup, String name, Class type, Class rowType, byte[] secondaryDesc, int joinOption) { RowInfo primaryRowInfo = RowInfo.find(rowType); RowInfo rowInfo; String which; if (secondaryDesc == null) { rowInfo = primaryRowInfo; which = "primary key"; } else { rowInfo = RowStore.indexRowInfo(primaryRowInfo, secondaryDesc); which = rowInfo.isAltKey() ? "alternate key" : "secondary index"; } QueryPlan plan = new QueryPlan.FullScan(rowInfo.name, which, rowInfo.keySpec(), false); if (joinOption != 0) { rowInfo = primaryRowInfo; plan = new QueryPlan.NaturalJoin(rowInfo.name, "primary key", rowInfo.keySpec(), plan); } return plan; } /** * Define a static method which encodes a primary key when given an encoded secondary key. * * @param hasRow true to pass a row with a fully specified key instead of an encoded key * @param define true to actually define, false to delegate to it */ private void addToPrimaryKeyMethod(ClassMaker cm, boolean hasRow, boolean define) { RowInfo info = mCodecGen.info; Object[] params; if (info.isAltKey()) { // Needs the secondary key and value. params = new Object[] {byte[].class, byte[].class}; } else { // Only needs the secondary key. 
params = new Object[] {byte[].class}; } if (hasRow) { params[0] = mRowClass; } MethodMaker mm = cm.addMethod(byte[].class, "toPrimaryKey", params).static_(); Variable pkVar; if (define) { pkVar = IndexTriggerMaker.makeToPrimaryKey(mm, mRowType, mRowClass, mRowInfo, info); } else { mm.protected_(); var tableVar = mm.var(mClassMaker); if (params.length == 2) { pkVar = tableVar.invoke("toPrimaryKey", mm.param(0), mm.param(1)); } else { pkVar = tableVar.invoke("toPrimaryKey", mm.param(0)); } } mm.return_(pkVar); } /** * Returns a subclass of JoinedScanController with the same constructor. */ private Class<?> makeUnfilteredJoinedScanControllerClass(Class<?> primaryTableClass) { ClassMaker cm = RowGen.beginClassMaker (TableMaker.class, mRowType, mRowInfo, null, "Unfiltered") .extend(JoinedScanController.class).public_(); // Constructor is protected, for use by filter implementation subclasses. { MethodMaker mm = cm.addConstructor (byte[].class, boolean.class, byte[].class, boolean.class, Index.class); mm.protected_(); mm.invokeSuperConstructor (mm.param(0), mm.param(1), mm.param(2), mm.param(3), mm.param(4)); } // Provide access to the toPrimaryKey method to be accessible by filter implementation // subclasses, which are defined in a different package. addToPrimaryKeyMethod(cm, false, false); // Note regarding the RowDecoderEncoder methods: The decode methods fully resolve rows // by joining to the primary table, and the encode methods return bytes for storing // into the primary table. // Specified by RowDecoderEncoder. addJoinedDecodeRow(cm, primaryTableClass, false); addJoinedDecodeRow(cm, primaryTableClass, true); { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod(byte[].class, "encodeKey", Object.class).public_(); var rowVar = mm.param(0).cast(mRowClass); var tableVar = mm.var(primaryTableClass); mm.return_(tableVar.invoke("encodePrimaryKey", rowVar)); } { // Specified by RowDecoderEncoder. MethodMaker mm = cm.addMethod(byte[].class, "encodeValue", Object.class).public_(); var rowVar = mm.param(0).cast(mRowClass); var tableVar = mm.var(primaryTableClass); mm.return_(tableVar.invoke("encodeValue", rowVar)); } { // Specified by ScanController. MethodMaker mm = cm.addMethod(QueryPlan.class, "plan").public_(); var condy = mm.var(TableMaker.class).condy ("condyPlan", mRowType, mSecondaryDescriptor, 1); mm.return_(condy.invoke(QueryPlan.class, "plan")); } { // Used by filter subclasses when joining to the primary. The int param is the // schema version. 
MethodMaker mm = cm.addMethod (MethodHandle.class, "decodeValueHandle", int.class).protected_().static_(); var tableVar = mm.var(primaryTableClass); mm.return_(tableVar.invoke("decodeValueHandle", mm.param(0))); } return cm.finish(); } private void addJoinedDecodeRow(ClassMaker cm, Class<?> primaryTableClass, boolean withPrimaryCursor) { Object[] params; if (!withPrimaryCursor) { params = new Object[] {Cursor.class, LockResult.class, Object.class}; } else { params = new Object[] {Cursor.class, LockResult.class, Object.class, Cursor.class}; } MethodMaker mm = cm.addMethod(Object.class, "decodeRow", params).public_(); var cursorVar = mm.param(0); var resultVar = mm.param(1); var keyVar = cursorVar.invoke("key"); Variable primaryKeyVar; { var tableVar = mm.var(mClassMaker); if (mCodecGen.info.isAltKey()) { var valueVar = cursorVar.invoke("value"); primaryKeyVar = tableVar.invoke("toPrimaryKey", keyVar, valueVar); } else { primaryKeyVar = tableVar.invoke("toPrimaryKey", keyVar); } } if (!withPrimaryCursor) { params = new Object[] {cursorVar, resultVar, primaryKeyVar}; } else { params = new Object[] {cursorVar, resultVar, primaryKeyVar, mm.param(3)}; } var primaryValueVar = mm.invoke("join", params); Label hasValue = mm.label(); primaryValueVar.ifNe(null, hasValue); mm.return_(null); hasValue.here(); var rowVar = mm.param(2).cast(mRowClass); Label hasRow = mm.label(); rowVar.ifNe(null, hasRow); rowVar.set(mm.new_(mRowClass)); hasRow.here(); var tableVar = mm.var(primaryTableClass); tableVar.invoke("decodePrimaryKey", rowVar, primaryKeyVar); tableVar.invoke("decodeValue", rowVar, primaryValueVar); tableVar.invoke("markAllClean", rowVar); mm.return_(rowVar); } }
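The markAllClean, markClean, markValuesUnset and markAllUndirty methods above all manipulate packed column-state fields: two bits per column, sixteen columns per int field, with 0b01 meaning CLEAN, which is why 0x5555_5555 stands for "all columns clean". The sketch below only restates that packing in plain Java; the class and method names are illustrative rather than the generated names, and the exact DIRTY bit pattern is an assumption.

class ColumnStateSketch {
    static final int CLEAN = 0b01; // low bit set: clean
    static final int DIRTY = 0b11; // assumed encoding; AND-ing with 0x5555_5555 turns it into CLEAN

    // Which int state field holds the given column (sixteen columns per field).
    static int stateFieldIndex(int columnNum) {
        return columnNum >> 4;
    }

    // Mask selecting the two state bits of the given column within its field.
    static int stateFieldMask(int columnNum) {
        return 0b11 << ((columnNum & 0b1111) << 1);
    }

    // Mask that marks the given column CLEAN when OR-ed into its state field.
    static int cleanMask(int columnNum) {
        return CLEAN << ((columnNum & 0b1111) << 1);
    }
}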
Using a record is overkill. It generates a much larger class file.
src/main/java/org/cojen/tupl/rows/TableMaker.java
Using a record is overkill. It generates a much larger class file.
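The commit message above ("Using a record is overkill. It generates a much larger class file.") refers to the UpdateEntry record in TableMaker: besides the fields and constructor, a record also gets compiler-generated accessors, equals(), hashCode() and toString(). A minimal sketch of that trade-off, assuming the replacement is a plain holder class; Pair and PlainPair are illustrative names, not taken from the repository.

// Record form: the compiler additionally emits first(), second(), equals(),
// hashCode() and toString(), all of which add bytecode to the class file.
record Pair(Object first, Object second) {}

// Plain holder form: only the fields and the constructor, which is typically
// smaller when none of the generated methods are ever called.
final class PlainPair {
    final Object first;
    final Object second;

    PlainPair(Object first, Object second) {
        this.first = first;
        this.second = second;
    }
}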
Java
lgpl-2.1
a07a295f3a6e4909cd788d51a039842d017ccdad
0
alx/reprap-host-software,alx/reprap-host-software
/* * * !!!!! * NOTE: PLEASE ONLY EDIT THIS USING THE NETBEANS IDE 6.0.1 OR HIGHER!!!! * !!!!! * * ... an .xml file is associated with this class. Cheers. * * GenericExtruderTabPanel.java * * Created on 27 March 2008, 18:22 */ package org.reprap.gui.botConsole; import org.reprap.Preferences; import javax.swing.JOptionPane; import org.reprap.comms.snap.SNAPAddress; import org.reprap.comms.snap.SNAPCommunicator; import org.reprap.devices.GenericExtruder; import java.awt.Color; /** * * @author en0es */ public class GenericExtruderTabPanel extends javax.swing.JPanel { private int extruderID; private Thread pollThread = null; private boolean pollThreadExiting = false; private GenericExtruder extruder; /** Creates new form GenericExtruderTabPanel */ public GenericExtruderTabPanel() { initComponents(); pollThread = new Thread() { public void run() { Thread.currentThread().setName("GUI Poll"); while(!pollThreadExiting) { try { Thread.sleep(500); refreshTemperature(); } catch (InterruptedException ex) { // This is normal when shutting down, so ignore } } } }; pollThread.start(); // try { // // // Test presence by pulsing fan // extruder.setCooler(true); // extruder.setCooler(false); //// pollThread.start(); // } // catch (Exception ex) { // deactivatePanel(); // return; // } } private void deactivatePanel() { // extruder.dispose(); extruder = null; coolingCheck.setEnabled(false); currentTempLabel.setEnabled(false); extrudeButton.setEnabled(false); feedstockQtyLabel.setEnabled(false); heatButton.setEnabled(false); homeAllButton.setEnabled(false); jLabel1.setEnabled(false); jLabel11.setEnabled(false); jLabel12.setEnabled(false); jLabel4.setEnabled(false); jLabel5.setEnabled(false); jLabel6.setEnabled(false); jLabel7.setEnabled(false); jPanel2.setEnabled(false); jPanel3.setEnabled(false); jPanel4.setEnabled(false); materialLabel.setEnabled(false); motorReverseCheck.setEnabled(false); motorSpeedField.setEnabled(false); moveToSwapButton.setEnabled(false); nozzleWipeButton.setEnabled(false); targetTempField.setEnabled(false); tempColor.setEnabled(false); tempProgress.setEnabled(false); } private String prefix; public void initialiseExtruders(int id) throws Exception { extruderID = id; prefix = "Extruder" + id + "_"; extruder = new GenericExtruder(org.reprap.Main.getCommunicator(), new SNAPAddress(Preferences.loadGlobalInt(prefix + "Address")), Preferences.getGlobalPreferences(), extruderID); } /** This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. The content of this method is * always regenerated by the Form Editor. 
*/ // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jLabel1 = new javax.swing.JLabel(); materialLabel = new javax.swing.JLabel(); feedstockQtyLabel = new javax.swing.JLabel(); jLabel4 = new javax.swing.JLabel(); jLabel5 = new javax.swing.JLabel(); jPanel2 = new javax.swing.JPanel(); targetTempField = new javax.swing.JTextField(); jLabel7 = new javax.swing.JLabel(); currentTempLabel = new javax.swing.JLabel(); jLabel6 = new javax.swing.JLabel(); tempProgress = new javax.swing.JProgressBar(); tempColor = new javax.swing.JPanel(); heatButton = new javax.swing.JToggleButton(); coolingCheck = new javax.swing.JCheckBox(); jPanel3 = new javax.swing.JPanel(); motorSpeedField = new javax.swing.JTextField(); jLabel12 = new javax.swing.JLabel(); jLabel11 = new javax.swing.JLabel(); motorReverseCheck = new javax.swing.JCheckBox(); extrudeButton = new javax.swing.JToggleButton(); jPanel4 = new javax.swing.JPanel(); nozzleWipeButton = new javax.swing.JToggleButton(); homeAllButton = new javax.swing.JToggleButton(); moveToSwapButton = new javax.swing.JToggleButton(); jLabel1.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel1.setText("Material:"); materialLabel.setFont(new java.awt.Font("Tahoma", 0, 12)); materialLabel.setText("materialType"); feedstockQtyLabel.setFont(new java.awt.Font("Tahoma", 0, 12)); feedstockQtyLabel.setText("00000"); jLabel4.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel4.setText("Feedstock remaining:"); jLabel5.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel5.setText("ml"); jPanel2.setBorder(javax.swing.BorderFactory.createTitledBorder("Temperature (degrees Celcius)")); targetTempField.setColumns(3); targetTempField.setFont(targetTempField.getFont().deriveFont(targetTempField.getFont().getSize()+1f)); targetTempField.setText("000"); jLabel7.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel7.setText("Target temperature:"); currentTempLabel.setFont(new java.awt.Font("Tahoma", 0, 12)); currentTempLabel.setText("000"); jLabel6.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel6.setText("Current temperature:"); tempProgress.setOrientation(1); tempColor.setBackground(new java.awt.Color(255, 255, 255)); javax.swing.GroupLayout tempColorLayout = new javax.swing.GroupLayout(tempColor); tempColor.setLayout(tempColorLayout); tempColorLayout.setHorizontalGroup( tempColorLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 52, Short.MAX_VALUE) ); tempColorLayout.setVerticalGroup( tempColorLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 48, Short.MAX_VALUE) ); heatButton.setText("Heat is off"); heatButton.setFocusCycleRoot(true); heatButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { heatButtonActionPerformed(evt); } }); coolingCheck.setFont(coolingCheck.getFont().deriveFont(coolingCheck.getFont().getSize()+1f)); coolingCheck.setText("Cooling"); coolingCheck.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { coolingCheckActionPerformed(evt); } }); javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup() .addContainerGap() 
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(coolingCheck) .addGroup(jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel6) .addGap(18, 18, 18) .addComponent(currentTempLabel)) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel7) .addGap(18, 18, 18) .addComponent(targetTempField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 105, Short.MAX_VALUE) .addComponent(tempProgress, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(tempColor, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(heatButton, javax.swing.GroupLayout.PREFERRED_SIZE, 110, javax.swing.GroupLayout.PREFERRED_SIZE))) .addContainerGap()) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(heatButton, javax.swing.GroupLayout.DEFAULT_SIZE, 48, Short.MAX_VALUE) .addComponent(tempProgress, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 48, Short.MAX_VALUE) .addComponent(tempColor, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(javax.swing.GroupLayout.Alignment.LEADING, jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel6) .addComponent(currentTempLabel)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel7) .addComponent(targetTempField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(coolingCheck)) ); jPanel3.setBorder(javax.swing.BorderFactory.createTitledBorder("Extrude")); motorSpeedField.setColumns(3); motorSpeedField.setFont(motorSpeedField.getFont().deriveFont(motorSpeedField.getFont().getSize()+1f)); motorSpeedField.setText("000"); jLabel12.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel12.setText("Rpm"); jLabel11.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel11.setText("Motor speed:"); motorReverseCheck.setFont(motorReverseCheck.getFont().deriveFont(motorReverseCheck.getFont().getSize()+1f)); motorReverseCheck.setText("Reverse"); motorReverseCheck.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { motorReverseCheckActionPerformed(evt); } }); extrudeButton.setText("Extrude"); extrudeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { extrudeButtonActionPerformed(evt); } }); 
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3); jPanel3.setLayout(jPanel3Layout); jPanel3Layout.setHorizontalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addComponent(jLabel11) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(motorSpeedField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jLabel12) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 106, Short.MAX_VALUE) .addComponent(motorReverseCheck) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(extrudeButton, javax.swing.GroupLayout.PREFERRED_SIZE, 110, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); jPanel3Layout.setVerticalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel11) .addComponent(motorSpeedField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel12) .addComponent(motorReverseCheck) .addComponent(extrudeButton, javax.swing.GroupLayout.PREFERRED_SIZE, 29, javax.swing.GroupLayout.PREFERRED_SIZE)) ); jPanel4.setBorder(javax.swing.BorderFactory.createTitledBorder("Maintenance")); nozzleWipeButton.setText("Nozzle wipe"); nozzleWipeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { nozzleWipeButtonActionPerformed(evt); } }); homeAllButton.setText("Home all"); moveToSwapButton.setText("Move to swap point"); javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4); jPanel4.setLayout(jPanel4Layout); jPanel4Layout.setHorizontalGroup( jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel4Layout.createSequentialGroup() .addContainerGap(189, Short.MAX_VALUE) .addComponent(homeAllButton) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(moveToSwapButton) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(nozzleWipeButton) .addContainerGap()) ); jPanel4Layout.setVerticalGroup( jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(nozzleWipeButton) .addComponent(moveToSwapButton) .addComponent(homeAllButton)) ); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(jPanel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addContainerGap()) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(jLabel1) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) 
.addComponent(materialLabel)) .addGroup(layout.createSequentialGroup() .addComponent(jLabel4) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(feedstockQtyLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jLabel5))) .addGap(202, 202, 202)) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(jPanel2, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jPanel3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap()))) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel1) .addComponent(materialLabel)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(jLabel5) .addComponent(feedstockQtyLabel)) .addGap(18, 18, 18) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jPanel4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(21, Short.MAX_VALUE)) ); }// </editor-fold>//GEN-END:initComponents private void coolingCheckActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_coolingCheckActionPerformed try { extruder.setCooler(coolingCheck.isSelected()); } catch (Exception ex) { JOptionPane.showMessageDialog(null, "Exception setting cooler: " + ex); ex.printStackTrace(); } }//GEN-LAST:event_coolingCheckActionPerformed private void motorReverseCheckActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_motorReverseCheckActionPerformed // TODO add your handling code here: }//GEN-LAST:event_motorReverseCheckActionPerformed private boolean unpushed = true; private void heatButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_heatButtonActionPerformed if (unpushed) { try { extruder.setTemperature(Integer.parseInt(targetTempField.getText())); } catch (Exception ex) { JOptionPane.showMessageDialog(null, "Exception setting temperature: " + ex); ex.printStackTrace(); } heatButton.setText("Heat is ON"); unpushed = false; } else { try { extruder.setTemperature(0); } catch (Exception ex) { JOptionPane.showMessageDialog(null, "Exception setting temperature: " + ex); ex.printStackTrace(); } heatButton.setText("Heat is off"); unpushed = true; } }//GEN-LAST:event_heatButtonActionPerformed private boolean extruding = false; private void extrudeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_extrudeButtonActionPerformed if (extruding) { extruding = false; extrudeButton.setText("Extrude"); } else { extruding = true; extrudeButton.setText("Stop"); System.out.println("Extruding at speed: " + 
motorSpeedField.getText()); } setExtruderSpeed(); }//GEN-LAST:event_extrudeButtonActionPerformed private void nozzleWipeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nozzleWipeButtonActionPerformed // Reprap.finishedLayer(1); // Reprap.betweenLayers(layerNumber); }//GEN-LAST:event_nozzleWipeButtonActionPerformed private void setExtruderSpeed() { try { extruder.setExtrusion(extruding?Integer.parseInt(motorSpeedField.getText()):0, motorReverseCheck.isSelected()); } catch (Exception ex) { JOptionPane.showMessageDialog(null, "Extruder exception: " + ex); ex.printStackTrace(); } } public void setPrefs() throws Exception { setMaterialLabel(Preferences.loadGlobalString(prefix + "MaterialType(name)")); setMotorSpeedField(Preferences.loadGlobalInt(prefix + "ExtrusionSpeed(0..255)")); setTargetTempField(Preferences.loadGlobalInt(prefix + "ExtrusionTemp(C)")); } private void setMaterialLabel(String materialType) { materialLabel.setText(materialType); } private void setMotorSpeedField(int speed) { motorSpeedField.setText(""+speed); } private void setTargetTempField(int temp) { targetTempField.setText(""+temp); } private int currentTemp; private final int BURNING_TEMP = 70; private double colorFactor = 0; private Color c; private void refreshTemperature() { currentTemp = (int)Math.round(extruder.getTemperature()); currentTempLabel.setText("" + currentTemp); tempProgress.setMinimum(0); tempProgress.setMaximum(Integer.parseInt(targetTempField.getText())); tempProgress.setValue(currentTemp); colorFactor = currentTemp/(BURNING_TEMP*1.0); if (colorFactor > 1) colorFactor = 1; if (colorFactor < 0) colorFactor = 0; int red = (int)(colorFactor * 255.0); int blue = 255-(int)(colorFactor * 255.0); c = new Color(red, 0, blue); tempColor.setBackground(c); } // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JCheckBox coolingCheck; private javax.swing.JLabel currentTempLabel; private javax.swing.JToggleButton extrudeButton; private javax.swing.JLabel feedstockQtyLabel; private javax.swing.JToggleButton heatButton; private javax.swing.JToggleButton homeAllButton; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel11; private javax.swing.JLabel jLabel12; private javax.swing.JLabel jLabel4; private javax.swing.JLabel jLabel5; private javax.swing.JLabel jLabel6; private javax.swing.JLabel jLabel7; private javax.swing.JPanel jPanel2; private javax.swing.JPanel jPanel3; private javax.swing.JPanel jPanel4; private javax.swing.JLabel materialLabel; private javax.swing.JCheckBox motorReverseCheck; private javax.swing.JTextField motorSpeedField; private javax.swing.JToggleButton moveToSwapButton; private javax.swing.JToggleButton nozzleWipeButton; private javax.swing.JTextField targetTempField; private javax.swing.JPanel tempColor; private javax.swing.JProgressBar tempProgress; // End of variables declaration//GEN-END:variables }
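The refreshTemperature() method above maps the current temperature onto a blue-to-red colour, clamping at BURNING_TEMP (70). A self-contained sketch of just that mapping; the class and method names are illustrative and not part of the panel.

import java.awt.Color;

class TemperatureColorSketch {
    private static final int BURNING_TEMP = 70; // same threshold as the panel uses

    // 0 or below maps to pure blue, BURNING_TEMP or above to pure red, linear in between.
    static Color temperatureColor(int currentTemp) {
        double factor = Math.min(1.0, Math.max(0.0, currentTemp / (double) BURNING_TEMP));
        int red = (int) (factor * 255.0);
        return new Color(red, 0, 255 - red);
    }
}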
src/org/reprap/gui/botConsole/GenericExtruderTabPanel.java
/* * * !!!!! * NOTE: PLEASE ONLY EDIT THIS USING THE NETBEANS IDE 6.0.1 OR HIGHER!!!! * !!!!! * * ... an .xml file is associated with this class. Cheers. * * GenericExtruderTabPanel.java * * Created on 27 March 2008, 18:22 */ package org.reprap.gui.botConsole; import org.reprap.Preferences; import javax.swing.JOptionPane; import org.reprap.comms.snap.SNAPAddress; import org.reprap.comms.snap.SNAPCommunicator; import org.reprap.devices.GenericExtruder; import java.awt.Color; /** * * @author en0es */ public class GenericExtruderTabPanel extends javax.swing.JPanel { private int extruderID; private Thread pollThread = null; private boolean pollThreadExiting = false; private GenericExtruder extruder; /** Creates new form GenericExtruderTabPanel */ public GenericExtruderTabPanel() { initComponents(); pollThread = new Thread() { public void run() { Thread.currentThread().setName("GUI Poll"); while(!pollThreadExiting) { try { Thread.sleep(500); refreshTemperature(); } catch (InterruptedException ex) { // This is normal when shutting down, so ignore } } } }; try { // Test presence by pulsing fan extruder.setCooler(true); extruder.setCooler(false); pollThread.start(); } catch (Exception ex) { deactivatePanel(); return; } } private void deactivatePanel() { // extruder.dispose(); extruder = null; coolingCheck.setEnabled(false); currentTempLabel.setEnabled(false); extrudeButton.setEnabled(false); feedstockQtyLabel.setEnabled(false); heatButton.setEnabled(false); homeAllButton.setEnabled(false); jLabel1.setEnabled(false); jLabel11.setEnabled(false); jLabel12.setEnabled(false); jLabel4.setEnabled(false); jLabel5.setEnabled(false); jLabel6.setEnabled(false); jLabel7.setEnabled(false); jPanel2.setEnabled(false); jPanel3.setEnabled(false); jPanel4.setEnabled(false); materialLabel.setEnabled(false); motorReverseCheck.setEnabled(false); motorSpeedField.setEnabled(false); moveToSwapButton.setEnabled(false); nozzleWipeButton.setEnabled(false); targetTempField.setEnabled(false); tempColor.setEnabled(false); tempProgress.setEnabled(false); } private String prefix; public void initialiseExtruders(int id) throws Exception { extruderID = id; prefix = "Extruder" + id + "_"; extruder = new GenericExtruder(org.reprap.Main.getCommunicator(), new SNAPAddress(Preferences.loadGlobalInt(prefix + "Address")), Preferences.getGlobalPreferences(), extruderID); } /** This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. The content of this method is * always regenerated by the Form Editor. 
*/ // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jLabel1 = new javax.swing.JLabel(); materialLabel = new javax.swing.JLabel(); feedstockQtyLabel = new javax.swing.JLabel(); jLabel4 = new javax.swing.JLabel(); jLabel5 = new javax.swing.JLabel(); jPanel2 = new javax.swing.JPanel(); targetTempField = new javax.swing.JTextField(); jLabel7 = new javax.swing.JLabel(); currentTempLabel = new javax.swing.JLabel(); jLabel6 = new javax.swing.JLabel(); tempProgress = new javax.swing.JProgressBar(); tempColor = new javax.swing.JPanel(); heatButton = new javax.swing.JToggleButton(); coolingCheck = new javax.swing.JCheckBox(); jPanel3 = new javax.swing.JPanel(); motorSpeedField = new javax.swing.JTextField(); jLabel12 = new javax.swing.JLabel(); jLabel11 = new javax.swing.JLabel(); motorReverseCheck = new javax.swing.JCheckBox(); extrudeButton = new javax.swing.JToggleButton(); jPanel4 = new javax.swing.JPanel(); nozzleWipeButton = new javax.swing.JToggleButton(); homeAllButton = new javax.swing.JToggleButton(); moveToSwapButton = new javax.swing.JToggleButton(); jLabel1.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel1.setText("Material:"); materialLabel.setFont(new java.awt.Font("Tahoma", 0, 12)); materialLabel.setText("materialType"); feedstockQtyLabel.setFont(new java.awt.Font("Tahoma", 0, 12)); feedstockQtyLabel.setText("00000"); jLabel4.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel4.setText("Feedstock remaining:"); jLabel5.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel5.setText("ml"); jPanel2.setBorder(javax.swing.BorderFactory.createTitledBorder("Temperature (degrees Celcius)")); targetTempField.setColumns(3); targetTempField.setFont(targetTempField.getFont().deriveFont(targetTempField.getFont().getSize()+1f)); targetTempField.setText("000"); jLabel7.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel7.setText("Target temperature:"); currentTempLabel.setFont(new java.awt.Font("Tahoma", 0, 12)); currentTempLabel.setText("000"); jLabel6.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel6.setText("Current temperature:"); tempProgress.setOrientation(1); tempColor.setBackground(new java.awt.Color(255, 255, 255)); javax.swing.GroupLayout tempColorLayout = new javax.swing.GroupLayout(tempColor); tempColor.setLayout(tempColorLayout); tempColorLayout.setHorizontalGroup( tempColorLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 52, Short.MAX_VALUE) ); tempColorLayout.setVerticalGroup( tempColorLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 48, Short.MAX_VALUE) ); heatButton.setText("Heat is off"); heatButton.setFocusCycleRoot(true); heatButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { heatButtonActionPerformed(evt); } }); coolingCheck.setFont(coolingCheck.getFont().deriveFont(coolingCheck.getFont().getSize()+1f)); coolingCheck.setText("Cooling"); coolingCheck.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { coolingCheckActionPerformed(evt); } }); javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup() .addContainerGap() 
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(coolingCheck) .addGroup(jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel6) .addGap(18, 18, 18) .addComponent(currentTempLabel)) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel7) .addGap(18, 18, 18) .addComponent(targetTempField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 105, Short.MAX_VALUE) .addComponent(tempProgress, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(tempColor, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(heatButton, javax.swing.GroupLayout.PREFERRED_SIZE, 110, javax.swing.GroupLayout.PREFERRED_SIZE))) .addContainerGap()) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(heatButton, javax.swing.GroupLayout.DEFAULT_SIZE, 48, Short.MAX_VALUE) .addComponent(tempProgress, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 48, Short.MAX_VALUE) .addComponent(tempColor, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(javax.swing.GroupLayout.Alignment.LEADING, jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel6) .addComponent(currentTempLabel)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel7) .addComponent(targetTempField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(coolingCheck)) ); jPanel3.setBorder(javax.swing.BorderFactory.createTitledBorder("Extrude")); motorSpeedField.setColumns(3); motorSpeedField.setFont(motorSpeedField.getFont().deriveFont(motorSpeedField.getFont().getSize()+1f)); motorSpeedField.setText("000"); jLabel12.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel12.setText("Rpm"); jLabel11.setFont(new java.awt.Font("Tahoma", 0, 12)); jLabel11.setText("Motor speed:"); motorReverseCheck.setFont(motorReverseCheck.getFont().deriveFont(motorReverseCheck.getFont().getSize()+1f)); motorReverseCheck.setText("Reverse"); motorReverseCheck.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { motorReverseCheckActionPerformed(evt); } }); extrudeButton.setText("Extrude"); extrudeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { extrudeButtonActionPerformed(evt); } }); 
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3); jPanel3.setLayout(jPanel3Layout); jPanel3Layout.setHorizontalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addComponent(jLabel11) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(motorSpeedField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jLabel12) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 106, Short.MAX_VALUE) .addComponent(motorReverseCheck) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(extrudeButton, javax.swing.GroupLayout.PREFERRED_SIZE, 110, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); jPanel3Layout.setVerticalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel11) .addComponent(motorSpeedField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel12) .addComponent(motorReverseCheck) .addComponent(extrudeButton, javax.swing.GroupLayout.PREFERRED_SIZE, 29, javax.swing.GroupLayout.PREFERRED_SIZE)) ); jPanel4.setBorder(javax.swing.BorderFactory.createTitledBorder("Maintenance")); nozzleWipeButton.setText("Nozzle wipe"); nozzleWipeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { nozzleWipeButtonActionPerformed(evt); } }); homeAllButton.setText("Home all"); moveToSwapButton.setText("Move to swap point"); javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4); jPanel4.setLayout(jPanel4Layout); jPanel4Layout.setHorizontalGroup( jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel4Layout.createSequentialGroup() .addContainerGap(189, Short.MAX_VALUE) .addComponent(homeAllButton) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(moveToSwapButton) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(nozzleWipeButton) .addContainerGap()) ); jPanel4Layout.setVerticalGroup( jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(nozzleWipeButton) .addComponent(moveToSwapButton) .addComponent(homeAllButton)) ); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(jPanel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addContainerGap()) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(jLabel1) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) 
.addComponent(materialLabel)) .addGroup(layout.createSequentialGroup() .addComponent(jLabel4) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(feedstockQtyLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jLabel5))) .addGap(202, 202, 202)) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(jPanel2, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jPanel3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap()))) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel1) .addComponent(materialLabel)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(jLabel5) .addComponent(feedstockQtyLabel)) .addGap(18, 18, 18) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jPanel4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(21, Short.MAX_VALUE)) ); }// </editor-fold>//GEN-END:initComponents private void coolingCheckActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_coolingCheckActionPerformed try { extruder.setCooler(coolingCheck.isSelected()); } catch (Exception ex) { JOptionPane.showMessageDialog(null, "Exception setting cooler: " + ex); ex.printStackTrace(); } }//GEN-LAST:event_coolingCheckActionPerformed private void motorReverseCheckActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_motorReverseCheckActionPerformed // TODO add your handling code here: }//GEN-LAST:event_motorReverseCheckActionPerformed private boolean unpushed = true; private void heatButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_heatButtonActionPerformed if (unpushed) { try { extruder.setTemperature(Integer.parseInt(targetTempField.getText())); } catch (Exception ex) { JOptionPane.showMessageDialog(null, "Exception setting temperature: " + ex); ex.printStackTrace(); } heatButton.setText("Heat is ON"); unpushed = false; } else { try { extruder.setTemperature(0); } catch (Exception ex) { JOptionPane.showMessageDialog(null, "Exception setting temperature: " + ex); ex.printStackTrace(); } heatButton.setText("Heat is off"); unpushed = true; } }//GEN-LAST:event_heatButtonActionPerformed private boolean extruding = false; private void extrudeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_extrudeButtonActionPerformed if (extruding) { extruding = false; extrudeButton.setText("Extrude"); } else { extruding = true; extrudeButton.setText("Stop"); System.out.println("Extruding at speed: " + 
motorSpeedField.getText()); } setExtruderSpeed(); }//GEN-LAST:event_extrudeButtonActionPerformed private void nozzleWipeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nozzleWipeButtonActionPerformed // Reprap.finishedLayer(1); // Reprap.betweenLayers(layerNumber); }//GEN-LAST:event_nozzleWipeButtonActionPerformed private void setExtruderSpeed() { try { extruder.setExtrusion(extruding?Integer.parseInt(motorSpeedField.getText()):0, motorReverseCheck.isSelected()); } catch (Exception ex) { JOptionPane.showMessageDialog(null, "Extruder exception: " + ex); ex.printStackTrace(); } } public void setPrefs() throws Exception { setMaterialLabel(Preferences.loadGlobalString(prefix + "MaterialType(name)")); setMotorSpeedField(Preferences.loadGlobalInt(prefix + "ExtrusionSpeed(0..255)")); setTargetTempField(Preferences.loadGlobalInt(prefix + "ExtrusionTemp(C)")); } private void setMaterialLabel(String materialType) { materialLabel.setText(materialType); } private void setMotorSpeedField(int speed) { motorSpeedField.setText(""+speed); } private void setTargetTempField(int temp) { targetTempField.setText(""+temp); } private int currentTemp; private final int BURNING_TEMP = 70; private double colorFactor = 0; private Color c; private void refreshTemperature() { currentTemp = (int)Math.round(extruder.getTemperature()); currentTempLabel.setText("" + currentTemp); tempProgress.setMinimum(0); tempProgress.setMaximum(Integer.parseInt(targetTempField.getText())); tempProgress.setValue(currentTemp); colorFactor = currentTemp/(BURNING_TEMP*1.0); if (colorFactor > 1) colorFactor = 1; if (colorFactor < 0) colorFactor = 0; int red = (int)(colorFactor * 255.0); int blue = 255-(int)(colorFactor * 255.0); c = new Color(red, 0, blue); tempColor.setBackground(c); } // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JCheckBox coolingCheck; private javax.swing.JLabel currentTempLabel; private javax.swing.JToggleButton extrudeButton; private javax.swing.JLabel feedstockQtyLabel; private javax.swing.JToggleButton heatButton; private javax.swing.JToggleButton homeAllButton; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel11; private javax.swing.JLabel jLabel12; private javax.swing.JLabel jLabel4; private javax.swing.JLabel jLabel5; private javax.swing.JLabel jLabel6; private javax.swing.JLabel jLabel7; private javax.swing.JPanel jPanel2; private javax.swing.JPanel jPanel3; private javax.swing.JPanel jPanel4; private javax.swing.JLabel materialLabel; private javax.swing.JCheckBox motorReverseCheck; private javax.swing.JTextField motorSpeedField; private javax.swing.JToggleButton moveToSwapButton; private javax.swing.JToggleButton nozzleWipeButton; private javax.swing.JTextField targetTempField; private javax.swing.JPanel tempColor; private javax.swing.JProgressBar tempProgress; // End of variables declaration//GEN-END:variables }
Undid code for deactivation of the extruder panel if it is not present. git-svn-id: d2924805b4ae8fffc34e1a97dde5dc7532b2300d@1512 cb376a5e-1013-0410-a455-b6b1f9ac8223
src/org/reprap/gui/botConsole/GenericExtruderTabPanel.java
Undid code for deactivation of the extruder panel if it is not present.
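The commit message above refers to a presence check in the panel constructor: the extruder is pulsed via its cooler inside a try/catch, and the panel is greyed out when that fails. The sketch below isolates that pattern for clarity; it is illustrative only, the class and helper names (ExtruderPresenceCheckExample, isExtruderPresent) are hypothetical, and it assumes the GenericExtruder.setCooler API exactly as it appears in the source file above.

// Hypothetical helper (not part of the repository) showing the presence check
// used in the GenericExtruderTabPanel constructor: pulse the cooler once and
// treat any communication exception as "extruder not present".
class ExtruderPresenceCheckExample {
    static boolean isExtruderPresent(org.reprap.devices.GenericExtruder extruder) {
        try {
            extruder.setCooler(true);   // brief fan pulse to see if the device responds
            extruder.setCooler(false);
            return true;                // no exception: the extruder answered
        } catch (Exception ex) {
            return false;               // no response: the caller should deactivate the panel
        }
    }
}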
Java
unlicense
130131491f8aac4b9d708f1b8e944af607e466d6
0
Phylogeny/ExtraBitManipulation
package com.phylogeny.extrabitmanipulation.client.eventhandler; import org.lwjgl.opengl.GL11; import org.lwjgl.util.glu.Cylinder; import org.lwjgl.util.glu.Disk; import org.lwjgl.util.glu.GLU; import org.lwjgl.util.glu.Quadric; import org.lwjgl.util.glu.Sphere; import com.phylogeny.extrabitmanipulation.ExtraBitManipulation; import com.phylogeny.extrabitmanipulation.api.ChiselsAndBitsAPIAccess; import com.phylogeny.extrabitmanipulation.client.shape.Prism; import com.phylogeny.extrabitmanipulation.config.ConfigShapeRender; import com.phylogeny.extrabitmanipulation.config.ConfigShapeRenderPair; import com.phylogeny.extrabitmanipulation.helper.SculptSettingsHelper; import com.phylogeny.extrabitmanipulation.item.ItemBitWrench; import com.phylogeny.extrabitmanipulation.item.ItemBitToolBase; import com.phylogeny.extrabitmanipulation.item.ItemSculptingTool; import com.phylogeny.extrabitmanipulation.packet.PacketCycleBitWrenchMode; import com.phylogeny.extrabitmanipulation.packet.PacketSculpt; import com.phylogeny.extrabitmanipulation.reference.Configs; import com.phylogeny.extrabitmanipulation.reference.NBTKeys; import com.phylogeny.extrabitmanipulation.reference.Reference; import com.phylogeny.extrabitmanipulation.reference.Utility; import mod.chiselsandbits.api.APIExceptions.CannotBeChiseled; import mod.chiselsandbits.api.IBitAccess; import mod.chiselsandbits.api.IBitLocation; import mod.chiselsandbits.api.IChiselAndBitsAPI; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.GuiNewChat; import net.minecraft.client.gui.GuiScreen; import net.minecraft.client.renderer.GlStateManager; import net.minecraft.client.renderer.RenderGlobal; import net.minecraft.client.renderer.Tessellator; import net.minecraft.client.renderer.VertexBuffer; import net.minecraft.client.renderer.vertex.DefaultVertexFormats; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.math.AxisAlignedBB; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.RayTraceResult; import net.minecraft.util.EnumFacing; import net.minecraft.util.EnumHand; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.Vec3d; import net.minecraft.util.math.Vec3i; import net.minecraft.util.text.TextComponentString; import net.minecraft.world.World; import net.minecraftforge.client.event.DrawBlockHighlightEvent; import net.minecraftforge.client.event.MouseEvent; import net.minecraftforge.client.event.RenderWorldLastEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; public class ClientEventHandler { private int frameCounter; private Vec3d drawnStartPoint = null; private static final ResourceLocation ARROW_HEAD = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/ArrowHead.png"); private static final ResourceLocation ARROW_BIDIRECTIONAL = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/ArrowBidirectional.png"); private static final ResourceLocation ARROW_CYCLICAL = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/ArrowCyclical.png"); private static final ResourceLocation CIRCLE = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/Circle.png"); private static final ResourceLocation INVERSION = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/Inversion.png"); private static final int[] DIRECTION_FORWARD = new int[]{2, 0, 5, 4, 1, 3}; private static final int[] DIRECTION_BACKWARD = new int[]{1, 4, 0, 
5, 3, 2}; private static final int[] AXIS_FORWARD = new int[]{2, 3, 4, 5, 0, 1}; private static final int[] AXIS_BACKWARD = new int[]{4, 5, 0, 1, 2, 3}; private static final int[] SHAPE_CURVED = new int[]{1, 2, 0, 0, 0, 0, 0}; // private static final int[] SHAPE_FLAT = new int[]{3, 3, 3, 4, 5, 6, 3}; private static final int[] SHAPE_FLAT = new int[]{3, 3, 3, 6, 3, 3, 3}; @SubscribeEvent public void interceptMouseInput(MouseEvent event) { EntityPlayer player = Minecraft.getMinecraft().thePlayer; if (event.getDwheel() != 0) { ItemStack stack = player.getHeldItemMainhand(); if (stack != null && stack.getItem() instanceof ItemBitToolBase) { boolean forward = event.getDwheel() < 0; if (player.isSneaking()) { if (stack.getItem() instanceof ItemBitWrench) { ExtraBitManipulation.packetNetwork.sendToServer(new PacketCycleBitWrenchMode(forward)); } else { cycleSemiDiameter(player, stack, forward); } event.setCanceled(true); } else if (stack.getItem() instanceof ItemSculptingTool && (GuiScreen.isCtrlKeyDown() || GuiScreen.isAltKeyDown())) { if (GuiScreen.isCtrlKeyDown()) { cycleDirection(player, stack, forward); } else { cycleWallThickness(player, stack, forward); } event.setCanceled(true); } } else { drawnStartPoint = null; } } else if ((GuiScreen.isCtrlKeyDown() || GuiScreen.isAltKeyDown()) && event.isButtonstate()) { ItemStack stack = player.inventory.getCurrentItem(); if (stack != null) { Item item = stack.getItem(); if (item != null && item instanceof ItemSculptingTool) { if (GuiScreen.isCtrlKeyDown()) { if (event.getButton() == 1) { cycleShapeType(player, stack, item); } if (event.getButton() == 0) { toggleBitGridTargeted(player, stack); } } else { if (event.getButton() == 1) { toggleHollowShape(player, stack, item); } if (event.getButton() == 0) { toggleOpenEnds(player, stack); } } event.setCanceled(true); } } } else if (event.getButton() == 0) { if (!player.capabilities.allowEdit) return; ItemStack stack = player.inventory.getCurrentItem(); if (stack != null) { Item item = stack.getItem(); if (event.isButtonstate() && item instanceof ItemBitWrench) { event.setCanceled(true); } else if (item != null && item instanceof ItemSculptingTool) { boolean drawnMode = SculptSettingsHelper.getMode(player, stack.getTagCompound()) == 2; if (!drawnMode) { drawnStartPoint = null; } if (event.isButtonstate() || (drawnMode && drawnStartPoint != null)) { ItemSculptingTool toolItem = (ItemSculptingTool) item; boolean removeBits = toolItem.removeBits(); RayTraceResult target = Minecraft.getMinecraft().objectMouseOver; if (target != null && target.typeOfHit != RayTraceResult.Type.MISS) { if (target.typeOfHit == RayTraceResult.Type.BLOCK) { BlockPos pos = target.getBlockPos(); EnumFacing side = target.sideHit; Vec3d hit = target.hitVec; boolean swingTool = true; if (drawnMode && event.isButtonstate() && drawnStartPoint != null) { event.setCanceled(true); return; } if (!player.isSneaking() && drawnMode && event.isButtonstate()) { IBitLocation bitLoc = ChiselsAndBitsAPIAccess.apiInstance.getBitPos((float) hit.xCoord - pos.getX(), (float) hit.yCoord - pos.getY(), (float) hit.zCoord - pos.getZ(), side, pos, false); if (bitLoc != null) { int x = pos.getX(); int y = pos.getY(); int z = pos.getZ(); float x2 = x + bitLoc.getBitX() * Utility.PIXEL_F; float y2 = y + bitLoc.getBitY() * Utility.PIXEL_F; float z2 = z + bitLoc.getBitZ() * Utility.PIXEL_F; if (!removeBits) { x2 += side.getFrontOffsetX() * Utility.PIXEL_F; y2 += side.getFrontOffsetY() * Utility.PIXEL_F; z2 += side.getFrontOffsetZ() * Utility.PIXEL_F; } 
drawnStartPoint = new Vec3d(x2, y2, z2); } else { drawnStartPoint = null; swingTool = false; } } else { if (player.isSneaking()) { IChiselAndBitsAPI api = ChiselsAndBitsAPIAccess.apiInstance; IBitLocation bitLoc = api.getBitPos((float) hit.xCoord - pos.getX(), (float) hit.yCoord - pos.getY(), (float) hit.zCoord - pos.getZ(), side, pos, false); if (bitLoc != null) { try { IBitAccess bitAccess = api.getBitAccess(player.worldObj, pos); ItemStack bitStack = bitAccess.getBitAt(bitLoc.getBitX(), bitLoc.getBitY(), bitLoc.getBitZ()).getItemStack(1); SculptSettingsHelper.setBitStack(player, stack, removeBits, bitStack); if ((removeBits ? Configs.sculptSetBitWire : Configs.sculptSetBitSpade).shouldDisplayInChat()) { printChatMessageWithDeletion((removeBits ? "Removing only " : "Sculpting with ") + bitStack.getDisplayName().substring(15)); } } catch (CannotBeChiseled e) { event.setCanceled(true); return; } } } else if (!player.isSneaking() || removeBits || drawnMode) { swingTool = toolItem.sculptBlocks(stack, player, player.worldObj, pos, side, hit, drawnStartPoint); ExtraBitManipulation.packetNetwork.sendToServer(new PacketSculpt(pos, side, hit, drawnStartPoint)); } if (drawnMode && !event.isButtonstate()) { drawnStartPoint = null; } } if (swingTool) player.swingArm(EnumHand.MAIN_HAND); event.setCanceled(true); } } else if (player.isSneaking() && event.isButtonstate() && removeBits) { SculptSettingsHelper.setBitStack(player, stack, true, null); if ((removeBits ? Configs.sculptSetBitWire : Configs.sculptSetBitSpade).shouldDisplayInChat()) { printChatMessageWithDeletion("Removing any/all bits"); } } else if (drawnMode) { drawnStartPoint = null; } } } } } if (!event.isCanceled() && event.getButton() == 1 && event.isButtonstate()) { ItemStack stack = player.inventory.getCurrentItem(); if (stack != null) { Item item = stack.getItem(); if (item != null && item instanceof ItemSculptingTool) { cycleMode(player, stack, !player.isSneaking()); } } } } private void cycleMode(EntityPlayer player, ItemStack stack, boolean forward) { int mode = SculptSettingsHelper.cycleData(SculptSettingsHelper.getMode(player, stack.getTagCompound()), forward, ItemSculptingTool.MODE_TITLES.length); SculptSettingsHelper.setMode(player, stack, mode); if (Configs.sculptMode.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getModeText(mode)); } } private void cycleDirection(EntityPlayer player, ItemStack stack, boolean forward) { NBTTagCompound nbt = stack.hasTagCompound() ? stack.getTagCompound() : new NBTTagCompound(); int direction = SculptSettingsHelper.getDirection(player, nbt); int shapeType = SculptSettingsHelper.getShapeType(player, nbt, ((ItemSculptingTool) stack.getItem()).isCurved()); int rotation = direction / 6; direction %= 6; if (!(shapeType == 4 && (forward ? rotation != 1 : rotation != 0)) && !(shapeType == 5 && (forward ? rotation != 3 : rotation != 0))) { direction = shapeType == 2 || shapeType > 3 ? (forward ? DIRECTION_FORWARD[direction] : DIRECTION_BACKWARD[direction]) : (forward ? AXIS_FORWARD[direction] : AXIS_BACKWARD[direction]); rotation = forward ? 0 : (shapeType == 4 ? 1 : 3); } else { rotation = shapeType == 4 ? (rotation == 0 ? 
1 : 0) : SculptSettingsHelper.cycleData(rotation, forward, 4); } direction += 6 * rotation; SculptSettingsHelper.setDirection(player, stack, direction); if (Configs.sculptDirection.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getDirectionText(direction, shapeType == 4 || shapeType == 5)); } } private void cycleShapeType(EntityPlayer player, ItemStack stack, Item item) { boolean isCurved = ((ItemSculptingTool) item).isCurved(); NBTTagCompound nbt = stack.hasTagCompound() ? stack.getTagCompound() : new NBTTagCompound(); int shapeType = SculptSettingsHelper.getShapeType(player, nbt, isCurved); shapeType = isCurved ? SHAPE_CURVED[shapeType] : SHAPE_FLAT[shapeType]; SculptSettingsHelper.setShapeType(player, stack, isCurved, shapeType); if ((isCurved ? Configs.sculptShapeTypeCurved : Configs.sculptShapeTypeFlat).shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getShapeTypeText(shapeType)); } } private void toggleBitGridTargeted(EntityPlayer player, ItemStack stack) { boolean targetBitGrid = !SculptSettingsHelper.isBitGridTargeted(player, stack.getTagCompound()); SculptSettingsHelper.setBitGridTargeted(player, stack, targetBitGrid); if (Configs.sculptTargetBitGridVertexes.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getBitGridTargetedText(targetBitGrid)); } } private void cycleSemiDiameter(EntityPlayer player, ItemStack stack, boolean forward) { int semiDiameter = SculptSettingsHelper.cycleData(SculptSettingsHelper.getSemiDiameter(player, stack.getTagCompound()), forward, Configs.maxSemiDiameter); SculptSettingsHelper.setSemiDiameter(player, stack, semiDiameter); if (Configs.sculptSemiDiameter.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getSemiDiameterText(player, stack.getTagCompound(), semiDiameter)); } } private void toggleHollowShape(EntityPlayer player, ItemStack stack, Item item) { boolean isWire = ((ItemSculptingTool) item).removeBits(); boolean isHollowShape = !SculptSettingsHelper.isHollowShape(player, stack.getTagCompound(), isWire); SculptSettingsHelper.setHollowShape(player, stack, isWire, isHollowShape); if ((isWire ? 
Configs.sculptHollowShapeWire : Configs.sculptHollowShapeSpade).shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getHollowShapeText(isHollowShape)); } } private void toggleOpenEnds(EntityPlayer player, ItemStack stack) { boolean areEndsOpen = !SculptSettingsHelper.areEndsOpen(player, stack.getTagCompound()); SculptSettingsHelper.setEndsOpen(player, stack, areEndsOpen); if (Configs.sculptOpenEnds.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getOpenEndsText(areEndsOpen)); } } private void cycleWallThickness(EntityPlayer player, ItemStack stack, boolean forward) { int wallThickness = SculptSettingsHelper.cycleData(SculptSettingsHelper.getWallThickness(player, stack.getTagCompound()), forward, Configs.maxWallThickness); SculptSettingsHelper.setWallThickness(player, stack, wallThickness); if (Configs.sculptWallThickness.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getWallThicknessText(wallThickness)); } } private void printChatMessageWithDeletion(String text) { GuiNewChat chatGUI = Minecraft.getMinecraft().ingameGUI.getChatGUI(); chatGUI.printChatMessageWithOptionalDeletion(new TextComponentString(text), 627250); } @SubscribeEvent public void cancelBoundingBoxDraw(DrawBlockHighlightEvent event) { ItemStack itemStack = event.getPlayer().inventory.getCurrentItem(); if (itemStack != null) { Item item = itemStack.getItem(); if (item != null && item instanceof ItemSculptingTool && SculptSettingsHelper.getMode(event.getPlayer(), itemStack.getTagCompound()) == 1) { event.setCanceled(true); } } } @SubscribeEvent public void renderBoxesSpheresAndOverlays(RenderWorldLastEvent event) { if (!Configs.disableOverlays) { EntityPlayer player = Minecraft.getMinecraft().thePlayer; World world = player.worldObj; ItemStack stack = player.getHeldItemMainhand(); if (stack != null) { RayTraceResult target = Minecraft.getMinecraft().objectMouseOver; if (target != null && target.typeOfHit.equals(RayTraceResult.Type.BLOCK) && stack.getItem() instanceof ItemBitToolBase) { IChiselAndBitsAPI api = ChiselsAndBitsAPIAccess.apiInstance; float ticks = event.getPartialTicks(); double playerX = player.lastTickPosX + (player.posX - player.lastTickPosX) * ticks; double playerY = player.lastTickPosY + (player.posY - player.lastTickPosY) * ticks; double playerZ = player.lastTickPosZ + (player.posZ - player.lastTickPosZ) * ticks; EnumFacing dir = target.sideHit; Tessellator t = Tessellator.getInstance(); VertexBuffer vb = t.getBuffer(); BlockPos pos = target.getBlockPos(); int x = pos.getX(); int y = pos.getY(); int z = pos.getZ(); double diffX = playerX - x; double diffY = playerY - y; double diffZ = playerZ - z; Vec3d hit = target.hitVec; if (stack.getItem() instanceof ItemBitWrench && api.isBlockChiseled(world, target.getBlockPos())) { int mode = !stack.hasTagCompound() ? 0 : stack.getTagCompound().getInteger(NBTKeys.MODE); frameCounter++; int side = dir.ordinal(); boolean upDown = side <= 1; boolean eastWest = side >= 4; boolean northSouth = !upDown && !eastWest; AxisAlignedBB box = new AxisAlignedBB(eastWest ? hit.xCoord : x, upDown ? hit.yCoord : y, northSouth ? hit.zCoord : z, eastWest ? hit.xCoord : x + 1, upDown ? hit.yCoord : y + 1, northSouth ? 
hit.zCoord : z + 1); int offsetX = Math.abs(dir.getFrontOffsetX()); int offsetY = Math.abs(dir.getFrontOffsetY()); int offsetZ = Math.abs(dir.getFrontOffsetZ()); double invOffsetX = offsetX ^ 1; double invOffsetY = offsetY ^ 1; double invOffsetZ = offsetZ ^ 1; boolean invertDirection = player.isSneaking(); GlStateManager.pushMatrix(); GlStateManager.disableLighting(); GlStateManager.enableAlpha(); GlStateManager.enableBlend(); GlStateManager.blendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA); GlStateManager.enableTexture2D(); GlStateManager.pushMatrix(); double angle = getInitialAngle(mode); if (mode == 3) { if (side % 2 == 1) angle += 180; if (side >= 4) angle -= 90; } else { if (mode == 0) { if (side % 2 == (invertDirection ? 0 : 1)) angle *= -1; } else { if (side < 2 || side > 3) angle *= -1; } if (eastWest) angle += 90; if (side == (mode == 1 ? 1 : 0) || side == 3 || side == 4) angle += 180; } double offsetX2 = 0.5 * invOffsetX; double offsetY2 = 0.5 * invOffsetY; double offsetZ2 = 0.5 * invOffsetZ; double mirTravel = mode == 1 ? Configs.mirrorAmplitude * Math.cos(Math.PI * 2 * frameCounter / Configs.mirrorPeriod) : 0; double mirTravel1 = mirTravel; double mirTravel2 = 0; boolean mirrorInversion = invertDirection && mode == 1; if (mirrorInversion && side <= 1 && player.getHorizontalFacing().ordinal() > 3) { angle += 90; mirTravel1 = 0; mirTravel2 = mirTravel; } translateAndRotateTexture(playerX, playerY, playerZ, dir, upDown, eastWest, offsetX, offsetY, offsetZ, angle, diffX, diffY, diffZ, offsetX2, offsetY2, offsetZ2, mirTravel1, mirTravel2); Minecraft.getMinecraft().renderEngine.bindTexture(mode == 0 ? ARROW_CYCLICAL : (mode == 1 ? ARROW_BIDIRECTIONAL : (mode == 2 ? CIRCLE : INVERSION))); float minU = 0; float maxU = 1; float minV = 0; float maxV = 1; if (mode == 0) { if (invertDirection) { float minU2 = minU; minU = maxU; maxU = minU2; } } else if (mode == 2) { EnumFacing dir2 = side <= 1 ? EnumFacing.WEST : (side <= 3 ? EnumFacing.WEST : EnumFacing.DOWN); box = contractBoxOrRenderArrows(true, t, vb, side, northSouth, dir2, box, invOffsetX, invOffsetY, invOffsetZ, invertDirection, minU, maxU, minV, maxV); } renderTexturedSide(t, vb, side, northSouth, box, minU, maxU, minV, maxV, 1); GlStateManager.popMatrix(); AxisAlignedBB box3 = world.getBlockState(pos).getSelectedBoundingBox(world, pos); for (int s = 0; s < 6; s++) { if (s != side) { GlStateManager.pushMatrix(); upDown = s <= 1; eastWest = s >= 4; northSouth = !upDown && !eastWest; dir = EnumFacing.getFront(s); box = new AxisAlignedBB(eastWest ? (s == 5 ? box3.maxX : box3.minX) : x, upDown ? (s == 1 ? box3.maxY : box3.minY) : y, northSouth ? (s == 3 ? box3.maxZ : box3.minZ) : z, eastWest ? (s == 4 ? box3.minX : box3.maxX) : x + 1, upDown ? (s == 0 ? box3.minY : box3.maxY) : y + 1, northSouth ? (s == 2 ? box3.minZ : box3.maxZ) : z + 1); angle = getInitialAngle(mode); boolean oppRotation = false; int mode2 = mode; if (mode != 3) { oppRotation = dir == EnumFacing.getFront(side).getOpposite(); if (mode == 0) { if (!oppRotation) { Minecraft.getMinecraft().renderEngine.bindTexture(ARROW_HEAD); angle = 90; if (side % 2 == 0) angle += 180; if (invertDirection) angle += 180; mode2 = 2; } else { Minecraft.getMinecraft().renderEngine.bindTexture(ARROW_CYCLICAL); mode2 = 0; } } else if (mode == 2) { if (!oppRotation) { Minecraft.getMinecraft().renderEngine.bindTexture(ARROW_HEAD); if (side == 0 ? s == 2 || s == 5 : (side == 1 ? s == 3 || s == 4 : (side == 2 ? s == 1 || s == 5 : (side == 3 ? s == 0 || s == 4 : (side == 4 ? 
s == 1 || s == 2 : s == 0 || s == 3))))) angle += 180; if (invertDirection) angle += 180; } else { Minecraft.getMinecraft().renderEngine.bindTexture(CIRCLE); } } } mirTravel1 = mirTravel; mirTravel2 = 0; if (mode != 3 && (((side <= 1 && mirrorInversion ? side > 1 : side <= 1) && s > 1) || ((mirrorInversion ? (oppRotation ? player.getHorizontalFacing().ordinal() > 3 : side > 3) : (side == 2 || side == 3)) && s <= 1))) { angle += 90; mirTravel1 = 0; mirTravel2 = mirTravel; } if (mode == 3) { if (s % 2 == 1) angle += 180; if (s >= 4) angle -= 90; } else { if (mode2 == 0) { if (s % 2 == (invertDirection ? 0 : 1)) angle *= -1; if (oppRotation) angle *= -1; } else { if (s < 2 || s > 3) angle *= -1; } if (eastWest) angle -= 90; if (s == (mode2 == 1 ? 1 : 0) || s == 3 || s == 5) angle += 180; } offsetX = Math.abs(dir.getFrontOffsetX()); offsetY = Math.abs(dir.getFrontOffsetY()); offsetZ = Math.abs(dir.getFrontOffsetZ()); invOffsetX = offsetX ^ 1; invOffsetY = offsetY ^ 1; invOffsetZ = offsetZ ^ 1; offsetX2 = 0.5 * invOffsetX; offsetY2 = 0.5 * invOffsetY; offsetZ2 = 0.5 * invOffsetZ; translateAndRotateTexture(playerX, playerY, playerZ, dir, upDown, eastWest, offsetX, offsetY, offsetZ, angle, diffX, diffY, diffZ, offsetX2, offsetY2, offsetZ2, mirTravel1, mirTravel2); minU = 0; maxU = 1; minV = 0; maxV = 1; if (mode2 == 0) { if (oppRotation) { minU = 1; maxU = 0; } if (invertDirection) { float minU2 = minU; minU = maxU; maxU = minU2; } } else if (mode2 == 2) { EnumFacing dir2 = side <= 1 ? (s == 2 || s == 3 ? EnumFacing.WEST : EnumFacing.DOWN) : (side >= 4 ? EnumFacing.WEST : (s <= 1 ? EnumFacing.WEST : EnumFacing.DOWN)); box = contractBoxOrRenderArrows(oppRotation, t, vb, side, northSouth, dir2, box, invOffsetX, invOffsetY, invOffsetZ, invertDirection, minU, maxU, minV, maxV); } if (mode2 != 2 || oppRotation) renderTexturedSide(t, vb, s, northSouth, box, minU, maxU, minV, maxV, 1); GlStateManager.popMatrix(); } } GlStateManager.enableLighting(); GlStateManager.disableBlend(); GlStateManager.enableTexture2D(); GlStateManager.popMatrix(); } else if (stack.getItem() instanceof ItemSculptingTool) { ItemSculptingTool toolItem = (ItemSculptingTool) stack.getItem(); boolean removeBits = toolItem.removeBits(); int mode = SculptSettingsHelper.getMode(player, stack.getTagCompound()); if (!removeBits || mode > 0 || api.canBeChiseled(world, target.getBlockPos())) { float hitX = (float) hit.xCoord - pos.getX(); float hitY = (float) hit.yCoord - pos.getY(); float hitZ = (float) hit.zCoord - pos.getZ(); IBitLocation bitLoc = api.getBitPos(hitX, hitY, hitZ, dir, pos, false); if (bitLoc != null) { NBTTagCompound nbt = stack.hasTagCompound() ? 
stack.getTagCompound() : new NBTTagCompound(); int x2 = bitLoc.getBitX(); int y2 = bitLoc.getBitY(); int z2 = bitLoc.getBitZ(); if (!toolItem.removeBits()) { x2 += dir.getFrontOffsetX(); y2 += dir.getFrontOffsetY(); z2 += dir.getFrontOffsetZ(); } boolean isDrawn = drawnStartPoint != null; boolean drawnBox = mode == 2 && isDrawn; int shapeType = SculptSettingsHelper.getShapeType(player, nbt, toolItem.isCurved()); boolean fixedNotSym = !drawnBox && shapeType == 2 || shapeType > 4; GlStateManager.enableBlend(); GlStateManager.tryBlendFuncSeparate(770, 771, 1, 0); GlStateManager.disableTexture2D(); GlStateManager.depthMask(false); double r = SculptSettingsHelper.getSemiDiameter(player, nbt) * Utility.PIXEL_D; ConfigShapeRenderPair configPair = Configs.itemShapeMap.get(toolItem); ConfigShapeRender configBox = configPair.boundingBox; AxisAlignedBB box = null, shapeBox = null; double x3 = x + x2 * Utility.PIXEL_D; double y3 = y + y2 * Utility.PIXEL_D; double z3 = z + z2 * Utility.PIXEL_D; if (configBox.renderInnerShape || configBox.renderOuterShape) { GlStateManager.pushMatrix(); GL11.glLineWidth(configBox.lineWidth); boolean inside = ItemSculptingTool.wasInsideClicked(dir, hit, pos); if (drawnBox) { double x4 = drawnStartPoint.xCoord; double y4 = drawnStartPoint.yCoord; double z4 = drawnStartPoint.zCoord; if (Math.max(x3, x4) == x3) { x3 += Utility.PIXEL_D; } else { x4 += Utility.PIXEL_D; } if (Math.max(y3, y4) == y3) { y3 += Utility.PIXEL_D; } else { y4 += Utility.PIXEL_D; } if (Math.max(z3, z4) == z3) { z3 += Utility.PIXEL_D; } else { z4 += Utility.PIXEL_D; } box = new AxisAlignedBB(x4, y4, z4, x3, y3, z3); } else { double f = 0; float x4 = 0, y4 = 0, z4 = 0; boolean targetBitGrid = SculptSettingsHelper.isBitGridTargeted(player, nbt); if (mode == 2) { r = 0; } else if (targetBitGrid) { f = Utility.PIXEL_D * 0.5; x4 = hitX < (Math.round(hitX/Utility.PIXEL_F) * Utility.PIXEL_F) ? 1 : -1; y4 = hitY < (Math.round(hitY/Utility.PIXEL_F) * Utility.PIXEL_F) ? 1 : -1; z4 = hitZ < (Math.round(hitZ/Utility.PIXEL_F) * Utility.PIXEL_F) ? 1 : -1; double offsetX = Math.abs(dir.getFrontOffsetX()); double offsetY = Math.abs(dir.getFrontOffsetY()); double offsetZ = Math.abs(dir.getFrontOffsetZ()); int s = dir.ordinal(); if (s % 2 == 0) { if (offsetX > 0) x4 *= -1; if (offsetY > 0) y4 *= -1; if (offsetZ > 0) z4 *= -1; } boolean su = s== 1 || s == 3; if (removeBits ? (!inside || !su) : (inside && su)) { if (offsetX > 0) x4 *= -1; if (offsetY > 0) y4 *= -1; if (offsetZ > 0) z4 *= -1; } r -= f; } box = new AxisAlignedBB(x - r, y - r, z - r, x + r + Utility.PIXEL_D, y + r + Utility.PIXEL_D, z + r + Utility.PIXEL_D) .offset(x2 * Utility.PIXEL_D + f * x4, y2 * Utility.PIXEL_D + f * y4, z2 * Utility.PIXEL_D + f * z4); if (targetBitGrid && mode != 2) { x3 = (box.maxX + box.minX) * 0.5 - f; y3 = (box.maxY + box.minY) * 0.5 - f; z3 = (box.maxZ + box.minZ) * 0.5 - f; } } if (fixedNotSym) { shapeBox = box.expand(0, 0, 0); } if (mode == 0) { BlockPos pos2 = !removeBits && !inside ? pos.offset(dir) : pos; AxisAlignedBB box2 = !removeBits ? 
new AxisAlignedBB(pos2) : world.getBlockState(pos2).getSelectedBoundingBox(world, pos2); if ((int) Math.round(box2.minX) != pos2.getX() || (int) Math.round(box2.minY) != pos2.getY() || (int) Math.round(box2.minZ) != pos2.getZ()) { box2 = box2.offset(pos2); } box = limitBox(box, box2); } double f = 0.0020000000949949026; if (configBox.renderOuterShape) { GlStateManager.color(configBox.red, configBox.green, configBox.blue, configBox.outerShapeAlpha); RenderGlobal.drawSelectionBoundingBox(box.expand(f, f, f).offset(-playerX, -playerY, -playerZ)); } if (configBox.renderInnerShape) { GlStateManager.color(configBox.red, configBox.green, configBox.blue, configBox.innerShapeAlpha); GlStateManager.depthFunc(GL11.GL_GREATER); RenderGlobal.drawSelectionBoundingBox(box.expand(f, f, f).offset(-playerX, -playerY, -playerZ)); GlStateManager.depthFunc(GL11.GL_LEQUAL); } GlStateManager.popMatrix(); } if (!fixedNotSym) { shapeBox = box.expand(0, 0, 0); } boolean isHollow = SculptSettingsHelper.isHollowShape(player, nbt, removeBits); boolean isOpen = isHollow && SculptSettingsHelper.areEndsOpen(player, nbt); renderEnvelopedShapes(player, shapeType, nbt, playerX, playerY, playerZ, isDrawn, drawnBox, r, configPair, shapeBox, x3, y3, z3, 0, isOpen); float wallThickness = SculptSettingsHelper.getWallThickness(player, nbt) * Utility.PIXEL_F; if (wallThickness > 0 && isHollow && !(mode == 2 && !drawnBox)) { renderEnvelopedShapes(player, shapeType, nbt, playerX, playerY, playerZ, isDrawn, drawnBox, r, configPair, shapeBox, x3, y3, z3, wallThickness, isOpen); } GlStateManager.depthMask(true); GlStateManager.enableTexture2D(); GlStateManager.disableBlend(); } } } } } } } private void renderEnvelopedShapes(EntityPlayer player, int shapeType, NBTTagCompound nbt, double playerX, double playerY, double playerZ, boolean isDrawn, boolean drawnBox, double r, ConfigShapeRenderPair configPair, AxisAlignedBB box, double x, double y, double z, double contraction, boolean isOpen) { ConfigShapeRender configShape = configPair.envelopedShape; if (configShape.renderInnerShape || configShape.renderOuterShape) { double a = 0, b = 0, c = 0; /* 0 = sphere * 1 = cylinder * 2 = cone * 3 = cube * 4 = triangular prism * 5 = triangular pyramid * 6 = square pyramid */ int dir = SculptSettingsHelper.getDirection(player, nbt); // int rotation = dir / 6; dir %= 6; boolean notFullSym = shapeType != 0 && shapeType != 3; boolean notSym = shapeType == 2 || shapeType > 4; double ri = r + Utility.PIXEL_D * 0.5; r = Math.max(ri - contraction, 0); boolean drawnNotSym = notSym && drawnBox; double base = 0; double v; if (drawnBox || notSym) { double f = 0.5; double minX = box.minX * f; double minY = box.minY * f; double minZ = box.minZ * f; double maxX = box.maxX * f; double maxY = box.maxY * f; double maxZ = box.maxZ * f; double x2 = maxX - minX; double y2 = maxY - minY; double z2 = maxZ - minZ; if (drawnNotSym) { if (dir == 2 || dir == 3) { v = y2; y2 = z2; z2 = v; } else if (dir > 3) { v = y2; y2 = x2; x2 = v; } } if (notSym && contraction > 0) { if (!isOpen) base = contraction; y2 *= 2; double y2sq = y2 * y2; double aInset = (Math.sqrt(x2 * x2 + y2sq) * contraction) / x2 + base; double cInset = (Math.sqrt(z2 * z2 + y2sq) * contraction) / z2 + base; a = Math.max((y2 - aInset) * (x2 / y2), 0); c = Math.max((y2 - cInset) * (z2 / y2), 0); contraction = Math.min(aInset - base, cInset - base); b = Math.max(y2 * 0.5 - contraction * 0.5 - base * 0.5, 0); } else { a = Math.max(x2 - (!isOpen || !notFullSym || dir < 4 ? 
contraction : 0), 0); c = Math.max(z2 - (!isOpen || !notFullSym || dir != 2 && dir != 3 ? contraction : 0), 0); b = Math.max(y2 - (!isOpen || !notFullSym || dir > 1 ? contraction : 0), 0); } r = Math.max(Math.max(a, b), c); x = maxX + minX; y = maxY + minY; z = maxZ + minZ; if (drawnBox) { if (notSym || !notFullSym) { if (dir < 2 || dir > 3 || !notFullSym) { v = b; b = c; c = v; } } else { if (dir < 2) { v = b; b = c; c = v; } else if (dir > 3) { v = a; a = c; c = v; } else { v = b; b = a; a = v; } } } } else { a = b = c = r; if (b > 0 && notFullSym && isOpen) { b += contraction * (isDrawn ? 0 : 1); } } Quadric shape = shapeType > 2 ? new Prism(shapeType > 4, shapeType == 4 || shapeType == 5) : (notFullSym ? new Cylinder() : new Sphere()); shape.setDrawStyle(GLU.GLU_LINE); Quadric lid = new Disk(); lid.setDrawStyle(GLU.GLU_LINE); GlStateManager.pushMatrix(); GL11.glLineWidth(configShape.lineWidth); double x2 = x - playerX; double y2 = y - playerY; double z2 = z - playerZ; if (!notSym && !isDrawn) { double hp = Utility.PIXEL_D * 0.5; x2 += hp; y2 += hp; z2 += hp; } if (notFullSym) { if (isOpen && contraction > 0 && !notSym) { double offset = contraction * (notSym ? 0.5 : (drawnBox ? 0 : -1)); if (dir != 3) { y2 += dir == 0 ? offset : -offset; } if (dir > 2) { x2 += dir == 5 ? -offset : offset; } if (dir == 2 || dir == 3) { z2 += dir == 2 ? offset : -offset; } } } GlStateManager.translate(x2, y2, z2); int rot2 = dir; if (!(drawnNotSym && dir == 2)) { if (notFullSym && rot2 != 1) { int angle = 90; if (rot2 == 3) { rot2 = 0; angle = 180; if (!(drawnNotSym && dir == 3)) { GlStateManager.rotate(90, 0, 0, 1); } } else if (rot2 > 1) { rot2 %= 4; } else { rot2 = rot2 ^ 1 + 4; } Vec3i vec = EnumFacing.getFront(rot2).getOpposite().getDirectionVec(); GlStateManager.rotate(angle, vec.getX(), vec.getY(), vec.getZ()); } else { GlStateManager.rotate(90, 1, 0, 0); } } boolean openSym = notFullSym && !notSym && isOpen && !isDrawn; if (notFullSym) { double offset1 = 0; double offset2 = 0; double r2 = r; if (notSym) { r2 -= contraction * 0.5 - base * 0.5; } else if (openSym) { double m = -contraction; if (dir == 0) m *= 2; if (dir != 1) r -= m; if (dir > 1) { if (dir < 3) { offset1 = m; } else { offset2 = m; } } } GlStateManager.translate(offset1, offset2, -r2); } if (openSym) { v = b; b = c; c = v; } if (drawnNotSym) { if (dir == 2 || dir == 3) { v = b; b = c; c = v; } else if (dir > 3) { v = b; b = a; a = v; } } if (notFullSym && drawnBox) { if (b > c && b > a) { GlStateManager.translate(0, 0, b - c); } else if (a > c && a >= b) { GlStateManager.translate(0, 0, a - c); } } GlStateManager.scale(a / ri, b / ri, c / ri); if (configShape.renderOuterShape) { drawEnvelopedShapes(ri, configShape, shapeType, shape, lid, true, notSym, isOpen); } if (configShape.renderInnerShape) { GlStateManager.depthFunc(GL11.GL_GREATER); drawEnvelopedShapes(ri, configShape, shapeType, shape, lid, false, notSym, isOpen); GlStateManager.depthFunc(GL11.GL_LEQUAL); } GlStateManager.popMatrix(); } } private void drawEnvelopedShapes(double r, ConfigShapeRender configShape, int shapeType, Quadric shape, Quadric lid, boolean isOuter, boolean isCylinder, boolean isOpen) { GlStateManager.pushMatrix(); drawEnvelopedShape(shape, r, isOuter, configShape, isCylinder, isOpen); if (shapeType > 0 && shapeType < 3 && !isOpen) { if (shapeType == 1) { drawEnvelopedShape(lid, r, isOuter, configShape, isCylinder, isOpen); } GlStateManager.translate(0, 0, r * 2); drawEnvelopedShape(lid, r, isOuter, configShape, isCylinder, isOpen); } 
GlStateManager.popMatrix(); } private void drawEnvelopedShape(Quadric shape, double radius, boolean isOuter, ConfigShapeRender configShape, boolean isCone, boolean isOpen) { GlStateManager.pushMatrix(); GlStateManager.color(configShape.red, configShape.green, configShape.blue, isOuter ? configShape.outerShapeAlpha : configShape.innerShapeAlpha); float r = (float) radius; if (shape instanceof Prism) { ((Prism) shape).draw(r, isOpen); } else if (shape instanceof Sphere) { ((Sphere) shape).draw(r, 32, 32); } else if (shape instanceof Cylinder) { ((Cylinder) shape).draw(isCone ? 0 : r, r, r * 2, 32, 32); } else if (shape instanceof Disk) { ((Disk) shape).draw(0, r, 32, 32); } GlStateManager.popMatrix(); } private AxisAlignedBB limitBox(AxisAlignedBB box, AxisAlignedBB mask) { double d0 = Math.max(box.minX, mask.minX); double d1 = Math.max(box.minY, mask.minY); double d2 = Math.max(box.minZ, mask.minZ); double d3 = Math.min(box.maxX, mask.maxX); double d4 = Math.min(box.maxY, mask.maxY); double d5 = Math.min(box.maxZ, mask.maxZ); return new AxisAlignedBB(d0, d1, d2, d3, d4, d5); } private double getInitialAngle(int mode) { return mode == 0 ? (frameCounter * (360 / Configs.rotationPeriod)) % 360 : 0; } private void translateAndRotateTexture(double playerX, double playerY, double playerZ, EnumFacing dir, boolean upDown, boolean eastWest, int offsetX, int offsetY, int offsetZ, double angle, double diffX, double diffY, double diffZ, double offsetX2, double offsetY2, double offsetZ2, double mirTravel1, double mirTravel2) { double cos = Math.cos(Math.toRadians(angle)); double sin = Math.sin(Math.toRadians(angle)); if (upDown) { GL11.glTranslated(diffX * cos + diffZ * sin - diffX + mirTravel1, 0, -diffX * sin + diffZ * cos - diffZ + mirTravel2); } else if (eastWest) { GL11.glTranslated(0, diffY * cos - diffZ * sin - diffY + mirTravel2, diffY * sin + diffZ * cos - diffZ + mirTravel1); } else { GL11.glTranslated(diffX * cos - diffY * sin - diffX + mirTravel1, diffX * sin + diffY * cos - diffY + mirTravel2, 0); } GL11.glTranslated(offsetX2, offsetY2, offsetZ2); GL11.glRotated(angle, offsetX, offsetY, offsetZ); GL11.glTranslated(-offsetX2, -offsetY2, -offsetZ2); GL11.glTranslated(-playerX + 0.002 * dir.getFrontOffsetX(), -playerY + 0.002 * dir.getFrontOffsetY(), -playerZ + 0.002 * dir.getFrontOffsetZ()); } private AxisAlignedBB contractBoxOrRenderArrows(boolean contractBox, Tessellator t, VertexBuffer vb, int side, boolean northSouth, EnumFacing dir, AxisAlignedBB box, double invOffsetX, double invOffsetY, double invOffsetZ, boolean invertDirection, float minU, float maxU, float minV, float maxV) { if (contractBox) { double amount = (frameCounter % Configs.translationScalePeriod) / Configs.translationScalePeriod; amount /= invertDirection ? -2 : 2; if (invertDirection && Configs.translationScalePeriod > 1) amount += 0.5; box = box.expand(-amount * invOffsetX, -amount * invOffsetY, -amount * invOffsetZ); } else if (Configs.translationDistance > 0) { double distance = Configs.translationDistance; double fadeDistance = Configs.translationFadeDistance; double period = Configs.translationMovementPeriod; double offsetDistance = Configs.translationOffsetDistance; int timeOffset = offsetDistance > 0 ? (int) (period / (distance / offsetDistance)) : 0; if (timeOffset > period / 3.0) timeOffset = (int) (period / 3.0); if (fadeDistance > distance / 2.0) fadeDistance = distance / 2.0; int n = offsetDistance == 0 || period == 1 ? 
1 : 3; for (int i = 0; i < n; i++) { double amount = ((frameCounter + timeOffset * i) % period) / (period / (distance * 100.0) * 100.0); double alpha = 1; if (period > 1) { if (amount < fadeDistance) { alpha = amount / fadeDistance; } else if (amount > distance - fadeDistance) { alpha = (distance - amount) / fadeDistance; } amount -= distance / 2.0; } AxisAlignedBB box2 = new AxisAlignedBB(box.minX, box.minY, box.minZ, box.maxX, box.maxY, box.maxZ) .offset(amount * dir.getFrontOffsetX(), amount * dir.getFrontOffsetY(), amount * dir.getFrontOffsetZ()); renderTexturedSide(t, vb, side, northSouth, box2, minU, maxU, minV, maxV, alpha); } } else { renderTexturedSide(t, vb, side, northSouth, box, minU, maxU, minV, maxV, 1); } return box; } private void renderTexturedSide(Tessellator t, VertexBuffer vb, int side, boolean northSouth, AxisAlignedBB box, float minU, float maxU, float minV, float maxV, double alpha) { GL11.glColor4d(1, 1, 1, alpha); if (side == 1 || side == 3 || side == 4) { vb.begin(7, DefaultVertexFormats.POSITION_TEX); vb.pos(box.minX, box.minY, box.maxZ).tex(maxU, minV).endVertex(); vb.pos(box.maxX, northSouth ? box.minY : box.maxY, box.maxZ).tex(minU, minV).endVertex(); vb.pos(box.maxX, box.maxY, box.minZ).tex(minU, maxV).endVertex(); vb.pos(box.minX, northSouth ? box.maxY : box.minY, box.minZ).tex(maxU, maxV).endVertex(); t.draw(); vb.begin(7, DefaultVertexFormats.POSITION_TEX); vb.pos(box.maxX, northSouth ? box.minY : box.maxY, box.maxZ).tex(minU, minV).endVertex(); vb.pos(box.minX, box.minY, box.maxZ).tex(maxU, minV).endVertex(); vb.pos(box.minX, northSouth ? box.maxY : box.minY, box.minZ).tex(maxU, maxV).endVertex(); vb.pos(box.maxX, box.maxY, box.minZ).tex(minU, maxV).endVertex(); t.draw(); } else { vb.begin(7, DefaultVertexFormats.POSITION_TEX); vb.pos(box.minX, northSouth ? box.maxY : box.minY, box.minZ).tex(maxU, minV).endVertex(); vb.pos(box.maxX, box.maxY, box.minZ).tex(minU, minV).endVertex(); vb.pos(box.maxX, northSouth ? box.minY : box.maxY, box.maxZ).tex(minU, maxV).endVertex(); vb.pos(box.minX, box.minY, box.maxZ).tex(maxU, maxV).endVertex(); t.draw(); vb.begin(7, DefaultVertexFormats.POSITION_TEX); vb.pos(box.maxX, box.maxY, box.minZ).tex(minU, minV).endVertex(); vb.pos(box.minX, northSouth ? box.maxY : box.minY, box.minZ).tex(maxU, minV).endVertex(); vb.pos(box.minX, box.minY, box.maxZ).tex(maxU, maxV).endVertex(); vb.pos(box.maxX, northSouth ? box.minY : box.maxY, box.maxZ).tex(minU, maxV).endVertex(); t.draw(); } } }
src/main/java/com/phylogeny/extrabitmanipulation/client/eventhandler/ClientEventHandler.java
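The ClientEventHandler above relies on @SubscribeEvent annotations, which only take effect once an instance is registered on the Forge event bus. A minimal sketch of that registration follows, assuming the standard MinecraftForge.EVENT_BUS API of this Minecraft/Forge generation; the class and method names (ClientRegistrationExample, registerClientEventHandlers) are hypothetical and not taken from the repository.

// Hypothetical client-side registration of the event handler shown above.
import net.minecraftforge.common.MinecraftForge;
import com.phylogeny.extrabitmanipulation.client.eventhandler.ClientEventHandler;

public class ClientRegistrationExample {
    public static void registerClientEventHandlers() {
        // @SubscribeEvent methods (mouse interception, overlay rendering, etc.)
        // are dispatched only to instances registered on the event bus.
        MinecraftForge.EVENT_BUS.register(new ClientEventHandler());
    }
}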
package com.phylogeny.extrabitmanipulation.client.eventhandler; import org.lwjgl.opengl.GL11; import org.lwjgl.util.glu.Cylinder; import org.lwjgl.util.glu.Disk; import org.lwjgl.util.glu.GLU; import org.lwjgl.util.glu.Quadric; import org.lwjgl.util.glu.Sphere; import com.phylogeny.extrabitmanipulation.ExtraBitManipulation; import com.phylogeny.extrabitmanipulation.api.ChiselsAndBitsAPIAccess; import com.phylogeny.extrabitmanipulation.client.shape.Prism; import com.phylogeny.extrabitmanipulation.config.ConfigShapeRender; import com.phylogeny.extrabitmanipulation.config.ConfigShapeRenderPair; import com.phylogeny.extrabitmanipulation.helper.SculptSettingsHelper; import com.phylogeny.extrabitmanipulation.item.ItemBitWrench; import com.phylogeny.extrabitmanipulation.item.ItemBitToolBase; import com.phylogeny.extrabitmanipulation.item.ItemSculptingTool; import com.phylogeny.extrabitmanipulation.packet.PacketCycleBitWrenchMode; import com.phylogeny.extrabitmanipulation.packet.PacketSculpt; import com.phylogeny.extrabitmanipulation.reference.Configs; import com.phylogeny.extrabitmanipulation.reference.NBTKeys; import com.phylogeny.extrabitmanipulation.reference.Reference; import com.phylogeny.extrabitmanipulation.reference.Utility; import mod.chiselsandbits.api.APIExceptions.CannotBeChiseled; import mod.chiselsandbits.api.IBitAccess; import mod.chiselsandbits.api.IBitLocation; import mod.chiselsandbits.api.IChiselAndBitsAPI; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.GuiNewChat; import net.minecraft.client.gui.GuiScreen; import net.minecraft.client.renderer.GlStateManager; import net.minecraft.client.renderer.RenderGlobal; import net.minecraft.client.renderer.Tessellator; import net.minecraft.client.renderer.VertexBuffer; import net.minecraft.client.renderer.vertex.DefaultVertexFormats; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.math.AxisAlignedBB; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.RayTraceResult; import net.minecraft.util.EnumFacing; import net.minecraft.util.EnumHand; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.Vec3d; import net.minecraft.util.math.Vec3i; import net.minecraft.util.text.TextComponentString; import net.minecraft.world.World; import net.minecraftforge.client.event.DrawBlockHighlightEvent; import net.minecraftforge.client.event.MouseEvent; import net.minecraftforge.client.event.RenderWorldLastEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; public class ClientEventHandler { private int frameCounter; private Vec3d drawnStartPoint = null; private static final ResourceLocation ARROW_HEAD = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/ArrowHead.png"); private static final ResourceLocation ARROW_BIDIRECTIONAL = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/ArrowBidirectional.png"); private static final ResourceLocation ARROW_CYCLICAL = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/ArrowCyclical.png"); private static final ResourceLocation CIRCLE = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/Circle.png"); private static final ResourceLocation INVERSION = new ResourceLocation(Reference.GROUP_ID, "textures/overlays/Inversion.png"); private static final int[] DIRECTION_FORWARD = new int[]{2, 0, 5, 4, 1, 3}; private static final int[] DIRECTION_BACKWARD = new int[]{1, 4, 0, 
5, 3, 2}; private static final int[] AXIS_FORWARD = new int[]{2, 3, 4, 5, 0, 1}; private static final int[] AXIS_BACKWARD = new int[]{4, 5, 0, 1, 2, 3}; private static final int[] SHAPE_CURVED = new int[]{1, 2, 0, 0, 0, 0, 0}; // private static final int[] SHAPE_FLAT = new int[]{3, 3, 3, 4, 5, 6, 3}; private static final int[] SHAPE_FLAT = new int[]{3, 3, 3, 6, 3, 3, 3}; @SubscribeEvent public void interceptMouseInput(MouseEvent event) { EntityPlayer player = Minecraft.getMinecraft().thePlayer; if (event.getDwheel() != 0) { ItemStack stack = player.getHeldItemMainhand(); if (stack != null && stack.getItem() instanceof ItemBitToolBase) { boolean forward = event.getDwheel() < 0; if (player.isSneaking()) { if (stack.getItem() instanceof ItemBitWrench) { ExtraBitManipulation.packetNetwork.sendToServer(new PacketCycleBitWrenchMode(forward)); } else { cycleSemiDiameter(player, stack, forward); } event.setCanceled(true); } else if (stack.getItem() instanceof ItemSculptingTool && (GuiScreen.isCtrlKeyDown() || GuiScreen.isAltKeyDown())) { if (GuiScreen.isCtrlKeyDown()) { cycleDirection(player, stack, forward); } else { cycleWallThickness(player, stack, forward); } event.setCanceled(true); } } else { drawnStartPoint = null; } } else if ((GuiScreen.isCtrlKeyDown() || GuiScreen.isAltKeyDown()) && event.isButtonstate()) { ItemStack stack = player.inventory.getCurrentItem(); if (stack != null) { Item item = stack.getItem(); if (item != null && item instanceof ItemSculptingTool) { if (GuiScreen.isCtrlKeyDown()) { if (event.getButton() == 1) { cycleShapeType(player, stack, item); } if (event.getButton() == 0) { toggleBitGridTargeted(player, stack); } } else { if (event.getButton() == 1) { toggleHollowShape(player, stack, item); } if (event.getButton() == 0) { toggleOpenEnds(player, stack); } } event.setCanceled(true); } } } else if (event.getButton() == 0) { if (!player.capabilities.allowEdit) return; ItemStack stack = player.inventory.getCurrentItem(); if (stack != null) { Item item = stack.getItem(); if (event.isButtonstate() && item instanceof ItemBitWrench) { event.setCanceled(true); } else if (item != null && item instanceof ItemSculptingTool) { boolean drawnMode = SculptSettingsHelper.getMode(player, stack.getTagCompound()) == 2; if (!drawnMode) { drawnStartPoint = null; } if (event.isButtonstate() || (drawnMode && drawnStartPoint != null)) { ItemSculptingTool toolItem = (ItemSculptingTool) item; boolean removeBits = toolItem.removeBits(); RayTraceResult target = Minecraft.getMinecraft().objectMouseOver; if (target != null && target.typeOfHit != RayTraceResult.Type.MISS) { if (target.typeOfHit == RayTraceResult.Type.BLOCK) { BlockPos pos = target.getBlockPos(); EnumFacing side = target.sideHit; Vec3d hit = target.hitVec; boolean swingTool = true; if (drawnMode && event.isButtonstate() && drawnStartPoint != null) { event.setCanceled(true); return; } if (!player.isSneaking() && drawnMode && event.isButtonstate()) { IBitLocation bitLoc = ChiselsAndBitsAPIAccess.apiInstance.getBitPos((float) hit.xCoord - pos.getX(), (float) hit.yCoord - pos.getY(), (float) hit.zCoord - pos.getZ(), side, pos, false); if (bitLoc != null) { int x = pos.getX(); int y = pos.getY(); int z = pos.getZ(); float x2 = x + bitLoc.getBitX() * Utility.PIXEL_F; float y2 = y + bitLoc.getBitY() * Utility.PIXEL_F; float z2 = z + bitLoc.getBitZ() * Utility.PIXEL_F; if (!removeBits) { x2 += side.getFrontOffsetX() * Utility.PIXEL_F; y2 += side.getFrontOffsetY() * Utility.PIXEL_F; z2 += side.getFrontOffsetZ() * Utility.PIXEL_F; } 
drawnStartPoint = new Vec3d(x2, y2, z2); } else { drawnStartPoint = null; swingTool = false; } } else { if (player.isSneaking()) { IChiselAndBitsAPI api = ChiselsAndBitsAPIAccess.apiInstance; IBitLocation bitLoc = api.getBitPos((float) hit.xCoord - pos.getX(), (float) hit.yCoord - pos.getY(), (float) hit.zCoord - pos.getZ(), side, pos, false); if (bitLoc != null) { try { IBitAccess bitAccess = api.getBitAccess(player.worldObj, pos); ItemStack bitStack = bitAccess.getBitAt(bitLoc.getBitX(), bitLoc.getBitY(), bitLoc.getBitZ()).getItemStack(1); SculptSettingsHelper.setBitStack(player, stack, removeBits, bitStack); if ((removeBits ? Configs.sculptSetBitWire : Configs.sculptSetBitSpade).shouldDisplayInChat()) { printChatMessageWithDeletion((removeBits ? "Removing only " : "Sculpting with ") + bitStack.getDisplayName().substring(15)); } } catch (CannotBeChiseled e) { event.setCanceled(true); return; } } } else if (!player.isSneaking() || removeBits || drawnMode) { swingTool = toolItem.sculptBlocks(stack, player, player.worldObj, pos, side, hit, drawnStartPoint); ExtraBitManipulation.packetNetwork.sendToServer(new PacketSculpt(pos, side, hit, drawnStartPoint)); } if (drawnMode && !event.isButtonstate()) { drawnStartPoint = null; } } if (swingTool) player.swingArm(EnumHand.MAIN_HAND); event.setCanceled(true); } } else if (player.isSneaking() && event.isButtonstate() && removeBits) { SculptSettingsHelper.setBitStack(player, stack, true, null); if ((removeBits ? Configs.sculptSetBitWire : Configs.sculptSetBitSpade).shouldDisplayInChat()) { printChatMessageWithDeletion("Removing any/all bits"); } } else if (drawnMode) { drawnStartPoint = null; } } } } } if (!event.isCanceled() && event.getButton() == 1 && event.isButtonstate()) { ItemStack stack = player.inventory.getCurrentItem(); if (stack != null) { Item item = stack.getItem(); if (item != null && item instanceof ItemSculptingTool) { cycleMode(player, stack, !player.isSneaking()); } } } } private void cycleMode(EntityPlayer player, ItemStack stack, boolean forward) { int mode = SculptSettingsHelper.cycleData(SculptSettingsHelper.getMode(player, stack.getTagCompound()), forward, ItemSculptingTool.MODE_TITLES.length); SculptSettingsHelper.setMode(player, stack, mode); if (Configs.sculptMode.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getModeText(mode)); } } private void cycleDirection(EntityPlayer player, ItemStack stack, boolean forward) { NBTTagCompound nbt = stack.hasTagCompound() ? stack.getTagCompound() : new NBTTagCompound(); int direction = SculptSettingsHelper.getDirection(player, nbt); int shapeType = SculptSettingsHelper.getShapeType(player, nbt, ((ItemSculptingTool) stack.getItem()).isCurved()); int rotation = direction / 6; direction %= 6; if (!(shapeType == 4 && (forward ? rotation != 1 : rotation != 0)) && !(shapeType == 5 && (forward ? rotation != 3 : rotation != 0))) { direction = shapeType == 2 || shapeType > 3 ? (forward ? DIRECTION_FORWARD[direction] : DIRECTION_BACKWARD[direction]) : (forward ? AXIS_FORWARD[direction] : AXIS_BACKWARD[direction]); rotation = forward ? 0 : (shapeType == 4 ? 1 : 3); } else { rotation = shapeType == 4 ? (rotation == 0 ? 
1 : 0) : SculptSettingsHelper.cycleData(rotation, forward, 4); } direction += 6 * rotation; SculptSettingsHelper.setDirection(player, stack, direction); if (Configs.sculptDirection.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getDirectionText(direction, shapeType == 4 || shapeType == 5)); } } private void cycleShapeType(EntityPlayer player, ItemStack stack, Item item) { boolean isCurved = ((ItemSculptingTool) item).isCurved(); NBTTagCompound nbt = stack.hasTagCompound() ? stack.getTagCompound() : new NBTTagCompound(); int shapeType = SculptSettingsHelper.getShapeType(player, nbt, isCurved); shapeType = isCurved ? SHAPE_CURVED[shapeType] : SHAPE_FLAT[shapeType]; SculptSettingsHelper.setShapeType(player, stack, isCurved, shapeType); if ((isCurved ? Configs.sculptShapeTypeCurved : Configs.sculptShapeTypeFlat).shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getShapeTypeText(shapeType)); } } private void toggleBitGridTargeted(EntityPlayer player, ItemStack stack) { boolean targetBitGrid = !SculptSettingsHelper.isBitGridTargeted(player, stack.getTagCompound()); SculptSettingsHelper.setBitGridTargeted(player, stack, targetBitGrid); if (Configs.sculptTargetBitGridVertexes.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getBitGridTargetedText(targetBitGrid)); } } private void cycleSemiDiameter(EntityPlayer player, ItemStack stack, boolean forward) { int semiDiameter = SculptSettingsHelper.cycleData(SculptSettingsHelper.getSemiDiameter(player, stack.getTagCompound()), forward, Configs.maxSemiDiameter); SculptSettingsHelper.setSemiDiameter(player, stack, semiDiameter); if (Configs.sculptSemiDiameter.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getSemiDiameterText(player, stack.getTagCompound(), semiDiameter)); } } private void toggleHollowShape(EntityPlayer player, ItemStack stack, Item item) { boolean isWire = ((ItemSculptingTool) item).removeBits(); boolean isHollowShape = !SculptSettingsHelper.isHollowShape(player, stack.getTagCompound(), isWire); SculptSettingsHelper.setHollowShape(player, stack, isWire, isHollowShape); if ((isWire ? 
Configs.sculptHollowShapeWire : Configs.sculptHollowShapeSpade).shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getHollowShapeText(isHollowShape)); } } private void toggleOpenEnds(EntityPlayer player, ItemStack stack) { boolean areEndsOpen = !SculptSettingsHelper.areEndsOpen(player, stack.getTagCompound()); SculptSettingsHelper.setEndsOpen(player, stack, areEndsOpen); if (Configs.sculptOpenEnds.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getOpenEndsText(areEndsOpen)); } } private void cycleWallThickness(EntityPlayer player, ItemStack stack, boolean forward) { int wallThickness = SculptSettingsHelper.cycleData(SculptSettingsHelper.getWallThickness(player, stack.getTagCompound()), forward, Configs.maxWallThickness); SculptSettingsHelper.setWallThickness(player, stack, wallThickness); if (Configs.sculptWallThickness.shouldDisplayInChat()) { printChatMessageWithDeletion(SculptSettingsHelper.getWallThicknessText(wallThickness)); } } private void printChatMessageWithDeletion(String text) { GuiNewChat chatGUI = Minecraft.getMinecraft().ingameGUI.getChatGUI(); chatGUI.printChatMessageWithOptionalDeletion(new TextComponentString(text), 627250); } @SubscribeEvent public void cancelBoundingBoxDraw(DrawBlockHighlightEvent event) { ItemStack itemStack = event.getPlayer().inventory.getCurrentItem(); if (itemStack != null) { Item item = itemStack.getItem(); if (item != null && item instanceof ItemSculptingTool && SculptSettingsHelper.getMode(event.getPlayer(), itemStack.getTagCompound()) == 1) { event.setCanceled(true); } } } @SubscribeEvent public void renderBoxesSpheresAndOverlays(RenderWorldLastEvent event) { if (!Configs.disableOverlays) { EntityPlayer player = Minecraft.getMinecraft().thePlayer; World world = player.worldObj; ItemStack stack = player.getHeldItemMainhand(); if (stack != null) { RayTraceResult target = Minecraft.getMinecraft().objectMouseOver; if (target != null && target.typeOfHit.equals(RayTraceResult.Type.BLOCK) && stack.getItem() instanceof ItemBitToolBase) { IChiselAndBitsAPI api = ChiselsAndBitsAPIAccess.apiInstance; float ticks = event.getPartialTicks(); double playerX = player.lastTickPosX + (player.posX - player.lastTickPosX) * ticks; double playerY = player.lastTickPosY + (player.posY - player.lastTickPosY) * ticks; double playerZ = player.lastTickPosZ + (player.posZ - player.lastTickPosZ) * ticks; EnumFacing dir = target.sideHit; Tessellator t = Tessellator.getInstance(); VertexBuffer wr = t.getBuffer(); BlockPos pos = target.getBlockPos(); int x = pos.getX(); int y = pos.getY(); int z = pos.getZ(); double diffX = playerX - x; double diffY = playerY - y; double diffZ = playerZ - z; Vec3d hit = target.hitVec; if (stack.getItem() instanceof ItemBitWrench && api.isBlockChiseled(world, target.getBlockPos())) { int mode = !stack.hasTagCompound() ? 0 : stack.getTagCompound().getInteger(NBTKeys.MODE); frameCounter++; int side = dir.ordinal(); boolean upDown = side <= 1; boolean eastWest = side >= 4; boolean northSouth = !upDown && !eastWest; AxisAlignedBB box = new AxisAlignedBB(eastWest ? hit.xCoord : x, upDown ? hit.yCoord : y, northSouth ? hit.zCoord : z, eastWest ? hit.xCoord : x + 1, upDown ? hit.yCoord : y + 1, northSouth ? 
hit.zCoord : z + 1); int offsetX = Math.abs(dir.getFrontOffsetX()); int offsetY = Math.abs(dir.getFrontOffsetY()); int offsetZ = Math.abs(dir.getFrontOffsetZ()); double invOffsetX = offsetX ^ 1; double invOffsetY = offsetY ^ 1; double invOffsetZ = offsetZ ^ 1; boolean invertDirection = player.isSneaking(); GlStateManager.pushMatrix(); GlStateManager.disableLighting(); GlStateManager.enableAlpha(); GlStateManager.enableBlend(); GlStateManager.blendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA); GlStateManager.enableTexture2D(); GlStateManager.pushMatrix(); double angle = getInitialAngle(mode); if (mode == 3) { if (side % 2 == 1) angle += 180; if (side >= 4) angle -= 90; } else { if (mode == 0) { if (side % 2 == (invertDirection ? 0 : 1)) angle *= -1; } else { if (side < 2 || side > 3) angle *= -1; } if (eastWest) angle += 90; if (side == (mode == 1 ? 1 : 0) || side == 3 || side == 4) angle += 180; } double offsetX2 = 0.5 * invOffsetX; double offsetY2 = 0.5 * invOffsetY; double offsetZ2 = 0.5 * invOffsetZ; double mirTravel = mode == 1 ? Configs.mirrorAmplitude * Math.cos(Math.PI * 2 * frameCounter / Configs.mirrorPeriod) : 0; double mirTravel1 = mirTravel; double mirTravel2 = 0; boolean mirrorInversion = invertDirection && mode == 1; if (mirrorInversion && side <= 1 && player.getHorizontalFacing().ordinal() > 3) { angle += 90; mirTravel1 = 0; mirTravel2 = mirTravel; } translateAndRotateTexture(playerX, playerY, playerZ, dir, upDown, eastWest, offsetX, offsetY, offsetZ, angle, diffX, diffY, diffZ, offsetX2, offsetY2, offsetZ2, mirTravel1, mirTravel2); Minecraft.getMinecraft().renderEngine.bindTexture(mode == 0 ? ARROW_CYCLICAL : (mode == 1 ? ARROW_BIDIRECTIONAL : (mode == 2 ? CIRCLE : INVERSION))); float minU = 0; float maxU = 1; float minV = 0; float maxV = 1; if (mode == 0) { if (invertDirection) { float minU2 = minU; minU = maxU; maxU = minU2; } } else if (mode == 2) { EnumFacing dir2 = side <= 1 ? EnumFacing.WEST : (side <= 3 ? EnumFacing.WEST : EnumFacing.DOWN); box = contractBoxOrRenderArrows(true, t, wr, side, northSouth, dir2, box, invOffsetX, invOffsetY, invOffsetZ, invertDirection, minU, maxU, minV, maxV); } renderTexturedSide(t, wr, side, northSouth, box, minU, maxU, minV, maxV, 1); GlStateManager.popMatrix(); AxisAlignedBB box3 = world.getBlockState(pos).getSelectedBoundingBox(world, pos); for (int s = 0; s < 6; s++) { if (s != side) { GlStateManager.pushMatrix(); upDown = s <= 1; eastWest = s >= 4; northSouth = !upDown && !eastWest; dir = EnumFacing.getFront(s); box = new AxisAlignedBB(eastWest ? (s == 5 ? box3.maxX : box3.minX) : x, upDown ? (s == 1 ? box3.maxY : box3.minY) : y, northSouth ? (s == 3 ? box3.maxZ : box3.minZ) : z, eastWest ? (s == 4 ? box3.minX : box3.maxX) : x + 1, upDown ? (s == 0 ? box3.minY : box3.maxY) : y + 1, northSouth ? (s == 2 ? box3.minZ : box3.maxZ) : z + 1); angle = getInitialAngle(mode); boolean oppRotation = false; int mode2 = mode; if (mode != 3) { oppRotation = dir == EnumFacing.getFront(side).getOpposite(); if (mode == 0) { if (!oppRotation) { Minecraft.getMinecraft().renderEngine.bindTexture(ARROW_HEAD); angle = 90; if (side % 2 == 0) angle += 180; if (invertDirection) angle += 180; mode2 = 2; } else { Minecraft.getMinecraft().renderEngine.bindTexture(ARROW_CYCLICAL); mode2 = 0; } } else if (mode == 2) { if (!oppRotation) { Minecraft.getMinecraft().renderEngine.bindTexture(ARROW_HEAD); if (side == 0 ? s == 2 || s == 5 : (side == 1 ? s == 3 || s == 4 : (side == 2 ? s == 1 || s == 5 : (side == 3 ? s == 0 || s == 4 : (side == 4 ? 
s == 1 || s == 2 : s == 0 || s == 3))))) angle += 180; if (invertDirection) angle += 180; } else { Minecraft.getMinecraft().renderEngine.bindTexture(CIRCLE); } } } mirTravel1 = mirTravel; mirTravel2 = 0; if (mode != 3 && (((side <= 1 && mirrorInversion ? side > 1 : side <= 1) && s > 1) || ((mirrorInversion ? (oppRotation ? player.getHorizontalFacing().ordinal() > 3 : side > 3) : (side == 2 || side == 3)) && s <= 1))) { angle += 90; mirTravel1 = 0; mirTravel2 = mirTravel; } if (mode == 3) { if (s % 2 == 1) angle += 180; if (s >= 4) angle -= 90; } else { if (mode2 == 0) { if (s % 2 == (invertDirection ? 0 : 1)) angle *= -1; if (oppRotation) angle *= -1; } else { if (s < 2 || s > 3) angle *= -1; } if (eastWest) angle -= 90; if (s == (mode2 == 1 ? 1 : 0) || s == 3 || s == 5) angle += 180; } offsetX = Math.abs(dir.getFrontOffsetX()); offsetY = Math.abs(dir.getFrontOffsetY()); offsetZ = Math.abs(dir.getFrontOffsetZ()); invOffsetX = offsetX ^ 1; invOffsetY = offsetY ^ 1; invOffsetZ = offsetZ ^ 1; offsetX2 = 0.5 * invOffsetX; offsetY2 = 0.5 * invOffsetY; offsetZ2 = 0.5 * invOffsetZ; translateAndRotateTexture(playerX, playerY, playerZ, dir, upDown, eastWest, offsetX, offsetY, offsetZ, angle, diffX, diffY, diffZ, offsetX2, offsetY2, offsetZ2, mirTravel1, mirTravel2); minU = 0; maxU = 1; minV = 0; maxV = 1; if (mode2 == 0) { if (oppRotation) { minU = 1; maxU = 0; } if (invertDirection) { float minU2 = minU; minU = maxU; maxU = minU2; } } else if (mode2 == 2) { EnumFacing dir2 = side <= 1 ? (s == 2 || s == 3 ? EnumFacing.WEST : EnumFacing.DOWN) : (side >= 4 ? EnumFacing.WEST : (s <= 1 ? EnumFacing.WEST : EnumFacing.DOWN)); box = contractBoxOrRenderArrows(oppRotation, t, wr, side, northSouth, dir2, box, invOffsetX, invOffsetY, invOffsetZ, invertDirection, minU, maxU, minV, maxV); } if (mode2 != 2 || oppRotation) renderTexturedSide(t, wr, s, northSouth, box, minU, maxU, minV, maxV, 1); GlStateManager.popMatrix(); } } GlStateManager.enableLighting(); GlStateManager.disableBlend(); GlStateManager.enableTexture2D(); GlStateManager.popMatrix(); } else if (stack.getItem() instanceof ItemSculptingTool) { ItemSculptingTool toolItem = (ItemSculptingTool) stack.getItem(); boolean removeBits = toolItem.removeBits(); int mode = SculptSettingsHelper.getMode(player, stack.getTagCompound()); if (!removeBits || mode > 0 || api.canBeChiseled(world, target.getBlockPos())) { float hitX = (float) hit.xCoord - pos.getX(); float hitY = (float) hit.yCoord - pos.getY(); float hitZ = (float) hit.zCoord - pos.getZ(); IBitLocation bitLoc = api.getBitPos(hitX, hitY, hitZ, dir, pos, false); if (bitLoc != null) { NBTTagCompound nbt = stack.hasTagCompound() ? 
stack.getTagCompound() : new NBTTagCompound(); int x2 = bitLoc.getBitX(); int y2 = bitLoc.getBitY(); int z2 = bitLoc.getBitZ(); if (!toolItem.removeBits()) { x2 += dir.getFrontOffsetX(); y2 += dir.getFrontOffsetY(); z2 += dir.getFrontOffsetZ(); } boolean isDrawn = drawnStartPoint != null; boolean drawnBox = mode == 2 && isDrawn; int shapeType = SculptSettingsHelper.getShapeType(player, nbt, toolItem.isCurved()); boolean fixedNotSym = !drawnBox && shapeType == 2 || shapeType > 4; GlStateManager.enableBlend(); GlStateManager.tryBlendFuncSeparate(770, 771, 1, 0); GlStateManager.disableTexture2D(); GlStateManager.depthMask(false); double r = SculptSettingsHelper.getSemiDiameter(player, nbt) * Utility.PIXEL_D; ConfigShapeRenderPair configPair = Configs.itemShapeMap.get(toolItem); ConfigShapeRender configBox = configPair.boundingBox; AxisAlignedBB box = null, shapeBox = null; double x3 = x + x2 * Utility.PIXEL_D; double y3 = y + y2 * Utility.PIXEL_D; double z3 = z + z2 * Utility.PIXEL_D; if (configBox.renderInnerShape || configBox.renderOuterShape) { GlStateManager.pushMatrix(); GL11.glLineWidth(configBox.lineWidth); boolean inside = ItemSculptingTool.wasInsideClicked(dir, hit, pos); if (drawnBox) { double x4 = drawnStartPoint.xCoord; double y4 = drawnStartPoint.yCoord; double z4 = drawnStartPoint.zCoord; if (Math.max(x3, x4) == x3) { x3 += Utility.PIXEL_D; } else { x4 += Utility.PIXEL_D; } if (Math.max(y3, y4) == y3) { y3 += Utility.PIXEL_D; } else { y4 += Utility.PIXEL_D; } if (Math.max(z3, z4) == z3) { z3 += Utility.PIXEL_D; } else { z4 += Utility.PIXEL_D; } box = new AxisAlignedBB(x4, y4, z4, x3, y3, z3); } else { double f = 0; float x4 = 0, y4 = 0, z4 = 0; boolean targetBitGrid = SculptSettingsHelper.isBitGridTargeted(player, nbt); if (mode == 2) { r = 0; } else if (targetBitGrid) { f = Utility.PIXEL_D * 0.5; x4 = hitX < (Math.round(hitX/Utility.PIXEL_F) * Utility.PIXEL_F) ? 1 : -1; y4 = hitY < (Math.round(hitY/Utility.PIXEL_F) * Utility.PIXEL_F) ? 1 : -1; z4 = hitZ < (Math.round(hitZ/Utility.PIXEL_F) * Utility.PIXEL_F) ? 1 : -1; double offsetX = Math.abs(dir.getFrontOffsetX()); double offsetY = Math.abs(dir.getFrontOffsetY()); double offsetZ = Math.abs(dir.getFrontOffsetZ()); int s = dir.ordinal(); if (s % 2 == 0) { if (offsetX > 0) x4 *= -1; if (offsetY > 0) y4 *= -1; if (offsetZ > 0) z4 *= -1; } boolean su = s== 1 || s == 3; if (removeBits ? (!inside || !su) : (inside && su)) { if (offsetX > 0) x4 *= -1; if (offsetY > 0) y4 *= -1; if (offsetZ > 0) z4 *= -1; } r -= f; } box = new AxisAlignedBB(x - r, y - r, z - r, x + r + Utility.PIXEL_D, y + r + Utility.PIXEL_D, z + r + Utility.PIXEL_D) .offset(x2 * Utility.PIXEL_D + f * x4, y2 * Utility.PIXEL_D + f * y4, z2 * Utility.PIXEL_D + f * z4); if (targetBitGrid && mode != 2) { x3 = (box.maxX + box.minX) * 0.5 - f; y3 = (box.maxY + box.minY) * 0.5 - f; z3 = (box.maxZ + box.minZ) * 0.5 - f; } } if (fixedNotSym) { shapeBox = box.expand(0, 0, 0); } if (mode == 0) { BlockPos pos2 = !removeBits && !inside ? pos.offset(dir) : pos; AxisAlignedBB box2 = !removeBits ? 
new AxisAlignedBB(pos2) : world.getBlockState(pos2).getSelectedBoundingBox(world, pos2); if ((int) Math.round(box2.minX) != pos2.getX() || (int) Math.round(box2.minY) != pos2.getY() || (int) Math.round(box2.minZ) != pos2.getZ()) { box2 = box2.offset(pos2); } box = limitBox(box, box2); } double f = 0.0020000000949949026; if (configBox.renderOuterShape) { GlStateManager.color(configBox.red, configBox.green, configBox.blue, configBox.outerShapeAlpha); RenderGlobal.drawSelectionBoundingBox(box.expand(f, f, f).offset(-playerX, -playerY, -playerZ)); } if (configBox.renderInnerShape) { GlStateManager.color(configBox.red, configBox.green, configBox.blue, configBox.innerShapeAlpha); GlStateManager.depthFunc(GL11.GL_GREATER); RenderGlobal.drawSelectionBoundingBox(box.expand(f, f, f).offset(-playerX, -playerY, -playerZ)); GlStateManager.depthFunc(GL11.GL_LEQUAL); } GlStateManager.popMatrix(); } if (!fixedNotSym) { shapeBox = box.expand(0, 0, 0); } boolean isHollow = SculptSettingsHelper.isHollowShape(player, nbt, removeBits); boolean isOpen = isHollow && SculptSettingsHelper.areEndsOpen(player, nbt); renderEnvelopedShapes(player, shapeType, nbt, playerX, playerY, playerZ, isDrawn, drawnBox, r, configPair, shapeBox, x3, y3, z3, 0, isOpen); float wallThickness = SculptSettingsHelper.getWallThickness(player, nbt) * Utility.PIXEL_F; if (wallThickness > 0 && isHollow && !(mode == 2 && !drawnBox)) { renderEnvelopedShapes(player, shapeType, nbt, playerX, playerY, playerZ, isDrawn, drawnBox, r, configPair, shapeBox, x3, y3, z3, wallThickness, isOpen); } GlStateManager.depthMask(true); GlStateManager.enableTexture2D(); GlStateManager.disableBlend(); } } } } } } } private void renderEnvelopedShapes(EntityPlayer player, int shapeType, NBTTagCompound nbt, double playerX, double playerY, double playerZ, boolean isDrawn, boolean drawnBox, double r, ConfigShapeRenderPair configPair, AxisAlignedBB box, double x, double y, double z, double contraction, boolean isOpen) { ConfigShapeRender configShape = configPair.envelopedShape; if (configShape.renderInnerShape || configShape.renderOuterShape) { double a = 0, b = 0, c = 0; /* 0 = sphere * 1 = cylinder * 2 = cone * 3 = cube * 4 = triangular prism * 5 = triangular pyramid * 6 = square pyramid */ int dir = SculptSettingsHelper.getDirection(player, nbt); // int rotation = dir / 6; dir %= 6; boolean notFullSym = shapeType != 0 && shapeType != 3; boolean notSym = shapeType == 2 || shapeType > 4; double ri = r + Utility.PIXEL_D * 0.5; r = Math.max(ri - contraction, 0); boolean drawnNotSym = notSym && drawnBox; double base = 0; double v; if (drawnBox || notSym) { double f = 0.5; double minX = box.minX * f; double minY = box.minY * f; double minZ = box.minZ * f; double maxX = box.maxX * f; double maxY = box.maxY * f; double maxZ = box.maxZ * f; double x2 = maxX - minX; double y2 = maxY - minY; double z2 = maxZ - minZ; if (drawnNotSym) { if (dir == 2 || dir == 3) { v = y2; y2 = z2; z2 = v; } else if (dir > 3) { v = y2; y2 = x2; x2 = v; } } if (notSym && contraction > 0) { if (!isOpen) base = contraction; y2 *= 2; double y2sq = y2 * y2; double aInset = (Math.sqrt(x2 * x2 + y2sq) * contraction) / x2 + base; double cInset = (Math.sqrt(z2 * z2 + y2sq) * contraction) / z2 + base; a = Math.max((y2 - aInset) * (x2 / y2), 0); c = Math.max((y2 - cInset) * (z2 / y2), 0); contraction = Math.min(aInset - base, cInset - base); b = Math.max(y2 * 0.5 - contraction * 0.5 - base * 0.5, 0); } else { a = Math.max(x2 - (!isOpen || !notFullSym || dir < 4 ? 
contraction : 0), 0); c = Math.max(z2 - (!isOpen || !notFullSym || dir != 2 && dir != 3 ? contraction : 0), 0); b = Math.max(y2 - (!isOpen || !notFullSym || dir > 1 ? contraction : 0), 0); } r = Math.max(Math.max(a, b), c); x = maxX + minX; y = maxY + minY; z = maxZ + minZ; if (drawnBox) { if (notSym || !notFullSym) { if (dir < 2 || dir > 3 || !notFullSym) { v = b; b = c; c = v; } } else { if (dir < 2) { v = b; b = c; c = v; } else if (dir > 3) { v = a; a = c; c = v; } else { v = b; b = a; a = v; } } } } else { a = b = c = r; if (b > 0 && notFullSym && isOpen) { b += contraction * (isDrawn ? 0 : 1); } } Quadric shape = shapeType > 2 ? new Prism(shapeType > 4, shapeType == 4 || shapeType == 5) : (notFullSym ? new Cylinder() : new Sphere()); shape.setDrawStyle(GLU.GLU_LINE); Quadric lid = new Disk(); lid.setDrawStyle(GLU.GLU_LINE); GlStateManager.pushMatrix(); GL11.glLineWidth(configShape.lineWidth); double x2 = x - playerX; double y2 = y - playerY; double z2 = z - playerZ; if (!notSym && !isDrawn) { double hp = Utility.PIXEL_D * 0.5; x2 += hp; y2 += hp; z2 += hp; } if (notFullSym) { if (isOpen && contraction > 0 && !notSym) { double offset = contraction * (notSym ? 0.5 : (drawnBox ? 0 : -1)); if (dir != 3) { y2 += dir == 0 ? offset : -offset; } if (dir > 2) { x2 += dir == 5 ? -offset : offset; } if (dir == 2 || dir == 3) { z2 += dir == 2 ? offset : -offset; } } } GlStateManager.translate(x2, y2, z2); int rot2 = dir; if (!(drawnNotSym && dir == 2)) { if (notFullSym && rot2 != 1) { int angle = 90; if (rot2 == 3) { rot2 = 0; angle = 180; if (!(drawnNotSym && dir == 3)) { GlStateManager.rotate(90, 0, 0, 1); } } else if (rot2 > 1) { rot2 %= 4; } else { rot2 = rot2 ^ 1 + 4; } Vec3i vec = EnumFacing.getFront(rot2).getOpposite().getDirectionVec(); GlStateManager.rotate(angle, vec.getX(), vec.getY(), vec.getZ()); } else { GlStateManager.rotate(90, 1, 0, 0); } } boolean openSym = notFullSym && !notSym && isOpen && !isDrawn; if (notFullSym) { double offset1 = 0; double offset2 = 0; double r2 = r; if (notSym) { r2 -= contraction * 0.5 - base * 0.5; } else if (openSym) { double m = -contraction; if (dir == 0) m *= 2; if (dir != 1) r -= m; if (dir > 1) { if (dir < 3) { offset1 = m; } else { offset2 = m; } } } GlStateManager.translate(offset1, offset2, -r2); } if (openSym) { v = b; b = c; c = v; } if (drawnNotSym) { if (dir == 2 || dir == 3) { v = b; b = c; c = v; } else if (dir > 3) { v = b; b = a; a = v; } } if (notFullSym && drawnBox) { if (b > c && b > a) { GlStateManager.translate(0, 0, b - c); } else if (a > c && a >= b) { GlStateManager.translate(0, 0, a - c); } } GlStateManager.scale(a / ri, b / ri, c / ri); if (configShape.renderOuterShape) { drawEnvelopedShapes(ri, configShape, shapeType, shape, lid, true, notSym, isOpen); } if (configShape.renderInnerShape) { GlStateManager.depthFunc(GL11.GL_GREATER); drawEnvelopedShapes(ri, configShape, shapeType, shape, lid, false, notSym, isOpen); GlStateManager.depthFunc(GL11.GL_LEQUAL); } GlStateManager.popMatrix(); } } private void drawEnvelopedShapes(double r, ConfigShapeRender configShape, int shapeType, Quadric shape, Quadric lid, boolean isOuter, boolean isCylinder, boolean isOpen) { GlStateManager.pushMatrix(); drawEnvelopedShape(shape, r, isOuter, configShape, isCylinder, isOpen); if (shapeType > 0 && shapeType < 3 && !isOpen) { if (shapeType == 1) { drawEnvelopedShape(lid, r, isOuter, configShape, isCylinder, isOpen); } GlStateManager.translate(0, 0, r * 2); drawEnvelopedShape(lid, r, isOuter, configShape, isCylinder, isOpen); } 
GlStateManager.popMatrix(); } private void drawEnvelopedShape(Quadric shape, double radius, boolean isOuter, ConfigShapeRender configShape, boolean isCone, boolean isOpen) { GlStateManager.pushMatrix(); GlStateManager.color(configShape.red, configShape.green, configShape.blue, isOuter ? configShape.outerShapeAlpha : configShape.innerShapeAlpha); float r = (float) radius; if (shape instanceof Prism) { ((Prism) shape).draw(r, isOpen); } else if (shape instanceof Sphere) { ((Sphere) shape).draw(r, 32, 32); } else if (shape instanceof Cylinder) { ((Cylinder) shape).draw(isCone ? 0 : r, r, r * 2, 32, 32); } else if (shape instanceof Disk) { ((Disk) shape).draw(0, r, 32, 32); } GlStateManager.popMatrix(); } private AxisAlignedBB limitBox(AxisAlignedBB box, AxisAlignedBB mask) { double d0 = Math.max(box.minX, mask.minX); double d1 = Math.max(box.minY, mask.minY); double d2 = Math.max(box.minZ, mask.minZ); double d3 = Math.min(box.maxX, mask.maxX); double d4 = Math.min(box.maxY, mask.maxY); double d5 = Math.min(box.maxZ, mask.maxZ); return new AxisAlignedBB(d0, d1, d2, d3, d4, d5); } private double getInitialAngle(int mode) { return mode == 0 ? (frameCounter * (360 / Configs.rotationPeriod)) % 360 : 0; } private void translateAndRotateTexture(double playerX, double playerY, double playerZ, EnumFacing dir, boolean upDown, boolean eastWest, int offsetX, int offsetY, int offsetZ, double angle, double diffX, double diffY, double diffZ, double offsetX2, double offsetY2, double offsetZ2, double mirTravel1, double mirTravel2) { double cos = Math.cos(Math.toRadians(angle)); double sin = Math.sin(Math.toRadians(angle)); if (upDown) { GL11.glTranslated(diffX * cos + diffZ * sin - diffX + mirTravel1, 0, -diffX * sin + diffZ * cos - diffZ + mirTravel2); } else if (eastWest) { GL11.glTranslated(0, diffY * cos - diffZ * sin - diffY + mirTravel2, diffY * sin + diffZ * cos - diffZ + mirTravel1); } else { GL11.glTranslated(diffX * cos - diffY * sin - diffX + mirTravel1, diffX * sin + diffY * cos - diffY + mirTravel2, 0); } GL11.glTranslated(offsetX2, offsetY2, offsetZ2); GL11.glRotated(angle, offsetX, offsetY, offsetZ); GL11.glTranslated(-offsetX2, -offsetY2, -offsetZ2); GL11.glTranslated(-playerX + 0.002 * dir.getFrontOffsetX(), -playerY + 0.002 * dir.getFrontOffsetY(), -playerZ + 0.002 * dir.getFrontOffsetZ()); } private AxisAlignedBB contractBoxOrRenderArrows(boolean contractBox, Tessellator t, VertexBuffer vb, int side, boolean northSouth, EnumFacing dir, AxisAlignedBB box, double invOffsetX, double invOffsetY, double invOffsetZ, boolean invertDirection, float minU, float maxU, float minV, float maxV) { if (contractBox) { double amount = (frameCounter % Configs.translationScalePeriod) / Configs.translationScalePeriod; amount /= invertDirection ? -2 : 2; if (invertDirection && Configs.translationScalePeriod > 1) amount += 0.5; box = box.expand(-amount * invOffsetX, -amount * invOffsetY, -amount * invOffsetZ); } else if (Configs.translationDistance > 0) { double distance = Configs.translationDistance; double fadeDistance = Configs.translationFadeDistance; double period = Configs.translationMovementPeriod; double offsetDistance = Configs.translationOffsetDistance; int timeOffset = offsetDistance > 0 ? (int) (period / (distance / offsetDistance)) : 0; if (timeOffset > period / 3.0) timeOffset = (int) (period / 3.0); if (fadeDistance > distance / 2.0) fadeDistance = distance / 2.0; int n = offsetDistance == 0 || period == 1 ? 
1 : 3; for (int i = 0; i < n; i++) { double amount = ((frameCounter + timeOffset * i) % period) / (period / (distance * 100.0) * 100.0); double alpha = 1; if (period > 1) { if (amount < fadeDistance) { alpha = amount / fadeDistance; } else if (amount > distance - fadeDistance) { alpha = (distance - amount) / fadeDistance; } amount -= distance / 2.0; } AxisAlignedBB box2 = new AxisAlignedBB(box.minX, box.minY, box.minZ, box.maxX, box.maxY, box.maxZ) .offset(amount * dir.getFrontOffsetX(), amount * dir.getFrontOffsetY(), amount * dir.getFrontOffsetZ()); renderTexturedSide(t, vb, side, northSouth, box2, minU, maxU, minV, maxV, alpha); } } else { renderTexturedSide(t, vb, side, northSouth, box, minU, maxU, minV, maxV, 1); } return box; } private void renderTexturedSide(Tessellator t, VertexBuffer vb, int side, boolean northSouth, AxisAlignedBB box, float minU, float maxU, float minV, float maxV, double alpha) { GL11.glColor4d(1, 1, 1, alpha); if (side == 1 || side == 3 || side == 4) { vb.begin(7, DefaultVertexFormats.POSITION_TEX); vb.pos(box.minX, box.minY, box.maxZ).tex(maxU, minV).endVertex(); vb.pos(box.maxX, northSouth ? box.minY : box.maxY, box.maxZ).tex(minU, minV).endVertex(); vb.pos(box.maxX, box.maxY, box.minZ).tex(minU, maxV).endVertex(); vb.pos(box.minX, northSouth ? box.maxY : box.minY, box.minZ).tex(maxU, maxV).endVertex(); t.draw(); vb.begin(7, DefaultVertexFormats.POSITION_TEX); vb.pos(box.maxX, northSouth ? box.minY : box.maxY, box.maxZ).tex(minU, minV).endVertex(); vb.pos(box.minX, box.minY, box.maxZ).tex(maxU, minV).endVertex(); vb.pos(box.minX, northSouth ? box.maxY : box.minY, box.minZ).tex(maxU, maxV).endVertex(); vb.pos(box.maxX, box.maxY, box.minZ).tex(minU, maxV).endVertex(); t.draw(); } else { vb.begin(7, DefaultVertexFormats.POSITION_TEX); vb.pos(box.minX, northSouth ? box.maxY : box.minY, box.minZ).tex(maxU, minV).endVertex(); vb.pos(box.maxX, box.maxY, box.minZ).tex(minU, minV).endVertex(); vb.pos(box.maxX, northSouth ? box.minY : box.maxY, box.maxZ).tex(minU, maxV).endVertex(); vb.pos(box.minX, box.minY, box.maxZ).tex(maxU, maxV).endVertex(); t.draw(); vb.begin(7, DefaultVertexFormats.POSITION_TEX); vb.pos(box.maxX, box.maxY, box.minZ).tex(minU, minV).endVertex(); vb.pos(box.minX, northSouth ? box.maxY : box.minY, box.minZ).tex(maxU, minV).endVertex(); vb.pos(box.minX, box.minY, box.maxZ).tex(maxU, maxV).endVertex(); vb.pos(box.maxX, northSouth ? box.minY : box.maxY, box.maxZ).tex(minU, maxV).endVertex(); t.draw(); } } }
Changed variable name.
src/main/java/com/phylogeny/extrabitmanipulation/client/eventhandler/ClientEventHandler.java
Changed variable name.
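The ClientEventHandler source above intercepts mouse input through Forge's @SubscribeEvent hooks before vanilla handling runs. As a reading aid, here is a minimal sketch of that interception pattern using only calls that appear in the file; the package and class names are hypothetical, the handler body is heavily simplified, and registration on the Forge event bus is omitted.

```java
package com.example.sketch; // hypothetical package, for illustration only

import net.minecraft.client.Minecraft;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraftforge.client.event.MouseEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;

public class MouseScrollSketch
{
    // Swallows mouse-wheel input while the player sneaks with an item in hand,
    // mirroring the "check held item, then cancel the event" pattern above.
    @SubscribeEvent
    public void onMouse(MouseEvent event)
    {
        EntityPlayer player = Minecraft.getMinecraft().thePlayer;
        if (player == null || event.getDwheel() == 0)
            return;

        ItemStack stack = player.getHeldItemMainhand();
        if (stack != null && player.isSneaking())
        {
            // A real handler would cycle a tool setting here (see the code above);
            // this sketch only cancels the event so vanilla hotbar scrolling is skipped.
            event.setCanceled(true);
        }
    }
}
```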
Java
apache-2.0
15c2aeae9aad0da42bfaa90fa92c6b098a48214e
0
designreuse/essentials,onehippo/essentials
/* * Copyright 2013 Hippo B.V. (http://www.onehippo.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onehippo.cms7.essentials.dashboard.instruction; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayDeque; import java.util.Deque; import java.util.Map; import java.util.Set; import javax.inject.Inject; import javax.inject.Named; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.onehippo.cms7.essentials.dashboard.ctx.PluginContext; import org.onehippo.cms7.essentials.dashboard.event.InstructionEvent; import org.onehippo.cms7.essentials.dashboard.event.MessageEvent; import org.onehippo.cms7.essentials.dashboard.instructions.InstructionStatus; import org.onehippo.cms7.essentials.dashboard.utils.EssentialConst; import org.onehippo.cms7.essentials.dashboard.utils.TemplateUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import com.google.common.base.Strings; import com.google.common.collect.ImmutableSet; import com.google.common.eventbus.EventBus; /** * @version "$Id$" */ @Component @XmlRootElement(name = "file", namespace = EssentialConst.URI_ESSENTIALS_INSTRUCTIONS) public class FileInstruction extends PluginInstruction { public static final Set<String> VALID_ACTIONS = new ImmutableSet.Builder<String>() .add(COPY) .add(DELETE) .build(); private static final Logger log = LoggerFactory.getLogger(FileInstruction.class); private String message; @Inject private EventBus eventBus; @Value("${instruction.message.file.delete}") private String messageDelete; @Value("${instruction.message.file.copy}") private String messageCopy; @Value("${instruction.message.file.copy.error}") private String messageCopyError; @Value("${instruction.message.folder.create}") private String messageFolderCreate; private boolean overwrite; private String source; private String target; private String action; private String folderMessage; private String createdFolders; private String createdFoldersTarget; private PluginContext context; @Override public InstructionStatus process(final PluginContext context, final InstructionStatus previousStatus) { log.debug("executing FILE Instruction {}", this); processPlaceholders(context.getPlaceholderData()); this.context = context; if (!valid()) { eventBus.post(new MessageEvent("Invalid instruction descriptor: " + toString())); eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } // check action: if (action.equals(COPY)) { return copy(); } else if (action.equals(DELETE)) { return delete(); } eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } private InstructionStatus copy() { final File destination = new File(target); if (!overwrite && destination.exists()) { log.info("File already 
exists {}", destination); eventBus.post(new InstructionEvent(this)); return InstructionStatus.SKIPPED; } File file = new File(source); if (!file.exists()) { // try to read as resource: final InputStream stream = getClass().getClassLoader().getResourceAsStream(source); if (stream != null) { try { if (!destination.exists()) { //Recursively creates parent directories in case they don't exist yet Deque<String> directories = new ArrayDeque<>(); String parent = destination.getParent(); while (!new File(parent).exists()) { directories.push(parent); parent = new File(parent).getParent(); } if (!directories.isEmpty()) { folderMessage = directories.size() > 1 ? directories.size() - 1 + " directories" : "directory"; createdFolders = directories.getLast().substring(directories.getFirst().length()); createdFoldersTarget = directories.getLast(); Files.createDirectories(new File(directories.getLast()).toPath()); eventBus.post(new InstructionEvent(messageFolderCreate)); } Files.createFile(destination.toPath()); } // replace file placeholders if needed: if (isBinary()) { FileUtils.copyInputStreamToFile(stream, destination); } else { final String replacedData = TemplateUtils.injectTemplate(source, context.getPlaceholderData(), getClass()); FileUtils.copyInputStreamToFile(IOUtils.toInputStream(replacedData), destination); } sendEvents(); return InstructionStatus.SUCCESS; } catch (IOException e) { log.error("Error while copy resource", e); } finally { IOUtils.closeQuietly(stream); } } log.error("Source file doesn't exists: {}", file); message = messageCopyError; eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } try { FileUtils.copyFile(file, destination); sendEvents(); return InstructionStatus.SUCCESS; } catch (IOException e) { log.error("Error creating file", e); } eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } private boolean isBinary() { return source.endsWith(".png") || source.endsWith(".jpeg"); } private InstructionStatus delete() { try { Path path = new File(target).toPath(); final boolean deleted = Files.deleteIfExists(path); if (deleted) { sendEvents(); log.debug("Deleted file {}", target); return InstructionStatus.SUCCESS; } else { log.debug("File not deleted {}", target); eventBus.post(new InstructionEvent(this)); return InstructionStatus.SKIPPED; } } catch (IOException e) { log.error("Error deleting file", e); } eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } @Override public void processPlaceholders(final Map<String, Object> data) { final String myTarget = TemplateUtils.replaceTemplateData(target, data); if (myTarget != null) { target = myTarget; } // final String mySource = TemplateUtils.replaceTemplateData(source, data); if (mySource != null) { source = mySource; } // add local data data.put(EssentialConst.PLACEHOLDER_SOURCE, source); data.put(EssentialConst.PLACEHOLDER_TARGET, target); //TODO check what Wicket can offer regarding placeholders and localization, it's probably reusable data.put("folderMessage", folderMessage); data.put("createdFolders", createdFolders); data.put("createdFoldersTarget", createdFoldersTarget); // setup messages: if (Strings.isNullOrEmpty(message)) { // check message based on action: if (action.equals(COPY)) { message = messageCopy; } else if (action.equals(DELETE)) { message = messageDelete; } } super.processPlaceholders(data); // messageCopyError = TemplateUtils.replaceTemplateData(messageCopyError, data); message = TemplateUtils.replaceTemplateData(message, data); } private 
boolean valid() { if (Strings.isNullOrEmpty(action) || !VALID_ACTIONS.contains(action) || Strings.isNullOrEmpty(target)) { return false; } if (action.equals(COPY) && (Strings.isNullOrEmpty(source))) { return false; } return true; } @XmlAttribute public boolean isOverwrite() { return overwrite; } public void setOverwrite(final boolean overwrite) { this.overwrite = overwrite; } @XmlAttribute public String getSource() { return source; } public void setSource(final String source) { this.source = source; } @XmlAttribute public String getTarget() { return target; } public void setTarget(final String target) { this.target = target; } @XmlAttribute @Override public String getMessage() { return message; } @Override public void setMessage(final String message) { this.message = message; } @XmlAttribute @Override public String getAction() { return action; } @Override public void setAction(final String action) { this.action = action; } @Override public String toString() { final StringBuilder sb = new StringBuilder("FileInstruction{"); sb.append("message='").append(message).append('\''); sb.append(", overwrite=").append(overwrite); sb.append(", source='").append(source).append('\''); sb.append(", target='").append(target).append('\''); sb.append(", action='").append(action).append('\''); sb.append('}'); return sb.toString(); } }
plugin-api/implementation/src/main/java/org/onehippo/cms7/essentials/dashboard/instruction/FileInstruction.java
/* * Copyright 2013 Hippo B.V. (http://www.onehippo.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onehippo.cms7.essentials.dashboard.instruction; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayDeque; import java.util.Deque; import java.util.Map; import java.util.Set; import javax.inject.Inject; import javax.inject.Named; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.onehippo.cms7.essentials.dashboard.ctx.PluginContext; import org.onehippo.cms7.essentials.dashboard.event.InstructionEvent; import org.onehippo.cms7.essentials.dashboard.event.MessageEvent; import org.onehippo.cms7.essentials.dashboard.instructions.InstructionStatus; import org.onehippo.cms7.essentials.dashboard.utils.EssentialConst; import org.onehippo.cms7.essentials.dashboard.utils.TemplateUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import com.google.common.base.Strings; import com.google.common.collect.ImmutableSet; import com.google.common.eventbus.EventBus; /** * @version "$Id$" */ //@Component @XmlRootElement(name = "file", namespace = EssentialConst.URI_ESSENTIALS_INSTRUCTIONS) public class FileInstruction extends PluginInstruction { public static final Set<String> VALID_ACTIONS = new ImmutableSet.Builder<String>() .add(COPY) .add(DELETE) .build(); private static final Logger log = LoggerFactory.getLogger(FileInstruction.class); private String message; @Inject private EventBus eventBus; @Value("${instruction.message.file.delete}") private String messageDelete; @Value("${instruction.message.file.copy}") private String messageCopy; @Value("${instruction.message.file.copy.error}") private String messageCopyError; @Value("#{instruction.message.folder.create}") private String messageFolderCreate; private boolean overwrite; private String source; private String target; private String action; private String folderMessage; private String createdFolders; private String createdFoldersTarget; private PluginContext context; @Override public InstructionStatus process(final PluginContext context, final InstructionStatus previousStatus) { log.debug("executing FILE Instruction {}", this); processPlaceholders(context.getPlaceholderData()); this.context = context; if (!valid()) { eventBus.post(new MessageEvent("Invalid instruction descriptor: " + toString())); eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } // check action: if (action.equals(COPY)) { return copy(); } else if (action.equals(DELETE)) { return delete(); } eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } private InstructionStatus copy() { final File destination = new File(target); if (!overwrite && destination.exists()) { log.info("File already 
exists {}", destination); eventBus.post(new InstructionEvent(this)); return InstructionStatus.SKIPPED; } File file = new File(source); if (!file.exists()) { // try to read as resource: final InputStream stream = getClass().getClassLoader().getResourceAsStream(source); if (stream != null) { try { if (!destination.exists()) { //Recursively creates parent directories in case they don't exist yet Deque<String> directories = new ArrayDeque<>(); String parent = destination.getParent(); while (!new File(parent).exists()) { directories.push(parent); parent = new File(parent).getParent(); } if (!directories.isEmpty()) { folderMessage = directories.size() > 1 ? directories.size() - 1 + " directories" : "directory"; createdFolders = directories.getLast().substring(directories.getFirst().length()); createdFoldersTarget = directories.getLast(); Files.createDirectories(new File(directories.getLast()).toPath()); eventBus.post(new InstructionEvent(messageFolderCreate)); } Files.createFile(destination.toPath()); } // replace file placeholders if needed: if (isBinary()) { FileUtils.copyInputStreamToFile(stream, destination); } else { final String replacedData = TemplateUtils.injectTemplate(source, context.getPlaceholderData(), getClass()); FileUtils.copyInputStreamToFile(IOUtils.toInputStream(replacedData), destination); } sendEvents(); return InstructionStatus.SUCCESS; } catch (IOException e) { log.error("Error while copy resource", e); } finally { IOUtils.closeQuietly(stream); } } log.error("Source file doesn't exists: {}", file); message = messageCopyError; eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } try { FileUtils.copyFile(file, destination); sendEvents(); return InstructionStatus.SUCCESS; } catch (IOException e) { log.error("Error creating file", e); } eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } private boolean isBinary() { return source.endsWith(".png") || source.endsWith(".jpeg"); } private InstructionStatus delete() { try { Path path = new File(target).toPath(); final boolean deleted = Files.deleteIfExists(path); if (deleted) { sendEvents(); log.debug("Deleted file {}", target); return InstructionStatus.SUCCESS; } else { log.debug("File not deleted {}", target); eventBus.post(new InstructionEvent(this)); return InstructionStatus.SKIPPED; } } catch (IOException e) { log.error("Error deleting file", e); } eventBus.post(new InstructionEvent(this)); return InstructionStatus.FAILED; } @Override public void processPlaceholders(final Map<String, Object> data) { final String myTarget = TemplateUtils.replaceTemplateData(target, data); if (myTarget != null) { target = myTarget; } // final String mySource = TemplateUtils.replaceTemplateData(source, data); if (mySource != null) { source = mySource; } // add local data data.put(EssentialConst.PLACEHOLDER_SOURCE, source); data.put(EssentialConst.PLACEHOLDER_TARGET, target); //TODO check what Wicket can offer regarding placeholders and localization, it's probably reusable data.put("folderMessage", folderMessage); data.put("createdFolders", createdFolders); data.put("createdFoldersTarget", createdFoldersTarget); // setup messages: if (Strings.isNullOrEmpty(message)) { // check message based on action: if (action.equals(COPY)) { message = messageCopy; } else if (action.equals(DELETE)) { message = messageDelete; } } super.processPlaceholders(data); // messageCopyError = TemplateUtils.replaceTemplateData(messageCopyError, data); message = TemplateUtils.replaceTemplateData(message, data); } private 
boolean valid() { if (Strings.isNullOrEmpty(action) || !VALID_ACTIONS.contains(action) || Strings.isNullOrEmpty(target)) { return false; } if (action.equals(COPY) && (Strings.isNullOrEmpty(source))) { return false; } return true; } @XmlAttribute public boolean isOverwrite() { return overwrite; } public void setOverwrite(final boolean overwrite) { this.overwrite = overwrite; } @XmlAttribute public String getSource() { return source; } public void setSource(final String source) { this.source = source; } @XmlAttribute public String getTarget() { return target; } public void setTarget(final String target) { this.target = target; } @XmlAttribute @Override public String getMessage() { return message; } @Override public void setMessage(final String message) { this.message = message; } @XmlAttribute @Override public String getAction() { return action; } @Override public void setAction(final String action) { this.action = action; } @Override public String toString() { final StringBuilder sb = new StringBuilder("FileInstruction{"); sb.append("message='").append(message).append('\''); sb.append(", overwrite=").append(overwrite); sb.append(", source='").append(source).append('\''); sb.append(", target='").append(target).append('\''); sb.append(", action='").append(action).append('\''); sb.append('}'); return sb.toString(); } }
- fix file instruction test
plugin-api/implementation/src/main/java/org/onehippo/cms7/essentials/dashboard/instruction/FileInstruction.java
- fix file instruction test
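The diff recorded in this FileInstruction entry re-enables the @Component annotation and changes @Value("#{instruction.message.folder.create}") to @Value("${instruction.message.folder.create}"). A short, hedged sketch of the difference between the two syntaxes (class and field names are illustrative, not from the project): "${...}" is a property placeholder resolved against the configured property sources, while "#{...}" is a SpEL expression, so a bare #{instruction.message.folder.create} is parsed as bean/property navigation rather than a key lookup and does not resolve to the property value.

```java
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Illustrative component showing the two @Value syntaxes side by side.
@Component
public class MessageHolder
{
    // Property placeholder: looks up the key in the Environment / property sources.
    @Value("${instruction.message.folder.create}")
    private String fromProperties;

    // SpEL expression: evaluated as an expression; here it reads a system property
    // explicitly, which is how a key lookup has to be spelled in SpEL.
    @Value("#{systemProperties['instruction.message.folder.create']}")
    private String fromSystemProperties;
}
```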
Java
apache-2.0
6ec875300a42f03cb6ad65b15aaaa14a89feeed1
0
kalaspuffar/pdfbox,apache/pdfbox
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.tools; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.encryption.AccessPermission; import org.apache.pdfbox.pdmodel.encryption.PublicKeyProtectionPolicy; import org.apache.pdfbox.pdmodel.encryption.PublicKeyRecipient; import org.apache.pdfbox.pdmodel.encryption.StandardProtectionPolicy; /** * This will read a document from the filesystem, encrypt it and and then write * the results to the filesystem. <br/><br/> * * @author Ben Litchfield */ public class Encrypt { private Encrypt() { } /** * This is the entry point for the application. * * @param args The command-line arguments. * * @throws Exception If there is an error decrypting the document. */ public static void main( String[] args ) throws Exception { // suppress the Dock icon on OS X System.setProperty("apple.awt.UIElement", "true"); Encrypt encrypt = new Encrypt(); encrypt.encrypt( args ); } private void encrypt( String[] args ) throws Exception { if( args.length < 1 ) { usage(); } else { AccessPermission ap = new AccessPermission(); String infile = null; String outfile = null; String certFile = null; String userPassword = ""; String ownerPassword = ""; int keyLength = 40; PDDocument document = null; try { for( int i=0; i<args.length; i++ ) { String key = args[i]; if( key.equals( "-O" ) ) { ownerPassword = args[++i]; } else if( key.equals( "-U" ) ) { userPassword = args[++i]; } else if( key.equals( "-canAssemble" ) ) { ap.setCanAssembleDocument(args[++i].equalsIgnoreCase( "true" )); } else if( key.equals( "-canExtractContent" ) ) { ap.setCanExtractContent( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canExtractForAccessibility" ) ) { ap.setCanExtractForAccessibility( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canFillInForm" ) ) { ap.setCanFillInForm( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canModify" ) ) { ap.setCanModify( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canModifyAnnotations" ) ) { ap.setCanModifyAnnotations( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canPrint" ) ) { ap.setCanPrint( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canPrintDegraded" ) ) { ap.setCanPrintDegraded( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-certFile" ) ) { certFile = args[++i]; } else if( key.equals( "-keyLength" ) ) { try { keyLength = Integer.parseInt( args[++i] ); } catch( NumberFormatException e ) { throw new NumberFormatException( "Error: -keyLength is not an integer '" + args[i] + "'", e ); } } else 
if( infile == null ) { infile = key; } else if( outfile == null ) { outfile = key; } else { usage(); } } if( infile == null ) { usage(); } if( outfile == null ) { outfile = infile; } document = PDDocument.load( new File(infile) ); if( !document.isEncrypted() ) { if( certFile != null ) { PublicKeyProtectionPolicy ppp = new PublicKeyProtectionPolicy(); PublicKeyRecipient recip = new PublicKeyRecipient(); recip.setPermission(ap); CertificateFactory cf = CertificateFactory.getInstance("X.509"); InputStream inStream = new FileInputStream(certFile); X509Certificate certificate = (X509Certificate)cf.generateCertificate(inStream); inStream.close(); recip.setX509(certificate); ppp.addRecipient(recip); ppp.setEncryptionKeyLength(keyLength); document.protect(ppp); } else { StandardProtectionPolicy spp = new StandardProtectionPolicy(ownerPassword, userPassword, ap); spp.setEncryptionKeyLength(keyLength); document.protect(spp); } document.save( outfile ); } else { System.err.println( "Error: Document is already encrypted." ); } } finally { if( document != null ) { document.close(); } } } } /** * This will print a usage message. */ private static void usage() { System.err.println( "usage: java -jar pdfbox-app-x.y.z.jar Encrypt [options] <inputfile> [outputfile]" ); System.err.println( " -O <password> " + "Set the owner password(ignored if cert is set)" ); System.err.println( " -U <password> " + "Set the user password(ignored if cert is set)" ); System.err.println( " -certFile <path to cert> Path to X.509 certificate" ); System.err.println( " -canAssemble <true|false> Set the assemble permission" ); System.err.println( " -canExtractContent <true|false> Set the extraction permission" ); System.err.println( " -canExtractForAccessibility <true|false> Set the extraction permission" ); System.err.println( " -canFillInForm <true|false> Set the fill in form permission" ); System.err.println( " -canModify <true|false> Set the modify permission" ); System.err.println( " -canModifyAnnotations <true|false> Set the modify annots permission" ); System.err.println( " -canPrint <true|false> Set the print permission" ); System.err.println( " -canPrintDegraded <true|false> Set the print degraded permission" ); System.err.println( " -keyLength <length> The length of the key in bits(40)" ); System.err.println( "\nNote: By default all permissions are set to true!" ); System.exit( 1 ); } }
tools/src/main/java/org/apache/pdfbox/tools/Encrypt.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.tools; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.encryption.AccessPermission; import org.apache.pdfbox.pdmodel.encryption.PublicKeyProtectionPolicy; import org.apache.pdfbox.pdmodel.encryption.PublicKeyRecipient; import org.apache.pdfbox.pdmodel.encryption.StandardProtectionPolicy; /** * This will read a document from the filesystem, encrypt it and and then write * the results to the filesystem. <br/><br/> * * @author Ben Litchfield */ public class Encrypt { private Encrypt() { } /** * This is the entry point for the application. * * @param args The command-line arguments. * * @throws Exception If there is an error decrypting the document. */ public static void main( String[] args ) throws Exception { // suppress the Dock icon on OS X System.setProperty("apple.awt.UIElement", "true"); Encrypt encrypt = new Encrypt(); encrypt.encrypt( args ); } private void encrypt( String[] args ) throws Exception { if( args.length < 1 ) { usage(); } else { AccessPermission ap = new AccessPermission(); String infile = null; String outfile = null; String certFile = null; String userPassword = ""; String ownerPassword = ""; int keyLength = 40; PDDocument document = null; try { for( int i=0; i<args.length; i++ ) { String key = args[i]; if( key.equals( "-O" ) ) { ownerPassword = args[++i]; } else if( key.equals( "-U" ) ) { userPassword = args[++i]; } else if( key.equals( "-canAssemble" ) ) { ap.setCanAssembleDocument(args[++i].equalsIgnoreCase( "true" )); } else if( key.equals( "-canExtractContent" ) ) { ap.setCanExtractContent( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canExtractForAccessibility" ) ) { ap.setCanExtractForAccessibility( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canFillInForm" ) ) { ap.setCanFillInForm( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canModify" ) ) { ap.setCanModify( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canModifyAnnotations" ) ) { ap.setCanModifyAnnotations( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canPrint" ) ) { ap.setCanPrint( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-canPrintDegraded" ) ) { ap.setCanPrintDegraded( args[++i].equalsIgnoreCase( "true" ) ); } else if( key.equals( "-certFile" ) ) { certFile = args[++i]; } else if( key.equals( "-keyLength" ) ) { try { keyLength = Integer.parseInt( args[++i] ); } catch( NumberFormatException e ) { throw new NumberFormatException( "Error: -keyLength is not an integer '" + args[i] + "'" ); } } else if( 
infile == null ) { infile = key; } else if( outfile == null ) { outfile = key; } else { usage(); } } if( infile == null ) { usage(); } if( outfile == null ) { outfile = infile; } document = PDDocument.load( new File(infile) ); if( !document.isEncrypted() ) { if( certFile != null ) { PublicKeyProtectionPolicy ppp = new PublicKeyProtectionPolicy(); PublicKeyRecipient recip = new PublicKeyRecipient(); recip.setPermission(ap); CertificateFactory cf = CertificateFactory.getInstance("X.509"); InputStream inStream = new FileInputStream(certFile); X509Certificate certificate = (X509Certificate)cf.generateCertificate(inStream); inStream.close(); recip.setX509(certificate); ppp.addRecipient(recip); ppp.setEncryptionKeyLength(keyLength); document.protect(ppp); } else { StandardProtectionPolicy spp = new StandardProtectionPolicy(ownerPassword, userPassword, ap); spp.setEncryptionKeyLength(keyLength); document.protect(spp); } document.save( outfile ); } else { System.err.println( "Error: Document is already encrypted." ); } } finally { if( document != null ) { document.close(); } } } } /** * This will print a usage message. */ private static void usage() { System.err.println( "usage: java -jar pdfbox-app-x.y.z.jar Encrypt [options] <inputfile> [outputfile]" ); System.err.println( " -O <password> " + "Set the owner password(ignored if cert is set)" ); System.err.println( " -U <password> " + "Set the user password(ignored if cert is set)" ); System.err.println( " -certFile <path to cert> Path to X.509 certificate" ); System.err.println( " -canAssemble <true|false> Set the assemble permission" ); System.err.println( " -canExtractContent <true|false> Set the extraction permission" ); System.err.println( " -canExtractForAccessibility <true|false> Set the extraction permission" ); System.err.println( " -canFillInForm <true|false> Set the fill in form permission" ); System.err.println( " -canModify <true|false> Set the modify permission" ); System.err.println( " -canModifyAnnotations <true|false> Set the modify annots permission" ); System.err.println( " -canPrint <true|false> Set the print permission" ); System.err.println( " -canPrintDegraded <true|false> Set the print degraded permission" ); System.err.println( " -keyLength <length> The length of the key in bits(40)" ); System.err.println( "\nNote: By default all permissions are set to true!" ); System.exit( 1 ); } }
PDFBOX-2576: pass the original exception into the new exception git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1647666 13f79535-47bb-0310-9956-ffa450edef68
tools/src/main/java/org/apache/pdfbox/tools/Encrypt.java
PDFBOX-2576: pass the original exception into the new exception
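The subject above says the original exception is passed into the new one, but the version of Encrypt.java shown in this row throws a bare NumberFormatException and drops the cause. A minimal sketch of that chaining pattern, assuming initCause() is used (the fixed revision of the file is not included in this row, so this is an illustration, not the actual diff):

public final class KeyLengthParseSketch {

    static int parseKeyLength(String value) {
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException e) {
            // NumberFormatException has no (String, Throwable) constructor,
            // so the original exception is attached via initCause().
            NumberFormatException wrapped = new NumberFormatException(
                    "Error: -keyLength is not an integer '" + value + "'");
            wrapped.initCause(e); // preserve the original stack trace
            throw wrapped;
        }
    }

    public static void main(String[] args) {
        // Prints 128; a non-numeric argument throws with the parse failure attached as cause.
        System.out.println(parseKeyLength("128"));
    }
}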
Java
apache-2.0
bf5d7d0ac9dc8a025a044c8af45b3d3c6a613b3e
0
fivejjs/crawljax,fivejjs/crawljax,aminmf/crawljax,cschroed-usgs/crawljax,bigplus/crawljax,cschroed-usgs/crawljax,aminmf/crawljax,robertzas/crawljax,crawljax/crawljax,bigplus/crawljax,xujun10110/crawljax,cschroed-usgs/crawljax,xujun10110/crawljax,aminmf/crawljax,ntatsumi/crawljax,adini121/crawljax,crawljax/crawljax,bigplus/crawljax,robertzas/crawljax,adini121/crawljax,fivejjs/crawljax,xujun10110/crawljax,crawljax/crawljax,crawljax/crawljax,ntatsumi/crawljax,aminmf/crawljax,bigplus/crawljax,adini121/crawljax,saltlab/crawljax-graphdb,xujun10110/crawljax,ntatsumi/crawljax,robertzas/crawljax,robertzas/crawljax,adini121/crawljax,saltlab/crawljax-graphdb,cschroed-usgs/crawljax,fivejjs/crawljax,ntatsumi/crawljax
/** * Created Dec 19, 2007 */ package com.crawljax.core.state; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Test; import org.w3c.dom.Document; import org.w3c.dom.Element; import com.crawljax.util.Helper; /** * @author mesbah * @version $Id: EventableTest.java 6275 2009-12-23 14:13:05Z stefan $ */ public class EventableTest { @Test public void testHashCode() { String xpath = "/body/div[3]"; Identification id = new Identification("xpath", xpath); String eventType = "onclick"; Eventable c = new Eventable(id, eventType); Eventable temp = new Eventable(id, eventType); assertEquals(temp.hashCode(), c.hashCode()); temp = new Eventable(new Identification("id", "34"), eventType); assertNotSame(temp.hashCode(), c.hashCode()); temp = new Eventable(id, "onmouseover"); assertNotSame(temp.hashCode(), c.hashCode()); } @Test public void testToString() { Eventable c = new Eventable(new Identification("xpath", "/body/div[3]"), "onclick"); assertNotNull(c.toString()); } /** * Test method for {@link com.crawljax.core.state.Eventable#equals(java.lang.Object)}. */ @Test public void testEqualsObject() { Eventable c = new Eventable(new Identification("xpath", "/body/div[3]"), "onclick"); Eventable b = new Eventable(new Identification("xpath", "/body/div[3]"), "onclick"); Eventable d = new Eventable(new Identification("id", "23"), "onclick"); Eventable e = new Eventable(new Identification("id", "23"), "onmouseover"); assertTrue(c.equals(b)); assertFalse(c.equals(d)); assertFalse(d.equals(e)); } @Test public void testGetInfo() { Eventable c = new Eventable(new Identification("xpath", "/body/div[3]"), "onclick"); String info = " onclick xpath /body/div[3]"; assertEquals(info, c.toString()); } @Test public void testClickableElement() { String html = "<body><div id='firstdiv'></div><div><span id='thespan'>" + "<a id='thea'>test</a></span></div></body>"; try { Document dom = Helper.getDocument(html); assertNotNull(dom); Element element = dom.getElementById("firstdiv"); Eventable clickable = new Eventable(element, "onclick"); assertNotNull(clickable); /* * String infoexpected = "DIV: id=firstdiv, xpath /HTML[1]/BODY[1]/DIV[1] onclick"; */ String infoexpected = "ID: firstdivDIV: id=\"firstdiv\" onclick xpath " + "/HTML[1]/BODY[1]/DIV[1]"; System.out.println(clickable); assertEquals(infoexpected, clickable.toString()); } catch (Exception e) { fail(e.getMessage()); } } @Test public void testEdge() { StateVertix s1 = new StateVertix("stateSource", "dom1"); StateVertix s2 = new StateVertix("stateTarget", "dom2"); Eventable e = new Eventable(); e.setSourceStateVertix(s1); e.setTargetStateVertix(s2); assertEquals(s1, e.getSourceStateVertix()); assertEquals(s2, e.getTargetStateVertix()); } }
src/test/java/com/crawljax/core/state/EventableTest.java
/** * Created Dec 19, 2007 */ package com.crawljax.core.state; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Test; import org.w3c.dom.Document; import org.w3c.dom.Element; import com.crawljax.util.Helper; /** * @author mesbah * @version $Id: EventableTest.java 6275 2009-12-23 14:13:05Z stefan $ */ public class EventableTest { @Test public void testHashCode() { String xpath = "/body/div[3]"; Identification id = new Identification("xpath", xpath); String eventType = "onclick"; Eventable c = new Eventable(id, eventType); Eventable temp = new Eventable(id, eventType); assertEquals(temp.hashCode(), c.hashCode()); temp = new Eventable(new Identification("id", "34"), eventType); assertNotSame(temp.hashCode(), c.hashCode()); temp = new Eventable(id, "onmouseover"); assertNotSame(temp.hashCode(), c.hashCode()); } @Test public void testToString() { Eventable c = new Eventable(new Identification("xpath", "/body/div[3]"), "onclick"); assertNotNull(c.toString()); } /** * Test method for {@link com.crawljax.core.state.Eventable#equals(java.lang.Object)}. */ @Test public void testEqualsObject() { Eventable c = new Eventable(new Identification("xpath", "/body/div[3]"), "onclick"); Eventable b = new Eventable(new Identification("xpath", "/body/div[3]"), "onclick"); Eventable d = new Eventable(new Identification("id", "23"), "onclick"); Eventable e = new Eventable(new Identification("id", "23"), "onmouseover"); assertTrue(c.equals(b)); assertFalse(c.equals(d)); assertFalse(d.equals(e)); } @Test public void testGetInfo() { Eventable c = new Eventable(new Identification("xpath", "/body/div[3]"), "onclick"); String info = " onclick xpath /body/div[3]"; assertEquals(info, c.toString()); } @Test public void testClickableElement() { String html = "<body><div id='firstdiv'></div><div><span id='thespan'>" + "<a id='thea'>test</a></span></div></body>"; try { Document dom = Helper.getDocument(html); assertNotNull(dom); Element element = dom.getElementById("firstdiv"); Eventable clickable = new Eventable(element, "onclick"); assertNotNull(clickable); /* * String infoexpected = "DIV: id=firstdiv, xpath /HTML[1]/BODY[1]/DIV[1] onclick"; */ String infoexpected = "ID: firstdivDIV: id=\"firstdiv\" onclick xpath " + "/HTML[1]/BODY[1]/DIV[1]"; System.out.println(clickable); assertEquals(infoexpected, clickable.toString()); } catch (Exception e) { fail(e.getMessage()); } } @Test public void testEdge() { StateVertix s1 = new StateVertix("stateSource", "dom1"); StateVertix s2 = new StateVertix("stateTarget", "dom2"); Eventable e = new Eventable(); Edge edge = new Edge(s1, s2); e.setEdge(edge); assertNotNull("Edge not null", edge); assertEquals(s1, e.getEdge().getFromStateVertix()); assertEquals(s2, e.getEdge().getToStateVertix()); } }
deleted Edge. source and target states are included in Eventable class.
src/test/java/com/crawljax/core/state/EventableTest.java
deleted Edge. source and target states are included in Eventable class.
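Per the commit message, the separate Edge object was deleted and the transition endpoints moved onto Eventable itself, which is why the newer test calls setSourceStateVertix/setTargetStateVertix directly. A simplified stand-in for that refactoring (illustrative classes only, not the actual crawljax sources):

class StateVertixSketch {
    private final String name;

    StateVertixSketch(String name) {
        this.name = name;
    }

    @Override
    public String toString() {
        return name;
    }
}

class EventableSketch {
    // Endpoints stored on the eventable itself, replacing a separate Edge(from, to) object.
    private StateVertixSketch source;
    private StateVertixSketch target;

    void setSourceStateVertix(StateVertixSketch source) {
        this.source = source;
    }

    void setTargetStateVertix(StateVertixSketch target) {
        this.target = target;
    }

    StateVertixSketch getSourceStateVertix() {
        return source;
    }

    StateVertixSketch getTargetStateVertix() {
        return target;
    }

    public static void main(String[] args) {
        EventableSketch e = new EventableSketch();
        e.setSourceStateVertix(new StateVertixSketch("stateSource"));
        e.setTargetStateVertix(new StateVertixSketch("stateTarget"));
        System.out.println(e.getSourceStateVertix() + " -> " + e.getTargetStateVertix());
    }
}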
Java
apache-2.0
e9422ce69b86b35ee29b053787becc9f6016a5ea
0
jembi/openxds,jembi/openxds,jembi/openxds
/** * Copyright (c) 2009-2010 Misys Open Source Solutions (MOSS) and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * Contributors: * Misys Open Source Solutions - initial API and implementation * - */ package org.openhealthtools.openxds.dao; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openhealthtools.openxds.registry.PersonIdentifier; import org.openhealthtools.openxds.registry.api.RegistryPatientException; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; /* * @author <a href="mailto:[email protected]">Raja</a> */ public class xdsRegistryPatientDaoImpl extends HibernateDaoSupport implements XdsRegistryPatientDao{ private static final Log log = LogFactory.getLog(xdsRegistryPatientDaoImpl.class); public PersonIdentifier getPersonById(PersonIdentifier patientId) throws RegistryPatientException{ return getPersonById(patientId, false); } public PersonIdentifier getPersonById(PersonIdentifier patientId, boolean merged) throws RegistryPatientException{ List list = new ArrayList(); PersonIdentifier personIdentifier = null; String personId = patientId.getPatientId(); String assigningAuthority = patientId.getAssigningAuthority(); String deletePatient = "N"; String mergedPatient = (merged) ? "Y" : "N"; try{ list = this.getHibernateTemplate().find( "from PersonIdentifier where patientid = '"+ personId +"' and assigningauthority ='" + assigningAuthority + "' and deleted ='" + deletePatient + "' and merged ='" + mergedPatient + "'"); }catch (Exception e) { log.error("Failed to retrieve person identifier from registry patient service",e); throw new RegistryPatientException(e); } if (list.size() > 0) personIdentifier = (PersonIdentifier) list.get(0); return personIdentifier; } public void savePersonIdentifier(PersonIdentifier identifier) throws RegistryPatientException { try { this.getHibernateTemplate().save(identifier); } catch (Exception e) { throw new RegistryPatientException(e); } } public void updatePersonIdentifier(PersonIdentifier identifier) throws RegistryPatientException { try { this.getHibernateTemplate().update(identifier); } catch (Exception e) { throw new RegistryPatientException(e); } } }
openxds/openxds-registry-patient-lightweight/src/main/java/org/openhealthtools/openxds/dao/xdsRegistryPatientDaoImpl.java
/** * Copyright (c) 2009-2010 Misys Open Source Solutions (MOSS) and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * Contributors: * Misys Open Source Solutions - initial API and implementation * - */ package org.openhealthtools.openxds.dao; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openhealthtools.openxds.registry.PersonIdentifier; import org.openhealthtools.openxds.registry.api.RegistryPatientException; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; /* * @author <a href="mailto:[email protected]">Raja</a> */ public class xdsRegistryPatientDaoImpl extends HibernateDaoSupport implements XdsRegistryPatientDao{ private static final Log log = LogFactory.getLog(xdsRegistryPatientDaoImpl.class); @Override public PersonIdentifier getPersonById(PersonIdentifier patientId) throws RegistryPatientException{ return getPersonById(patientId, false); } @Override public PersonIdentifier getPersonById(PersonIdentifier patientId, boolean merged) throws RegistryPatientException{ List list = new ArrayList(); PersonIdentifier personIdentifier = null; String personId = patientId.getPatientId(); String assigningAuthority = patientId.getAssigningAuthority(); String deletePatient = "N"; String mergedPatient = (merged) ? "Y" : "N"; try{ list = this.getHibernateTemplate().find( "from PersonIdentifier where patientid = '"+ personId +"' and assigningauthority ='" + assigningAuthority + "' and deleted ='" + deletePatient + "' and merged ='" + mergedPatient + "'"); }catch (Exception e) { log.error("Failed to retrieve person identifier from registry patient service",e); throw new RegistryPatientException(e); } if (list.size() > 0) personIdentifier = (PersonIdentifier) list.get(0); return personIdentifier; } @Override public void savePersonIdentifier(PersonIdentifier identifier) throws RegistryPatientException { try { this.getHibernateTemplate().save(identifier); } catch (Exception e) { throw new RegistryPatientException(e); } } @Override public void updatePersonIdentifier(PersonIdentifier identifier) throws RegistryPatientException { try { this.getHibernateTemplate().update(identifier); } catch (Exception e) { throw new RegistryPatientException(e); } } }
removed @Override
openxds/openxds-registry-patient-lightweight/src/main/java/org/openhealthtools/openxds/dao/xdsRegistryPatientDaoImpl.java
removed @Override
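The only difference between the two listings above is the removal of @Override from methods that implement the XdsRegistryPatientDao interface. One common reason for such a change (the commit message does not state whether it applies here) is compatibility with javac -source 1.5, where @Override on an interface implementation is a compile error; it is only accepted there from Java 6 onward. A minimal illustration with placeholder names, not the OpenXDS types:

interface PatientDaoSketch {
    String findById(String id);
}

class PatientDaoSketchImpl implements PatientDaoSketch {

    // Under -source 1.5 the commented annotation below is rejected, because this method
    // implements an interface method rather than overriding a superclass method.
    // Leaving it out keeps the class compiling on both old and new compilers.
    // @Override
    public String findById(String id) {
        return "patient:" + id;
    }

    public static void main(String[] args) {
        System.out.println(new PatientDaoSketchImpl().findById("42"));
    }
}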
Java
apache-2.0
02054bb8ce8691158f1e1ce0bfb5ca87570ea7c0
0
ucam-cl-dtg/urop-2014-git
/* vim: set et ts=4 sts=4 sw=4 tw=72 : */ /* See the LICENSE file for the license of the project */ package uk.ac.cam.cl.git; import java.util.Arrays; import java.util.List; import java.util.LinkedList; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Injector; import uk.ac.cam.cl.git.api.DuplicateRepoNameException; import uk.ac.cam.cl.git.api.RepositoryNotFoundException; import uk.ac.cam.cl.git.configuration.ConfigurationLoader; import com.jcraft.jsch.*; /** * @author Isaac Dunn &lt;[email protected]&gt; * @author Kovacsics Robert &lt;[email protected]&gt; * @version 0.1 */ public class ConfigDatabase { /* For logging */ private static final Logger log = LoggerFactory.getLogger(ConfigDatabase.class); private static final String[] environmentVariables = new String[] {"HOME=" + ConfigurationLoader.getConfig().getGitoliteHome() , "PATH=" + ConfigurationLoader.getConfig().getGitolitePath() , "GL_LIBDIR=" + ConfigurationLoader.getConfig().getGitoliteLibdir()}; @Inject private RepositoryCollection reposCollection; private static final Injector injector = Guice.createInjector(new DatabaseModule()); private static final ConfigDatabase instance = injector.getInstance(ConfigDatabase.class); public static ConfigDatabase instance() { return instance; } /** * For unit testing only, to allow a mock collection to be used. * Replaces the repository collection with the argument. * @param reposCollection The collection to be used. */ //@Inject void setReposCollection(RepositoryCollection rCollection) { reposCollection = rCollection; } /** * Returns a list of all the repository objects in the collection * * @return List of repository objects in the collection */ public List<Repository> getRepos() { /* TODO: Test ordered-ness or repositories. */ return reposCollection.listRepos(); } /** * Returns the repository object with the given name in the * database. * * @param name The name of the repository * @return The requested repository object * @throws RepositoryNotFoundException */ public Repository getRepoByName(String name) throws RepositoryNotFoundException { return reposCollection.getRepo(name); } /** * Removes the repository object with the given name from the * database if present in the database. * * @param name The name of the repository to remove * @throws IOException * @throws RepositoryNotFoundException */ public void delRepoByName(String repoName) throws IOException, RepositoryNotFoundException { log.info("Deleting repository \"" + repoName + "\""); if (!reposCollection.contains(repoName)) throw new RepositoryNotFoundException(); reposCollection.removeRepo(repoName); generateConfigFile(); log.info("Deleted repository \"" + repoName + "\""); } /** * Removes all repositories from the collection. * For unit testing only. */ void deleteAll() throws IOException { reposCollection.removeAll(); generateConfigFile(); } /** * Generates config file for gitolite and writes it to gitoliteGeneratedConfigFile (see ConfigurationLoader). * <p> * Accesses the database to find repositories and assumes the * Repository.toString() method returns the appropriate representation. The * main conf file should have an include statement so that * when the hook is called, the updates are made. 
The hook is * called at the end of this method. * * @throws IOException Typically an unrecoverable problem. */ public void generateConfigFile() throws IOException { log.info("Generating config file \"" + ConfigurationLoader.getConfig() .getGitoliteGeneratedConfigFile() + "\""); StringBuilder output = new StringBuilder(); for (Repository r : getRepos()) output.append(r.toString() + "\n"); /* Write out file */ File configFile = new File(ConfigurationLoader.getConfig() .getGitoliteGeneratedConfigFile()); BufferedWriter buffWriter = new BufferedWriter(new FileWriter(configFile, false)); buffWriter.write(output.toString()); buffWriter.flush(); buffWriter.close(); runGitoliteUpdate(new String[] {"compile", /* Workaround first compile not updating file, only * doing git init --bare */ "compile", "trigger POST_COMPILE"}); log.info("Generated config file \"" + ConfigurationLoader.getConfig() .getGitoliteGeneratedConfigFile() + "\""); } /** * Adds a new repository to the mongo database for inclusion in the * conf file when generated. * * @param repo The repository to be added * @throws DuplicateRepoNameException A repository with this name already * exists. */ public void addRepo(Repository repo) throws DuplicateRepoNameException, IOException { reposCollection.insertRepo(repo); generateConfigFile(); } /** * Takes public key and username as strings, writes the key to * getGitoliteSSHKeyLocation (see ConfigurationLoader), and calls the hook. * * @param key The SSH key to be added * @param username The name of the user to be added * @throws IOException */ public void addSSHKey(String key, String userName) throws IOException { log.info("Adding key for \"" + userName + "\" to \"" + ConfigurationLoader.getConfig() .getGitoliteSSHKeyLocation() + "\""); File keyFile = new File(ConfigurationLoader.getConfig() .getGitoliteSSHKeyLocation() + "/" + userName + ".pub"); if (!keyFile.exists()) { if (keyFile.getParentFile() != null) keyFile.getParentFile().mkdirs(); /* Make parent directories if necessary */ keyFile.createNewFile(); } BufferedWriter buffWriter = new BufferedWriter(new FileWriter(keyFile)); buffWriter.write(key); buffWriter.close(); runGitoliteUpdate(new String[] {"trigger SSH_AUTHKEYS"}); log.info("Finished adding key for \"" + userName + "\""); } /** * Updates the given repository. * * This selects the repository uniquely using the ID (not * technically the name of the repository, but is equivalent). * * @param repo The updated repository (there must also be a * repository by this name). * @throws RepositoryNotFoundException * @throws MongoException If the update operation fails (for some * unknown reason). */ public void updateRepo(Repository repo) throws IOException, RepositoryNotFoundException { reposCollection.updateRepo(repo); generateConfigFile(); } /** * Runs the gitolite update programs. * <p> * This is because gitolite is a perl program and compiles the * configuration file into a perl module, which it uses. * This just forces recompilation. * * @param updates List of things to recompile/reconfigure. 
*/ void runGitoliteUpdate(String[] updates) throws IOException { log.info("Starting gitolite recompilation"); for (String command : updates) { try { JSch ssh = new JSch(); ssh.setKnownHosts("~/.ssh/known_hosts"); ssh.addIdentity("~/.ssh/id_rsa"); Session session = ssh.getSession(ConfigurationLoader .getConfig().getRepoUser() , "localhost" , 22); session.connect(); ChannelExec channel = (ChannelExec)session.openChannel("exec"); channel.setCommand(command); channel.setInputStream(null); /* Gitolite does native logging */ channel.setOutputStream(null); channel.setErrStream(null); channel.connect(); while (channel.isClosed()) { try { Thread.sleep(100); } catch (InterruptedException e) { /* If we woke up earlier from sleep than * expected, continue to check for channel * status. */ } } channel.disconnect(); session.disconnect(); } catch (JSchException e) { throw new IOException(e); } } log.info("Finished gitolite recompilation"); } /** * This rebuilds the MongoDB database using the gitolite * configuration file, in case the two become out of sync. */ private void rebuildDatabaseFromGitolite() throws IOException, DuplicateRepoNameException { reposCollection.removeAll(); // Empty database collection BufferedReader reader = new BufferedReader(new FileReader(new File( ConfigurationLoader.getConfig().getGitoliteGeneratedConfigFile()))); String firstLine; while ((firstLine = reader.readLine()) != null) { // While not end of file String repoName = firstLine.split("\\s\\+")[1]; // Repo name is second word of first line String[] readWriteLine = reader.readLine().split("=")[1].trim().split("\\s\\+"); // We want the words to the right of the "RW =" String nextLine = reader.readLine(); String[] readOnlyLine; String[] auxiliaryLine; if (nextLine.startsWith("#")) { // No users with read only access readOnlyLine = new String[0]; auxiliaryLine = nextLine.split(" "); } else { // At least one user with read only access readOnlyLine = nextLine.split("=")[1].trim().split("\\s\\+"); auxiliaryLine = reader.readLine().split("\\s\\+"); } String owner = readWriteLine[0]; // Owner is always first RW entry - see Repository.toString() List<String> readWrites = new LinkedList<String>(Arrays.asList(readWriteLine)); readWrites.remove(0); // remove owner from RW list as owner is automatically added List<String> readOnlys = Arrays.asList(readOnlyLine); String parent = auxiliaryLine[1]; // see Repository.toString() String parent_hidden = auxiliaryLine[2]; Repository toInsert = new Repository(repoName, owner, readWrites, readOnlys, parent, parent_hidden, null); reposCollection.insertRepo(toInsert); reader.readLine(); // extra line between repos } reader.close(); } }
server/src/main/java/uk/ac/cam/cl/git/ConfigDatabase.java
/* vim: set et ts=4 sts=4 sw=4 tw=72 : */ /* See the LICENSE file for the license of the project */ package uk.ac.cam.cl.git; import java.util.Arrays; import java.util.List; import java.util.LinkedList; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Injector; import uk.ac.cam.cl.git.api.DuplicateRepoNameException; import uk.ac.cam.cl.git.api.RepositoryNotFoundException; import uk.ac.cam.cl.git.configuration.ConfigurationLoader; import com.jcraft.jsch.*; /** * @author Isaac Dunn &lt;[email protected]&gt; * @author Kovacsics Robert &lt;[email protected]&gt; * @version 0.1 */ public class ConfigDatabase { /* For logging */ private static final Logger log = LoggerFactory.getLogger(ConfigDatabase.class); private static final String[] environmentVariables = new String[] {"HOME=" + ConfigurationLoader.getConfig().getGitoliteHome() , "PATH=" + ConfigurationLoader.getConfig().getGitolitePath() , "GL_LIBDIR=" + ConfigurationLoader.getConfig().getGitoliteLibdir()}; @Inject private RepositoryCollection reposCollection; private static final Injector injector = Guice.createInjector(new DatabaseModule()); private static final ConfigDatabase instance = injector.getInstance(ConfigDatabase.class); public static ConfigDatabase instance() { return instance; } /** * For unit testing only, to allow a mock collection to be used. * Replaces the repository collection with the argument. * @param reposCollection The collection to be used. */ //@Inject void setReposCollection(RepositoryCollection rCollection) { reposCollection = rCollection; } /** * Returns a list of all the repository objects in the collection * * @return List of repository objects in the collection */ public List<Repository> getRepos() { /* TODO: Test ordered-ness or repositories. */ return reposCollection.listRepos(); } /** * Returns the repository object with the given name in the * database. * * @param name The name of the repository * @return The requested repository object * @throws RepositoryNotFoundException */ public Repository getRepoByName(String name) throws RepositoryNotFoundException { return reposCollection.getRepo(name); } /** * Removes the repository object with the given name from the * database if present in the database. * * @param name The name of the repository to remove * @throws IOException * @throws RepositoryNotFoundException */ public void delRepoByName(String repoName) throws IOException, RepositoryNotFoundException { log.info("Deleting repository \"" + repoName + "\""); if (!reposCollection.contains(repoName)) throw new RepositoryNotFoundException(); reposCollection.removeRepo(repoName); generateConfigFile(); log.info("Deleted repository \"" + repoName + "\""); } /** * Removes all repositories from the collection. * For unit testing only. */ void deleteAll() throws IOException { reposCollection.removeAll(); generateConfigFile(); } /** * Generates config file for gitolite and writes it to gitoliteGeneratedConfigFile (see ConfigurationLoader). * <p> * Accesses the database to find repositories and assumes the * Repository.toString() method returns the appropriate representation. The * main conf file should have an include statement so that * when the hook is called, the updates are made. 
The hook is * called at the end of this method. * * @throws IOException Typically an unrecoverable problem. */ public void generateConfigFile() throws IOException { log.info("Generating config file \"" + ConfigurationLoader.getConfig() .getGitoliteGeneratedConfigFile() + "\""); StringBuilder output = new StringBuilder(); for (Repository r : getRepos()) output.append(r.toString() + "\n"); /* Write out file */ File configFile = new File(ConfigurationLoader.getConfig() .getGitoliteGeneratedConfigFile()); BufferedWriter buffWriter = new BufferedWriter(new FileWriter(configFile, false)); buffWriter.write(output.toString()); buffWriter.flush(); buffWriter.close(); runGitoliteUpdate(new String[] {"compile", "trigger POST_COMPILE"}); log.info("Generated config file \"" + ConfigurationLoader.getConfig() .getGitoliteGeneratedConfigFile() + "\""); } /** * Adds a new repository to the mongo database for inclusion in the * conf file when generated. * * @param repo The repository to be added * @throws DuplicateRepoNameException A repository with this name already * exists. */ public void addRepo(Repository repo) throws DuplicateRepoNameException, IOException { reposCollection.insertRepo(repo); generateConfigFile(); } /** * Takes public key and username as strings, writes the key to * getGitoliteSSHKeyLocation (see ConfigurationLoader), and calls the hook. * * @param key The SSH key to be added * @param username The name of the user to be added * @throws IOException */ public void addSSHKey(String key, String userName) throws IOException { log.info("Adding key for \"" + userName + "\" to \"" + ConfigurationLoader.getConfig() .getGitoliteSSHKeyLocation() + "\""); File keyFile = new File(ConfigurationLoader.getConfig() .getGitoliteSSHKeyLocation() + "/" + userName + ".pub"); if (!keyFile.exists()) { if (keyFile.getParentFile() != null) keyFile.getParentFile().mkdirs(); /* Make parent directories if necessary */ keyFile.createNewFile(); } BufferedWriter buffWriter = new BufferedWriter(new FileWriter(keyFile)); buffWriter.write(key); buffWriter.close(); runGitoliteUpdate(new String[] {"trigger SSH_AUTHKEYS"}); log.info("Finished adding key for \"" + userName + "\""); } /** * Updates the given repository. * * This selects the repository uniquely using the ID (not * technically the name of the repository, but is equivalent). * * @param repo The updated repository (there must also be a * repository by this name). * @throws RepositoryNotFoundException * @throws MongoException If the update operation fails (for some * unknown reason). */ public void updateRepo(Repository repo) throws IOException, RepositoryNotFoundException { reposCollection.updateRepo(repo); generateConfigFile(); } /** * Runs the gitolite update programs. * <p> * This is because gitolite is a perl program and compiles the * configuration file into a perl module, which it uses. * This just forces recompilation. * * @param updates List of things to recompile/reconfigure. 
*/ void runGitoliteUpdate(String[] updates) throws IOException { log.info("Starting gitolite recompilation"); for (String command : updates) { try { JSch ssh = new JSch(); ssh.setKnownHosts("~/.ssh/known_hosts"); ssh.addIdentity("~/.ssh/id_rsa"); Session session = ssh.getSession(ConfigurationLoader .getConfig().getRepoUser() , "localhost" , 22); session.connect(); ChannelExec channel = (ChannelExec)session.openChannel("exec"); channel.setCommand(command); channel.setInputStream(null); /* Gitolite does native logging */ channel.setOutputStream(null); channel.setErrStream(null); channel.connect(); while (channel.isClosed()) { try { Thread.sleep(100); } catch (InterruptedException e) { /* If we woke up earlier from sleep than * expected, continue to check for channel * status. */ } } channel.disconnect(); session.disconnect(); } catch (JSchException e) { throw new IOException(e); } } log.info("Finished gitolite recompilation"); } /** * This rebuilds the MongoDB database using the gitolite * configuration file, in case the two become out of sync. */ private void rebuildDatabaseFromGitolite() throws IOException, DuplicateRepoNameException { reposCollection.removeAll(); // Empty database collection BufferedReader reader = new BufferedReader(new FileReader(new File( ConfigurationLoader.getConfig().getGitoliteGeneratedConfigFile()))); String firstLine; while ((firstLine = reader.readLine()) != null) { // While not end of file String repoName = firstLine.split("\\s\\+")[1]; // Repo name is second word of first line String[] readWriteLine = reader.readLine().split("=")[1].trim().split("\\s\\+"); // We want the words to the right of the "RW =" String nextLine = reader.readLine(); String[] readOnlyLine; String[] auxiliaryLine; if (nextLine.startsWith("#")) { // No users with read only access readOnlyLine = new String[0]; auxiliaryLine = nextLine.split(" "); } else { // At least one user with read only access readOnlyLine = nextLine.split("=")[1].trim().split("\\s\\+"); auxiliaryLine = reader.readLine().split("\\s\\+"); } String owner = readWriteLine[0]; // Owner is always first RW entry - see Repository.toString() List<String> readWrites = new LinkedList<String>(Arrays.asList(readWriteLine)); readWrites.remove(0); // remove owner from RW list as owner is automatically added List<String> readOnlys = Arrays.asList(readOnlyLine); String parent = auxiliaryLine[1]; // see Repository.toString() String parent_hidden = auxiliaryLine[2]; Repository toInsert = new Repository(repoName, owner, readWrites, readOnlys, parent, parent_hidden, null); reposCollection.insertRepo(toInsert); reader.readLine(); // extra line between repos } reader.close(); } }
Added workaround to compile only doing git init
server/src/main/java/uk/ac/cam/cl/git/ConfigDatabase.java
Added workaround to compile only doing git init
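Besides the duplicated "compile" step named in the commit message, both listings wait for the remote gitolite command with a loop that sleeps while channel.isClosed() is true; immediately after connect() the channel is normally still open, so that loop appears to fall through without waiting. A hedged sketch of the usual JSch exec wait, with placeholder user, host and key path rather than the project's configuration:

import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;

public final class SshCommandSketch {

    // Runs one command over SSH and returns its exit status.
    static int run(String command) throws Exception {
        JSch ssh = new JSch();
        ssh.addIdentity(System.getProperty("user.home") + "/.ssh/id_rsa"); // assumed key path
        Session session = ssh.getSession("git", "localhost", 22);          // assumed user/host
        session.setConfig("StrictHostKeyChecking", "no");                  // demo only
        session.connect();

        ChannelExec channel = (ChannelExec) session.openChannel("exec");
        channel.setCommand(command);
        channel.connect();

        // Poll until the remote command has finished, i.e. while the channel is still open.
        while (!channel.isClosed()) {
            Thread.sleep(100);
        }
        int exitStatus = channel.getExitStatus();

        channel.disconnect();
        session.disconnect();
        return exitStatus;
    }

    public static void main(String[] args) throws Exception {
        System.out.println(run("gitolite compile"));
    }
}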
Java
apache-2.0
ba4e7f555f19ffd410ff12859f53dba2e7c48915
0
sarnowski/eve-interfaces
/**
 * Copyright 2010 Tobias Sarnowski
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.eveonline.api.map;

import com.eveonline.api.ApiService;
import com.eveonline.api.exceptions.ApiException;

/**
 * @author Tobias Sarnowski
 */
public interface FacWarSystemsApi extends ApiService {

    public static final String XMLPATH = "/map/FacWarSystems.xml.aspx";

    /**
     * @return list of solar systems used for the faction warfare
     * @throws ApiException
     */
    FacWarSystems getFactionWarfareSystems() throws ApiException;
}
src/main/java/com/eveonline/api/map/FacWarSystemsApi.java
/**
 * Copyright 2010 Tobias Sarnowski
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.eveonline.api.map;

import com.eveonline.api.ApiService;
import com.eveonline.api.exceptions.ApiException;

/**
 * @author Tobias Sarnowski
 */
public interface FacWarSystemsApi extends ApiService {

    /**
     * @return list of solar systems used for the faction warfare
     * @throws ApiException
     */
    FacWarSystems getFactionWarfareSystems() throws ApiException;
}
added XMLPATH to FacWarSystemsApi
src/main/java/com/eveonline/api/map/FacWarSystemsApi.java
added XMLPATH to FacWarSystemsApi
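The commit adds an XMLPATH constant naming the endpoint path on the API interface. A small illustration of how such a constant can be combined with a base URL when building the request; the base URL and the demo class are assumptions, not part of eve-interfaces:

public final class FacWarSystemsUrlDemo {

    // Mirrors the constant added to FacWarSystemsApi in this commit.
    static final String XMLPATH = "/map/FacWarSystems.xml.aspx";

    // Assumed base URL, for illustration only.
    static final String API_BASE = "https://api.eveonline.com";

    public static void main(String[] args) {
        System.out.println(API_BASE + XMLPATH);
    }
}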
Java
apache-2.0
91823b8d2816d1932945a507ce8708c08c2cf66c
0
RaviKumar7443/JPETSTORE,RaviKumar7443/JPETSTORE,RaviKumar7443/JPETSTORE
package org.mybatis.jpetstore.domain; import java.io.Serializable; import java.math.BigDecimal; public class Calculate implements Serializable { public void hello() { System.out.println("JPET Store Application"); System.out.println("Class name: Calculate.java"); System.out.println("Hello World"); System.out.println("Making a new Entry at Mon Dec 12 11:00:16 UTC 2016"); System.out.println("Mon Dec 12 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Sat Dec 10 11:00:16 UTC 2016"); System.out.println("Sat Dec 10 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Thu Dec 8 11:00:16 UTC 2016"); System.out.println("Thu Dec 8 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Tue Dec 6 11:00:16 UTC 2016"); System.out.println("Tue Dec 6 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Fri Dec 2 12:52:58 UTC 2016"); System.out.println("Fri Dec 2 12:52:58 UTC 2016"); } } //---------------------------------------------------- //Comment added on date:Fri Dec 2 09:45:31 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //Comment added on date:Fri Dec 2 09:55:14 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri Dec 2 11:34:52 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri Dec 2 11:35:25 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri Dec 2 12:32:47 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 05:39:41 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 05:41:08 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 05:41:14 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 06:05:33 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore 
//Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 11:00:16 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Wed Dec 7 05:08:34 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Wed Dec 7 11:00:16 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri Dec 9 11:00:16 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Sun Dec 11 11:00:16 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //----------------------------------------------------
src/main/java/org/mybatis/jpetstore/domain/Calculate.java
package org.mybatis.jpetstore.domain; import java.io.Serializable; import java.math.BigDecimal; public class Calculate implements Serializable { public void hello() { System.out.println("JPET Store Application"); System.out.println("Class name: Calculate.java"); System.out.println("Hello World"); System.out.println("Making a new Entry at Sat Dec 10 11:00:16 UTC 2016"); System.out.println("Sat Dec 10 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Thu Dec 8 11:00:16 UTC 2016"); System.out.println("Thu Dec 8 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Tue Dec 6 11:00:16 UTC 2016"); System.out.println("Tue Dec 6 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Fri Dec 2 12:52:58 UTC 2016"); System.out.println("Fri Dec 2 12:52:58 UTC 2016"); } } //---------------------------------------------------- //Comment added on date:Fri Dec 2 09:45:31 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //Comment added on date:Fri Dec 2 09:55:14 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri Dec 2 11:34:52 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri Dec 2 11:35:25 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri Dec 2 12:32:47 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 05:39:41 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 05:41:08 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 05:41:14 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Mon Dec 5 06:05:33 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- 
//---------------------------------------------------- //Comment added on date:Mon Dec 5 11:00:16 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Wed Dec 7 05:08:34 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Wed Dec 7 11:00:16 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri Dec 9 11:00:16 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Sun Dec 11 11:00:16 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding coments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //----------------------------------------------------
Mon Dec 12 11:00:16 UTC 2016
src/main/java/org/mybatis/jpetstore/domain/Calculate.java
Mon Dec 12 11:00:16 UTC 2016
Java
apache-2.0
1e8a76f04884108e9d070130a7b20714cbc9fc7d
0
KernelHaven/KernelHaven,KernelHaven/KernelHaven
/* * Copyright 2017-2019 University of Hildesheim, Software Systems Engineering * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.ssehub.kernel_haven.analysis; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import net.ssehub.kernel_haven.SetUpException; import net.ssehub.kernel_haven.build_model.BuildModel; import net.ssehub.kernel_haven.code_model.SourceFile; import net.ssehub.kernel_haven.config.Configuration; import net.ssehub.kernel_haven.config.DefaultSettings; import net.ssehub.kernel_haven.provider.AbstractProvider; import net.ssehub.kernel_haven.util.ExtractorException; import net.ssehub.kernel_haven.util.Timestamp; import net.ssehub.kernel_haven.util.io.ITableCollection; import net.ssehub.kernel_haven.util.io.ITableWriter; import net.ssehub.kernel_haven.util.io.TableCollectionWriterFactory; import net.ssehub.kernel_haven.util.io.csv.CsvFileCollection; import net.ssehub.kernel_haven.util.null_checks.NonNull; import net.ssehub.kernel_haven.variability_model.VariabilityModel; /** * An analysis that is a pipeline consisting of {@link AnalysisComponent}s. * * @author Adam */ public abstract class PipelineAnalysis extends AbstractAnalysis { private static PipelineAnalysis instance; private ITableCollection resultCollection; private ExtractorDataDuplicator<VariabilityModel> vmStarter; private ExtractorDataDuplicator<BuildModel> bmStarter; private ExtractorDataDuplicator<SourceFile<?>> cmStarter; /** * Creates a new {@link PipelineAnalysis}. * * @param config The global configuration. */ public PipelineAnalysis(@NonNull Configuration config) { super(config); } /** * The {@link PipelineAnalysis} that is the current main analysis in this execution. May be null if no * {@link PipelineAnalysis} is the main analysis component. * * @return The current {@link PipelineAnalysis} instance. */ static PipelineAnalysis getInstance() { return instance; } /** * Returns the {@link AnalysisComponent} that provides the variability model from the extractors. * * @return The {@link AnalysisComponent} that provides the variability model. */ protected @NonNull AnalysisComponent<VariabilityModel> getVmComponent() { return vmStarter.createNewStartingComponent(config); } /** * Returns the {@link AnalysisComponent} that provides the build model from the extractors. * * @return The {@link AnalysisComponent} that provides the build model. */ protected @NonNull AnalysisComponent<BuildModel> getBmComponent() { return bmStarter.createNewStartingComponent(config); } /** * Returns the {@link AnalysisComponent} that provides the code model from the extractors. * * @return The {@link AnalysisComponent} that provides the code model. */ protected @NonNull AnalysisComponent<SourceFile<?>> getCmComponent() { return cmStarter.createNewStartingComponent(config); } /** * The collection that {@link AnalysisComponent}s should write their intermediate output to. * * @return The {@link ITableCollection} to write output to. 
*/ ITableCollection getResultCollection() { return resultCollection; } /** * Creates the result collection from the user settings. * * @return The result collection to store files in. * * @throws SetUpException If creating the result collection fails. */ private ITableCollection createResultCollection() throws SetUpException { String outputSuffix = config.getValue(DefaultSettings.ANALYSIS_RESULT); File outputFile = new File(getOutputDir(), Timestamp.INSTANCE.getFilename( config.getValue(DefaultSettings.ANALYSIS_RESULT_NAME), outputSuffix)); try { return TableCollectionWriterFactory.INSTANCE.createCollection(outputFile); } catch (IOException e) { throw new SetUpException("Can't create output for suffix " + outputSuffix, e); } } /** * Creates the pipeline. * * @return The "main" (i.e. the last) component of the pipeline. * * @throws SetUpException If setting up the pipeline fails. */ protected abstract @NonNull AnalysisComponent<?> createPipeline() throws SetUpException; @Override public void run() { Thread.currentThread().setName("AnalysisPipelineController"); try { vmStarter = new ExtractorDataDuplicator<>(vmProvider, false, "VM"); bmStarter = new ExtractorDataDuplicator<>(bmProvider, false, "BM"); cmStarter = new ExtractorDataDuplicator<>(cmProvider, true, "CM"); try { resultCollection = createResultCollection(); } catch (SetUpException e) { LOGGER.logException("Couldn't create output collection based on user configuration; " + "falling back to CSV", e); resultCollection = new CsvFileCollection(new File(getOutputDir(), "Analysis_" + Timestamp.INSTANCE.getFileTimestamp())); } instance = this; AnalysisComponent<?> mainComponent = createPipeline(); if (config.getValue(DefaultSettings.ANALYSIS_PIPELINE_START_EXTRACTORS)) { // start all extractors; this is needed here because the analysis components will most likely poll them // in order, which means that the extractors would not run in parallel vmStarter.start(); bmStarter.start(); cmStarter.start(); } if (mainComponent instanceof JoinComponent) { joinSplitComponentFull((JoinComponent) mainComponent); } else { pollAndWriteOutput(mainComponent); } LOGGER.logDebug("Analysis components done"); try { LOGGER.logDebug("Closing result collection"); resultCollection.close(); for (File file : resultCollection.getFiles()) { addOutputFile(file); } } catch (IOException e) { LOGGER.logException("Exception while closing output file", e); } } catch (SetUpException e) { LOGGER.logException("Exception while setting up", e); } } /** * Part of {@link #run()} to handle {@link JoinComponent}s. This method joins all components in parallel. * * @param mainComponent The analysis, which is joining results of multiple other components. */ private void joinSplitComponentFull(@NonNull JoinComponent mainComponent) { List<Thread> threads = new ArrayList<>(mainComponent.getInputs().length); for (AnalysisComponent<?> component : mainComponent.getInputs()) { Thread th = new Thread(() -> { pollAndWriteOutput(component); }, "AnalysisPipelineControllerOutputThread"); threads.add(th); th.setDaemon(true); th.start(); } for (Thread th : threads) { try { th.join(); } catch (InterruptedException e) { } } } /** * Polls all output from the given component and writes it to the output file. * * @param component The component to read the output from. 
*/ private void pollAndWriteOutput(@NonNull AnalysisComponent<?> component) { LOGGER.logDebug2("Starting and polling output of analysis component (", component.getClass().getSimpleName(), ")..."); try (ITableWriter writer = resultCollection.getWriter(component.getResultName())) { Object result; while ((result = component.getNextResult()) != null) { LOGGER.logDebug2("Got analysis result: ", result.toString()); writer.writeObject(result); } } catch (IOException e) { LOGGER.logException("Exception while writing output file", e); } } /** * A class for duplicating the extractor data. This way, multiple analysis components can have the same models * as their input data. * * @param <T> The type of model to duplicate. */ private static class ExtractorDataDuplicator<T> implements Runnable { private @NonNull AbstractProvider<T> provider; private boolean multiple; private @NonNull List<@NonNull StartingComponent<T>> startingComponents; private boolean started; private @NonNull String type; /** * Creates a new ExtractorDataDuplicator. * * @param provider The provider to get the data from. * @param multiple Whether the provider should be polled multiple times or just once. * @param type The type of duplicator component ("CM", "BM" or "VM"). */ public ExtractorDataDuplicator(@NonNull AbstractProvider<T> provider, boolean multiple, @NonNull String type) { this.provider = provider; this.multiple = multiple; this.type = type; startingComponents = new LinkedList<>(); } /** * Creates a new starting component that will get its own copy of the data from us. * * @param config The configuration to create the component with. * * @return The starting component that can be used as input data for other analysis components. */ public @NonNull StartingComponent<T> createNewStartingComponent(@NonNull Configuration config) { StartingComponent<T> component = new StartingComponent<>(config, this, type); startingComponents.add(component); return component; } /** * Adds the given data element to all starting components. * * @param data The data to add. */ private void addToAllComponents(@NonNull T data) { for (StartingComponent<T> component : startingComponents) { component.addResult(data); } } /** * Starts a new thread that copies the extractor data to all stating components created up until now. * This method ensures that this thread is only started once, no matter how often this method is called. 
*/ public void start() { synchronized (this) { if (!started) { new Thread(this, "ExtractorDataDuplicator").start(); started = true; } } } @Override public void run() { if (multiple) { int numData = 0; int numExceptions = 0; T data; while ((data = provider.getNextResult()) != null) { addToAllComponents(data); numData++; } ExtractorException exc; while ((exc = provider.getNextException()) != null) { LOGGER.logExceptionDebug("Got " + type + "-Extractor exception", exc); numExceptions++; } if (numData == 0) { // log an error when no elements could be produced LOGGER.logError(type + "-Extractor: Got " + numData + " elements and " + numExceptions + " exceptions"); } else if (numExceptions > 0) { // log a warning when some elements got through, but others failed LOGGER.logWarning(type + "-Extractor: Got " + numData + " elements and " + numExceptions + " exceptions"); } } else { T data = provider.getResult(); if (data != null) { addToAllComponents(data); } ExtractorException exc = provider.getException(); if (exc != null) { LOGGER.logException("Got " + type + "-Extractor exception", exc); } } for (StartingComponent<T> component : startingComponents) { synchronized (component) { component.done = true; component.notifyAll(); } } } } /** * A starting component for the analysis pipeline. This is used to pass the extractor data to the analysis * components. This class does nothing; it is only used by {@link ExtractorDataDuplicator}. * * @param <T> The type of result data that this produces. */ private static class StartingComponent<T> extends AnalysisComponent<T> { private boolean done = false; private @NonNull ExtractorDataDuplicator<T> duplicator; private @NonNull String name; /** * Creates a new starting component. * * @param config The global configuration. * @param duplicator The {@link ExtractorDataDuplicator} to start when this component is started * (start on demand). * @param type The type of starting component ("CM", "BM" or "VM"). */ public StartingComponent(@NonNull Configuration config, @NonNull ExtractorDataDuplicator<T> duplicator, @NonNull String type) { super(config); this.duplicator = duplicator; this.name = type + " StartingComponent"; } @Override protected void execute() { duplicator.start(); // wait until the duplicator tells us that we are done synchronized (this) { while (!done) { try { wait(); } catch (InterruptedException e) { } } } } @Override public String getResultName() { return name; } @Override boolean isInternalHelperComponent() { return true; } } }
src/net/ssehub/kernel_haven/analysis/PipelineAnalysis.java
/* * Copyright 2017-2019 University of Hildesheim, Software Systems Engineering * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.ssehub.kernel_haven.analysis; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import net.ssehub.kernel_haven.SetUpException; import net.ssehub.kernel_haven.build_model.BuildModel; import net.ssehub.kernel_haven.code_model.SourceFile; import net.ssehub.kernel_haven.config.Configuration; import net.ssehub.kernel_haven.config.DefaultSettings; import net.ssehub.kernel_haven.provider.AbstractProvider; import net.ssehub.kernel_haven.util.ExtractorException; import net.ssehub.kernel_haven.util.Timestamp; import net.ssehub.kernel_haven.util.io.ITableCollection; import net.ssehub.kernel_haven.util.io.ITableWriter; import net.ssehub.kernel_haven.util.io.TableCollectionWriterFactory; import net.ssehub.kernel_haven.util.io.csv.CsvFileCollection; import net.ssehub.kernel_haven.util.null_checks.NonNull; import net.ssehub.kernel_haven.variability_model.VariabilityModel; /** * An analysis that is a pipeline consisting of {@link AnalysisComponent}s. * * @author Adam */ public abstract class PipelineAnalysis extends AbstractAnalysis { private static PipelineAnalysis instance; private ITableCollection resultCollection; private ExtractorDataDuplicator<VariabilityModel> vmStarter; private ExtractorDataDuplicator<BuildModel> bmStarter; private ExtractorDataDuplicator<SourceFile<?>> cmStarter; /** * Creates a new {@link PipelineAnalysis}. * * @param config The global configuration. */ public PipelineAnalysis(@NonNull Configuration config) { super(config); } /** * The {@link PipelineAnalysis} that is the current main analysis in this execution. May be null if no * {@link PipelineAnalysis} is the main analysis component. * * @return The current {@link PipelineAnalysis} instance. */ static PipelineAnalysis getInstance() { return instance; } /** * Returns the {@link AnalysisComponent} that provides the variability model from the extractors. * * @return The {@link AnalysisComponent} that provides the variability model. */ protected @NonNull AnalysisComponent<VariabilityModel> getVmComponent() { return vmStarter.createNewStartingComponent(config); } /** * Returns the {@link AnalysisComponent} that provides the build model from the extractors. * * @return The {@link AnalysisComponent} that provides the build model. */ protected @NonNull AnalysisComponent<BuildModel> getBmComponent() { return bmStarter.createNewStartingComponent(config); } /** * Returns the {@link AnalysisComponent} that provides the code model from the extractors. * * @return The {@link AnalysisComponent} that provides the code model. */ protected @NonNull AnalysisComponent<SourceFile<?>> getCmComponent() { return cmStarter.createNewStartingComponent(config); } /** * The collection that {@link AnalysisComponent}s should write their intermediate output to. * * @return The {@link ITableCollection} to write output to. 
*/ ITableCollection getResultCollection() { return resultCollection; } /** * Creates the result collection from the user settings. * * @return The result collection to store files in. * * @throws SetUpException If creating the result collection fails. */ private ITableCollection createResultCollection() throws SetUpException { String outputSuffix = config.getValue(DefaultSettings.ANALYSIS_RESULT); File outputFile = new File(getOutputDir(), Timestamp.INSTANCE.getFilename( config.getValue(DefaultSettings.ANALYSIS_RESULT_NAME), outputSuffix)); try { return TableCollectionWriterFactory.INSTANCE.createCollection(outputFile); } catch (IOException e) { throw new SetUpException("Can't create output for suffix " + outputSuffix, e); } } /** * Creates the pipeline. * * @return The "main" (i.e. the last) component of the pipeline. * * @throws SetUpException If setting up the pipeline fails. */ protected abstract @NonNull AnalysisComponent<?> createPipeline() throws SetUpException; @Override public void run() { Thread.currentThread().setName("AnalysisPipelineController"); try { vmStarter = new ExtractorDataDuplicator<>(vmProvider, false, "VM"); bmStarter = new ExtractorDataDuplicator<>(bmProvider, false, "BM"); cmStarter = new ExtractorDataDuplicator<>(cmProvider, true, "CM"); try { resultCollection = createResultCollection(); } catch (SetUpException e) { LOGGER.logException("Couldn't create output collection based on user configuration; " + "falling back to CSV", e); resultCollection = new CsvFileCollection(new File(getOutputDir(), "Analysis_" + Timestamp.INSTANCE.getFileTimestamp())); } instance = this; AnalysisComponent<?> mainComponent = createPipeline(); if (config.getValue(DefaultSettings.ANALYSIS_PIPELINE_START_EXTRACTORS)) { // start all extractors; this is needed here because the analysis components will most likely poll them // in order, which means that the extractors would not run in parallel vmStarter.start(); bmStarter.start(); cmStarter.start(); } if (mainComponent instanceof JoinComponent) { joinSplitComponentFull((JoinComponent) mainComponent); } else { pollAndWriteOutput(mainComponent); } LOGGER.logDebug("Analysis components done"); try { LOGGER.logDebug("Closing result collection"); resultCollection.close(); for (File file : resultCollection.getFiles()) { addOutputFile(file); } } catch (IOException e) { LOGGER.logException("Exception while closing output file", e); } } catch (SetUpException e) { LOGGER.logException("Exception while setting up", e); } } /** * Part of {@link #run()} to handle {@link JoinComponent}s. This method joins all components in parallel. * * @param mainComponent The analysis, which is joining results of multiple other components. */ private void joinSplitComponentFull(@NonNull JoinComponent mainComponent) { List<Thread> threads = new ArrayList<>(mainComponent.getInputs().length); for (AnalysisComponent<?> component : mainComponent.getInputs()) { Thread th = new Thread(() -> { pollAndWriteOutput(component); }, "AnalysisPipelineControllerOutputThread"); threads.add(th); th.setDaemon(true); th.start(); } for (Thread th : threads) { try { th.join(); } catch (InterruptedException e) { } } } /** * Polls all output from the given component and writes it to the output file. * * @param component The component to read the output from. 
*/ private void pollAndWriteOutput(@NonNull AnalysisComponent<?> component) { LOGGER.logDebug2("Starting and polling output of analysis component (", component.getClass().getSimpleName(), ")..."); try (ITableWriter writer = resultCollection.getWriter(component.getResultName())) { Object result; while ((result = component.getNextResult()) != null) { LOGGER.logDebug2("Got analysis result: ", result.toString()); writer.writeObject(result); } } catch (IOException e) { LOGGER.logException("Exception while writing output file", e); } } /** * A class for duplicating the extractor data. This way, multiple analysis components can have the same models * as their input data. * * @param <T> The type of model to duplicate. */ private static class ExtractorDataDuplicator<T> implements Runnable { private @NonNull AbstractProvider<T> provider; private boolean multiple; private @NonNull List<@NonNull StartingComponent<T>> startingComponents; private boolean started; private @NonNull String type; /** * Creates a new ExtractorDataDuplicator. * * @param provider The provider to get the data from. * @param multiple Whether the provider should be polled multiple times or just once. * @param type The type of duplicator component ("CM", "BM" or "VM"). */ public ExtractorDataDuplicator(@NonNull AbstractProvider<T> provider, boolean multiple, @NonNull String type) { this.provider = provider; this.multiple = multiple; this.type = type; startingComponents = new LinkedList<>(); } /** * Creates a new starting component that will get its own copy of the data from us. * * @param config The configuration to create the component with. * * @return The starting component that can be used as input data for other analysis components. */ public @NonNull StartingComponent<T> createNewStartingComponent(@NonNull Configuration config) { StartingComponent<T> component = new StartingComponent<>(config, this, type); startingComponents.add(component); return component; } /** * Adds the given data element to all starting components. * * @param data The data to add. */ private void addToAllComponents(@NonNull T data) { for (StartingComponent<T> component : startingComponents) { component.addResult(data); } } /** * Starts a new thread that copies the extractor data to all stating components created up until now. * This method ensures that this thread is only started once, no matter how often this method is called. */ public void start() { synchronized (this) { if (!started) { new Thread(this, "ExtractorDataDuplicator").start(); started = true; } } } @Override public void run() { if (multiple) { T data; while ((data = provider.getNextResult()) != null) { addToAllComponents(data); } ExtractorException exc; while ((exc = provider.getNextException()) != null) { LOGGER.logException("Got extractor exception", exc); } } else { T data = provider.getResult(); if (data != null) { addToAllComponents(data); } ExtractorException exc = provider.getException(); if (exc != null) { LOGGER.logException("Got extractor exception", exc); } } for (StartingComponent<T> component : startingComponents) { synchronized (component) { component.done = true; component.notifyAll(); } } } } /** * A starting component for the analysis pipeline. This is used to pass the extractor data to the analysis * components. This class does nothing; it is only used by {@link ExtractorDataDuplicator}. * * @param <T> The type of result data that this produces. 
*/ private static class StartingComponent<T> extends AnalysisComponent<T> { private boolean done = false; private @NonNull ExtractorDataDuplicator<T> duplicator; private @NonNull String name; /** * Creates a new starting component. * * @param config The global configuration. * @param duplicator The {@link ExtractorDataDuplicator} to start when this component is started * (start on demand). * @param type The type of starting component ("CM", "BM" or "VM"). */ public StartingComponent(@NonNull Configuration config, @NonNull ExtractorDataDuplicator<T> duplicator, @NonNull String type) { super(config); this.duplicator = duplicator; this.name = type + " StartingComponent"; } @Override protected void execute() { duplicator.start(); // wait until the duplicator tells us that we are done synchronized (this) { while (!done) { try { wait(); } catch (InterruptedException e) { } } } } @Override public String getResultName() { return name; } @Override boolean isInternalHelperComponent() { return true; } } }
ExtractorDataDuplicator: Don't log every extractor exception to [error]
src/net/ssehub/kernel_haven/analysis/PipelineAnalysis.java
ExtractorDataDuplicator: Don't log every extractor exception to [error]
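Note on the record above: the difference between its new_contents and old_contents comes down to one logging change in ExtractorDataDuplicator.run() — each per-item ExtractorException is now logged at debug level while results and exceptions are counted, and a single summary line is emitted afterwards (error only when the extractor produced nothing, warning when some items succeeded but others failed). The following is a minimal, self-contained sketch of that aggregation pattern under stated assumptions: the Provider class and the AggregatedExtractorLogging/drain names are simplified, hypothetical stand-ins using java.util.logging, not KernelHaven's actual provider or Logger API.

import java.util.ArrayDeque;
import java.util.Queue;
import java.util.logging.Level;
import java.util.logging.Logger;

/** Minimal sketch: log per-item failures at debug, emit one summary at error/warning. */
public class AggregatedExtractorLogging {

    /** Simplified, hypothetical stand-in for an extractor provider. */
    static class Provider<T> {
        private final Queue<T> results = new ArrayDeque<>();
        private final Queue<Exception> exceptions = new ArrayDeque<>();

        Provider(Iterable<T> data, Iterable<Exception> errors) {
            data.forEach(results::add);
            errors.forEach(exceptions::add);
        }

        T getNextResult() { return results.poll(); }
        Exception getNextException() { return exceptions.poll(); }
    }

    private static final Logger LOGGER = Logger.getLogger("ExtractorDataDuplicator");

    static <T> void drain(Provider<T> provider, String type) {
        int numData = 0;
        int numExceptions = 0;

        T data;
        while ((data = provider.getNextResult()) != null) {
            // forward data to consumers here
            numData++;
        }

        Exception exc;
        while ((exc = provider.getNextException()) != null) {
            // per-item failures would clutter the error log, so keep them at debug level
            LOGGER.log(Level.FINE, "Got " + type + "-Extractor exception", exc);
            numExceptions++;
        }

        String summary = type + "-Extractor: Got " + numData + " elements and "
                + numExceptions + " exceptions";
        if (numData == 0) {
            LOGGER.severe(summary);   // nothing usable was produced at all
        } else if (numExceptions > 0) {
            LOGGER.warning(summary);  // partial success: some elements got through
        }
    }

    public static void main(String[] args) {
        Provider<String> p = new Provider<>(
                java.util.List.of("a.c", "b.c"),
                java.util.List.of(new Exception("failed to parse c.c")));
        drain(p, "CM");
    }
}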
Java
apache-2.0
9bb0fdd1787ad259533399509bd5c253df8bdf58
0
vatbub/fokLauncher,vatbub/fokLauncher
package view; import java.awt.Desktop; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.ResourceBundle; import java.util.logging.Level; import applist.App; import applist.AppList; import common.Common; import common.AppConfig; import common.HidableUpdateProgressDialog; import common.Internet; import common.Prefs; import common.UpdateChecker; import common.UpdateInfo; import common.Version; import common.VersionList; import extended.CustomListCell; import extended.GuiLanguage; import extended.VersionMenuItem; import javafx.application.Application; import javafx.application.Platform; import javafx.beans.binding.Bindings; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.collections.transformation.FilteredList; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.control.Button; import javafx.scene.control.CheckBox; import javafx.scene.control.ComboBox; import javafx.scene.control.ContextMenu; import javafx.scene.control.Hyperlink; import javafx.scene.control.Label; import javafx.scene.control.ListView; import javafx.scene.control.Menu; import javafx.scene.control.MenuItem; import javafx.scene.control.ProgressBar; import javafx.scene.control.SelectionMode; import javafx.scene.control.TextField; import javafx.scene.image.Image; import javafx.scene.input.ClipboardContent; import javafx.scene.input.DragEvent; import javafx.scene.input.Dragboard; import javafx.scene.input.MouseEvent; import javafx.scene.input.TransferMode; import javafx.scene.layout.GridPane; import javafx.stage.FileChooser; import javafx.stage.Stage; import logging.FOKLogger; import view.motd.MOTD; import view.motd.MOTDDialog; import view.updateAvailableDialog.UpdateAvailableDialog; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.jdom2.JDOMException; import com.rometools.rome.io.FeedException; public class MainWindow extends Application implements HidableUpdateProgressDialog { private static FOKLogger log; public static AppConfig appConfig; public static void main(String[] args) { common.Common.setAppName("foklauncher"); log = new FOKLogger(MainWindow.class.getName()); prefs = new Prefs(MainWindow.class.getName()); // Complete the update UpdateChecker.completeUpdate(args); for (String arg : args) { if (arg.toLowerCase().matches("mockappversion=.*")) { // Set the mock version String version = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockAppVersion(version); } else if (arg.toLowerCase().matches("mockbuildnumber=.*")) { // Set the mock build number String buildnumber = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockBuildNumber(buildnumber); } else if (arg.toLowerCase().matches("mockpackaging=.*")) { // Set the mock packaging String packaging = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockPackaging(packaging); } } launch(args); } private static ResourceBundle bundle; private static Prefs prefs; private static final String enableSnapshotsPrefKey = "enableSnapshots"; private static final String showLauncherAgainPrefKey = 
"showLauncherAgain"; private static final String guiLanguagePrefKey = "guiLanguage"; private static AppList apps; private static Stage stage; private static Thread downloadAndLaunchThread = new Thread(); private static boolean launchSpecificVersionMenuCanceled = false; private static Locale systemDefaultLocale; private Runnable getAppListRunnable = new Runnable() { @Override public void run() { try { Platform.runLater(new Runnable() { @Override public void run() { appList.setPlaceholder(new Label(bundle.getString("WaitForAppList"))); } }); apps = App.getAppList(); ObservableList<App> items = FXCollections.observableArrayList(); FilteredList<App> filteredData = new FilteredList<>(items, s -> true); for (App app : apps) { items.add(app); } // Add filter functionality searchField.textProperty().addListener(obs -> { String filter = searchField.getText(); if (filter == null || filter.length() == 0) { filteredData.setPredicate(s -> true); } else { filteredData.setPredicate(s -> s.getName().toLowerCase().contains(filter.toLowerCase())); } }); // Build the context menu appList.setCellFactory(lv -> { CustomListCell<App> cell = new CustomListCell<App>(); ContextMenu contextMenu = new ContextMenu(); Menu launchSpecificVersionItem = new Menu(); launchSpecificVersionItem.textProperty() .bind(Bindings.format(bundle.getString("launchSpecificVersion"), cell.itemProperty())); MenuItem dummyVersion = new MenuItem(); dummyVersion.setText(bundle.getString("waitForVersionList")); launchSpecificVersionItem.getItems().add(dummyVersion); launchSpecificVersionItem.setOnHiding(event2 -> { launchSpecificVersionMenuCanceled = true; }); launchSpecificVersionItem.setOnShown(event -> { launchSpecificVersionMenuCanceled = false; Thread buildContextMenuThread = new Thread() { @Override public void run() { log.getLogger().info("Getting available online versions..."); App app = cell.getItem(); // Get available versions VersionList verList = new VersionList(); if (!workOfflineCheckbox.isSelected()) { // Online mode enabled try { verList = app.getAllOnlineVersions(); if (enableSnapshotsCheckbox.isSelected()) { verList.add(app.getLatestOnlineSnapshotVersion()); } } catch (Exception e) { // Something happened, pretend // offline mode verList = app.getCurrentlyInstalledVersions(); } } else { // Offline mode enabled verList = app.getCurrentlyInstalledVersions(); } // Sort the list Collections.sort(verList); // Clear previous list Platform.runLater(new Runnable() { @Override public void run() { launchSpecificVersionItem.getItems().clear(); } }); for (Version ver : verList) { VersionMenuItem menuItem = new VersionMenuItem(); menuItem.setVersion(ver); menuItem.setText(ver.toString(false)); menuItem.setOnAction(event2 -> { // Launch the download downloadAndLaunchThread = new Thread() { @Override public void run() { try { // Attach the on app // exit handler if // required if (launchLauncherAfterAppExitCheckbox.isSelected()) { Platform.setImplicitExit(false); currentlySelectedApp .addEventHandlerWhenLaunchedAppExits(showLauncherAgain); } else { Platform.setImplicitExit(true); currentlySelectedApp.removeEventHandlerWhenLaunchedAppExits( showLauncherAgain); } currentlySelectedApp.downloadIfNecessaryAndLaunch( currentMainWindowInstance, menuItem.getVersion(), workOfflineCheckbox.isSelected()); } catch (IOException | JDOMException e) { currentMainWindowInstance.showErrorMessage( "An error occurred: \n" + ExceptionUtils.getStackTrace(e)); log.getLogger().log(Level.SEVERE, "An error occurred", e); } } }; 
downloadAndLaunchThread.setName("downloadAndLaunchThread"); downloadAndLaunchThread.start(); }); Platform.runLater(new Runnable() { @Override public void run() { launchSpecificVersionItem.getItems().add(menuItem); } }); } Platform.runLater(new Runnable() { @Override public void run() { if (!launchSpecificVersionMenuCanceled) { launchSpecificVersionItem.hide(); launchSpecificVersionItem.show(); } } }); } }; if (!cell.getItem().isSpecificVersionListLoaded()) { buildContextMenuThread.setName("buildContextMenuThread"); buildContextMenuThread.start(); cell.getItem().setSpecificVersionListLoaded(true); } }); Menu deleteItem = new Menu(); deleteItem.textProperty() .bind(Bindings.format(bundle.getString("deleteVersion"), cell.itemProperty())); MenuItem dummyVersion2 = new MenuItem(); dummyVersion2.setText(bundle.getString("waitForVersionList")); deleteItem.getItems().add(dummyVersion2); deleteItem.setOnShown(event -> { // App app = apps.get(cell.getIndex()); App app = cell.getItem(); if (!app.isDeletableVersionListLoaded()) { // Get deletable versions app.setDeletableVersionListLoaded(true); log.getLogger().info("Getting deletable versions..."); deleteItem.getItems().clear(); VersionList verList = new VersionList(); verList = app.getCurrentlyInstalledVersions(); Collections.sort(verList); for (Version ver : verList) { VersionMenuItem menuItem = new VersionMenuItem(); menuItem.setVersion(ver); menuItem.setText(ver.toString(false)); menuItem.setOnAction(event2 -> { // Delete the file try { currentlySelectedApp.delete(menuItem.getVersion()); } finally { updateLaunchButton(); } // Update the list the next time the // user opens it as it has changed app.setDeletableVersionListLoaded(false); }); Platform.runLater(new Runnable() { @Override public void run() { deleteItem.getItems().add(menuItem); } }); } Platform.runLater(new Runnable() { @Override public void run() { deleteItem.hide(); deleteItem.show(); } }); } }); MenuItem exportInfoItem = new MenuItem(); exportInfoItem.setText(bundle.getString("exportInfo")); exportInfoItem.setOnAction(event2 -> { FileChooser fileChooser = new FileChooser(); fileChooser.getExtensionFilters() .addAll(new FileChooser.ExtensionFilter("FOK-Launcher-File", "*.foklauncher")); fileChooser.setTitle("Save Image"); File file = fileChooser.showSaveDialog(stage); if (file != null) { log.getLogger().info("Exporting info..."); // App app = apps.get(cell.getIndex()); App app = cell.getItem(); try { log.getLogger().info("Exporting app info of app " + app.getName() + " to file: " + file.getAbsolutePath()); app.exportInfo(file); } catch (IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); currentMainWindowInstance.showErrorMessage(e.toString()); } } }); contextMenu.getItems().addAll(launchSpecificVersionItem, deleteItem, exportInfoItem); MenuItem removeImportedApp = new MenuItem(); contextMenu.setOnShowing(event5 -> { App app = cell.getItem(); if (app.isImported()) { removeImportedApp.setText("Remove this app from this list"); removeImportedApp.setOnAction(event3 -> { try { app.removeFromImportedAppList(); currentMainWindowInstance.loadAppList(); } catch (IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); currentMainWindowInstance.showErrorMessage(e.toString()); } }); contextMenu.getItems().add(removeImportedApp); } }); contextMenu.setOnHidden(event5 -> { // Remove the removeImportedApp-Item again if it exists if (contextMenu.getItems().contains(removeImportedApp)) { contextMenu.getItems().remove(removeImportedApp); } }); 
cell.emptyProperty().addListener((obs, wasEmpty, isNowEmpty) -> { if (isNowEmpty) { cell.setContextMenu(null); } else { cell.setContextMenu(contextMenu); } }); return cell; }); Platform.runLater(new Runnable() { @Override public void run() { appList.setItems(filteredData); appList.setPlaceholder(new Label(bundle.getString("emptyAppList"))); } }); } catch (JDOMException | IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); currentMainWindowInstance .showErrorMessage("An error occurred: \n" + e.getClass().getName() + "\n" + e.getMessage()); } } }; /** * The thread that gets the app list */ private Thread getAppListThread; /** * This reference always refers to the currently used instance of the * MainWidow. The purpose of this field that {@code this} can be accessed in * a convenient way in static methods. */ private static MainWindow currentMainWindowInstance; private static App currentlySelectedApp = null; @FXML // ResourceBundle that was given to the FXMLLoader private ResourceBundle resources; @FXML // URL location of the FXML file that was given to the FXMLLoader private URL location; @FXML // fx:id="appList" private ListView<App> appList; // Value injected by FXMLLoader @FXML // fx:id="searchField" private TextField searchField; // Value injected by FXMLLoader @FXML // fx:id="enableSnapshotsCheckbox" private CheckBox enableSnapshotsCheckbox; // Value injected by FXMLLoader @FXML // fx:id="launchButton" private ProgressButton launchButton; // Value injected by FXMLLoader @FXML // fx:id="launchLauncherAfterAppExitCheckbox" private CheckBox launchLauncherAfterAppExitCheckbox; // Value injected by // FXMLLoader @FXML // fx:id="languageSelector" private ComboBox<GuiLanguage> languageSelector; // Value injected by // FXMLLoader @FXML // fx:id="progressBar" private ProgressBar progressBar; // Value injected by FXMLLoader @FXML // fx:id="workOfflineCheckbox" private CheckBox workOfflineCheckbox; // Value injected by FXMLLoader @FXML /** * fx:id="updateLink" */ private Hyperlink updateLink; // Value injected by FXMLLoader @FXML /** * fx:id="versionLabel" */ private Label versionLabel; // Value injected by FXMLLoader @FXML // fx:id="settingsGridView" private GridPane settingsGridView; // Value injected by FXMLLoader @FXML // fx:id="appInfoButton" private Button appInfoButton; // Value injected by FXMLLoader // Handler for ListView[fx:id="appList"] onMouseClicked @FXML void appListOnMouseClicked(MouseEvent event) { // Currently not used } // Handler for AnchorPane[id="AnchorPane"] onDragDetected @FXML void appListOnDragDetected(MouseEvent event) { if (currentlySelectedApp != null) { File tempFile = new File( Common.getAndCreateAppDataPath() + currentlySelectedApp.getMavenArtifactID() + ".foklauncher"); try { currentlySelectedApp.exportInfo(tempFile); Dragboard db = appList.startDragAndDrop(TransferMode.MOVE); ClipboardContent content = new ClipboardContent(); content.putFiles(Arrays.asList(tempFile)); db.setContent(content); } catch (IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } } // Handler for ListView[fx:id="appList"] onDragOver @FXML void mainFrameOnDragOver(DragEvent event) { Dragboard db = event.getDragboard(); // Only allow drag'n'drop for files and if no app list is currently // loading if (db.hasFiles() && !getAppListThread.isAlive()) { // Don't accept the drag if any file contained in the drag does not // have the *.foklauncher extension for (File f : db.getFiles()) { if 
(!FilenameUtils.getExtension(f.getAbsolutePath()).equals("foklauncher")) { event.consume(); return; } } event.acceptTransferModes(TransferMode.LINK); } else { event.consume(); } } @FXML void mainFrameOnDragDropped(DragEvent event) { List<File> files = event.getDragboard().getFiles(); for (File f : files) { log.getLogger().info("Importing app from " + f.getAbsolutePath() + "..."); try { App.addImportedApp(f); currentMainWindowInstance.loadAppList(); } catch (IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); currentMainWindowInstance.showErrorMessage(e.toString(), false); } } } private static Runnable showLauncherAgain = new Runnable() { @Override public void run() { // reset the ui try { currentMainWindowInstance.start(stage); } catch (Exception e) { log.getLogger().log(Level.INFO, "An error occurred while firing a handler for the LaunchedAppExited event, trying to run the handler using Platform.runLater...", e); } Platform.setImplicitExit(true); } }; @FXML /** * Handler for Hyperlink[fx:id="updateLink"] onAction * * @param event * The event object that contains information about the event. */ void updateLinkOnAction(ActionEvent event) { // Check for new version ignoring ignored updates Thread updateThread = new Thread() { @Override public void run() { UpdateInfo update = UpdateChecker.isUpdateAvailableCompareAppVersion(AppConfig.getUpdateRepoBaseURL(), AppConfig.groupID, AppConfig.artifactID, AppConfig.getUpdateFileClassifier(), Common.getPackaging()); Platform.runLater(new Runnable() { @Override public void run() { new UpdateAvailableDialog(update); } }); } }; updateThread.setName("manualUpdateThread"); updateThread.start(); } @FXML void languageSelectorOnAction(ActionEvent event) { log.getLogger().info("Switching gui language to: " + languageSelector.getItems().get(languageSelector.getSelectionModel().getSelectedIndex())); prefs.setPreference(guiLanguagePrefKey, languageSelector.getItems() .get(languageSelector.getSelectionModel().getSelectedIndex()).getLocale().getLanguage()); // Restart gui boolean implicitExit = Platform.isImplicitExit(); Platform.setImplicitExit(false); stage.hide(); try { currentMainWindowInstance.start(stage); } catch (Exception e) { log.getLogger().log(Level.INFO, "An error occurred while setting a new gui language", e); } Platform.setImplicitExit(implicitExit); } // Handler for Button[fx:id="launchButton"] onAction @FXML void launchButtonOnAction(ActionEvent event) { MainWindow gui = this; if (!downloadAndLaunchThread.isAlive()) { // Launch the download downloadAndLaunchThread = new Thread() { @Override public void run() { try { // Attach the on app exit handler if required if (launchLauncherAfterAppExitCheckbox.isSelected()) { Platform.setImplicitExit(false); currentlySelectedApp.addEventHandlerWhenLaunchedAppExits(showLauncherAgain); } else { Platform.setImplicitExit(true); currentlySelectedApp.removeEventHandlerWhenLaunchedAppExits(showLauncherAgain); } currentlySelectedApp.downloadIfNecessaryAndLaunch(enableSnapshotsCheckbox.isSelected(), gui, workOfflineCheckbox.isSelected()); } catch (IOException | JDOMException e) { gui.showErrorMessage("An error occurred: \n" + e.getClass().getName() + "\n" + e.getMessage()); log.getLogger().log(Level.SEVERE, "An error occurred", e); } } }; downloadAndLaunchThread.setName("downloadAndLaunchThread"); downloadAndLaunchThread.start(); } else { currentlySelectedApp.cancelDownloadAndLaunch(gui); } } // Handler for CheckBox[fx:id="workOfflineCheckbox"] onAction @FXML void 
workOfflineCheckboxOnAction(ActionEvent event) { updateLaunchButton(); } // Handler for CheckBox[fx:id="launchLauncherAfterAppExitCheckbox"] onAction @FXML void launchLauncherAfterAppExitCheckboxOnAction(ActionEvent event) { prefs.setPreference(showLauncherAgainPrefKey, Boolean.toString(launchLauncherAfterAppExitCheckbox.isSelected())); } @FXML void appInfoButtonOnAction(ActionEvent event) { try { Desktop.getDesktop().browse(new URI(currentlySelectedApp.getAdditionalInfoURL().toString())); } catch (IOException | URISyntaxException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } @Override public void start(Stage primaryStage) throws Exception { // get the right resource bundle String guiLanguageCode = prefs.getPreference(guiLanguagePrefKey, ""); if (guiLanguageCode.equals("")) { if (systemDefaultLocale != null) { Locale.setDefault(systemDefaultLocale); } } else { // Get the specified bundle if (systemDefaultLocale == null) { systemDefaultLocale = Locale.getDefault(); } log.getLogger().info("Setting language: " + guiLanguageCode); Locale.setDefault(new Locale(guiLanguageCode)); } bundle = ResourceBundle.getBundle("view.MainWindow"); // appConfig = new Config(); stage = primaryStage; try { Thread updateThread = new Thread() { @Override public void run() { UpdateInfo update = UpdateChecker.isUpdateAvailable(AppConfig.getUpdateRepoBaseURL(), AppConfig.groupID, AppConfig.artifactID, AppConfig.getUpdateFileClassifier(), Common.getPackaging()); if (update.showAlert) { Platform.runLater(new Runnable() { @Override public void run() { new UpdateAvailableDialog(update); } }); } } }; updateThread.setName("updateThread"); updateThread.start(); Parent root = FXMLLoader.load(getClass().getResource("MainWindow.fxml"), bundle); Scene scene = new Scene(root); scene.getStylesheets().add(getClass().getResource("MainWindow.css").toExternalForm()); primaryStage.setTitle(bundle.getString("windowTitle")); primaryStage.setMinWidth(scene.getRoot().minWidth(0) + 70); primaryStage.setMinHeight(scene.getRoot().minHeight(0) + 70); primaryStage.setScene(scene); // Set Icon primaryStage.getIcons().add(new Image(MainWindow.class.getResourceAsStream("icon.png"))); primaryStage.show(); } catch (Exception e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } @Override public void stop() { try { UpdateChecker.cancelUpdateCompletion(); if (currentlySelectedApp != null) { currentlySelectedApp.cancelDownloadAndLaunch(this); } } catch (Exception e) { log.getLogger().log(Level.SEVERE, "An error occurred but is not relevant as we are currently in the shutdown process. 
Possible reasons for this exception are: You tried to modify a view but it is not shown any more on the screen; You tried to cancel the app download but no download was in progress.", e); } } @FXML // This method is called by the FXMLLoader when initialization is // complete void initialize() { assert launchButton != null : "fx:id=\"launchButton\" was not injected: check your FXML file 'MainWindow.fxml'."; assert launchLauncherAfterAppExitCheckbox != null : "fx:id=\"launchLauncherAfterAppExitCheckbox\" was not injected: check your FXML file 'MainWindow.fxml'."; assert languageSelector != null : "fx:id=\"languageSelector\" was not injected: check your FXML file 'MainWindow.fxml'."; assert versionLabel != null : "fx:id=\"versionLabel\" was not injected: check your FXML file 'MainWindow.fxml'."; assert searchField != null : "fx:id=\"searchField\" was not injected: check your FXML file 'MainWindow.fxml'."; assert appList != null : "fx:id=\"appList\" was not injected: check your FXML file 'MainWindow.fxml'."; assert appInfoButton != null : "fx:id=\"appInfoButton\" was not injected: check your FXML file 'MainWindow.fxml'."; assert progressBar != null : "fx:id=\"progressBar\" was not injected: check your FXML file 'MainWindow.fxml'."; assert enableSnapshotsCheckbox != null : "fx:id=\"enableSnapshotsCheckbox\" was not injected: check your FXML file 'MainWindow.fxml'."; assert workOfflineCheckbox != null : "fx:id=\"workOfflineCheckbox\" was not injected: check your FXML file 'MainWindow.fxml'."; assert updateLink != null : "fx:id=\"updateLink\" was not injected: check your FXML file 'MainWindow.fxml'."; assert settingsGridView != null : "fx:id=\"settingsGridView\" was not injected: check your FXML file 'MainWindow.fxml'."; // Initialize your logic here: all @FXML variables will have been // injected // Show messages of the day Thread motdThread = new Thread() { @Override public void run() { MOTD motd; try { motd = MOTD.getLatestMOTD(AppConfig.getMotdFeedUrl()); if (!motd.isMarkedAsRead()) { Platform.runLater(new Runnable() { @Override public void run() { new MOTDDialog(motd, motd.getEntry().getTitle()); } }); } } catch (IllegalArgumentException | FeedException | IOException | ClassNotFoundException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } }; motdThread.setName("motdThread"); motdThread.start(); currentMainWindowInstance = this; enableSnapshotsCheckbox.setSelected(Boolean.parseBoolean(prefs.getPreference(enableSnapshotsPrefKey, "false"))); launchLauncherAfterAppExitCheckbox .setSelected(Boolean.parseBoolean(prefs.getPreference(showLauncherAgainPrefKey, "false"))); try { versionLabel.setText(new Version(Common.getAppVersion(), Common.getBuildNumber()).toString(false)); } catch (IllegalArgumentException e) { versionLabel.setText(Common.UNKNOWN_APP_VERSION); } progressBar.setVisible(false); // Disable multiselect appList.getSelectionModel().setSelectionMode(SelectionMode.SINGLE); loadAvailableGuiLanguages(); // Selection change listener appList.getSelectionModel().selectedItemProperty().addListener(new ChangeListener<App>() { public void changed(ObservableValue<? 
extends App> observable, App oldValue, App newValue) { try { currentlySelectedApp = appList.getSelectionModel().getSelectedItem(); } catch (ArrayIndexOutOfBoundsException e) { currentlySelectedApp = null; } updateLaunchButton(); } }); if (!Internet.isConnected()) { workOfflineCheckbox.setSelected(true); workOfflineCheckbox.setDisable(true); } loadAppList(); } private void loadAvailableGuiLanguages() { List<Locale> supportedGuiLocales = Common.getLanguagesSupportedByResourceBundle(bundle); List<GuiLanguage> convertedList = new ArrayList<GuiLanguage>(supportedGuiLocales.size()); for (Locale lang : supportedGuiLocales) { convertedList.add(new GuiLanguage(lang, bundle.getString("langaugeSelector.chooseAutomatically"))); } ObservableList<GuiLanguage> items = FXCollections.observableArrayList(convertedList); languageSelector.setItems(items); if (Locale.getDefault() != systemDefaultLocale) { GuiLanguage langToSelect = null; for (GuiLanguage lang : convertedList) { if (Locale.getDefault().equals(lang.getLocale())) { langToSelect = lang; } } if (langToSelect != null) { languageSelector.getSelectionModel().select(langToSelect); } } } /** * Loads the app list using the {@link App#getAppList()}-method */ private void loadAppList() { if (getAppListThread != null) { // If thread is not null and running, quit if (getAppListThread.isAlive()) { return; } } // Thread is either null or not running anymore getAppListThread = new Thread(getAppListRunnable); getAppListThread.setName("getAppListThread"); getAppListThread.start(); } private void updateLaunchButton() { apps.reloadContextMenuEntriesOnShow(); Thread getAppStatus = new Thread() { @Override public void run() { App checkedApp = currentlySelectedApp; boolean progressVisibleBefore = progressBar.isVisible(); Platform.runLater(new Runnable() { @Override public void run() { launchButton.setDisable(true); launchButton.setDefaultButton(false); launchButton.setStyle("-fx-background-color: transparent;"); launchButton.setControlText(""); progressBar.setPrefHeight(launchButton.getHeight()); progressBar.setVisible(true); progressBar.setProgress(-1); launchButton.setProgressText(bundle.getString("progress.checkingVersionInfo")); appInfoButton.setDisable(true); } }); try { if (!workOfflineCheckbox.isSelected()) { // downloads are enabled // enable the additional info button if applicable Platform.runLater(new Runnable() { @Override public void run() { appInfoButton.setDisable(checkedApp.getAdditionalInfoURL() == null); } }); if (checkedApp.downloadRequired(enableSnapshotsCheckbox.isSelected())) { // download required setLaunchButtonText(checkedApp, false, bundle.getString("okButton.downloadAndLaunch")); } else if (checkedApp.updateAvailable(enableSnapshotsCheckbox.isSelected())) { // Update available setLaunchButtonText(checkedApp, false, bundle.getString("okButton.updateAndLaunch")); } else { // Can launch immediately setLaunchButtonText(checkedApp, false, bundle.getString("okButton.launch")); } } else { // downloads disabled if (checkedApp.downloadRequired(enableSnapshotsCheckbox.isSelected())) { // download required but disabled setLaunchButtonText(checkedApp, true, bundle.getString("okButton.downloadAndLaunch")); } else { // Can launch immediately setLaunchButtonText(checkedApp, false, bundle.getString("okButton.launch")); } } } catch (JDOMException | IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); // Switch to offline mode workOfflineCheckbox.setSelected(true); workOfflineCheckbox.setDisable(true); // update launch button 
accordingly updateLaunchButton(); // Show error message currentMainWindowInstance.showErrorMessage( bundle.getString("updateLaunchButtonException") + "\n\n" + ExceptionUtils.getStackTrace(e), false); } // Do finishing touches to gui only if checkedApp still equals // currentlySelectedApp (make sure the user did not change the // selection in the meanwhile) if (checkedApp == currentlySelectedApp) { Platform.runLater(new Runnable() { @Override public void run() { launchButton.setProgressText(""); progressBar.setVisible(progressVisibleBefore); } }); } } }; // Only update the button caption if no download is running and an app // is selected if (!downloadAndLaunchThread.isAlive() && currentlySelectedApp != null) { getAppStatus.setName("getAppStatus"); getAppStatus.start(); } else if (currentlySelectedApp == null) { // disable the button launchButton.setDisable(true); } } private void setLaunchButtonText(App checkedApp, boolean isDisabled, String text) { // Only update the button if the user did not change his selection if (checkedApp == currentlySelectedApp) { Platform.runLater(new Runnable() { @Override public void run() { launchButton.setDisable(isDisabled); launchButton.setDefaultButton(!isDisabled); launchButton.setStyle(""); launchButton.setControlText(text); } }); } } @Override public void hide() { Platform.runLater(new Runnable() { @Override public void run() { stage.hide(); } }); } // Handler for CheckBox[fx:id="enableSnapshotsCheckbox"] onAction @FXML void enableSnapshotsCheckboxOnAction(ActionEvent event) { updateLaunchButton(); prefs.setPreference(enableSnapshotsPrefKey, Boolean.toString(enableSnapshotsCheckbox.isSelected())); } @Override public void preparePhaseStarted() { Platform.runLater(new Runnable() { @Override public void run() { appList.setDisable(true); launchButton.setDisable(false); launchButton.setDefaultButton(false); progressBar.setPrefHeight(launchButton.getHeight()); launchButton.setStyle("-fx-background-color: transparent;"); launchButton.setControlText(bundle.getString("okButton.cancelLaunch")); progressBar.setVisible(true); progressBar.setProgress(0 / 4.0); launchButton.setProgressText(bundle.getString("progress.preparing")); settingsGridView.setDisable(true); } }); } @Override public void downloadStarted() { Platform.runLater(new Runnable() { @Override public void run() { progressBar.setProgress(-1); launchButton.setProgressText(bundle.getString("progress.downloading")); } }); } @Override public void installStarted() { Platform.runLater(new Runnable() { @Override public void run() { progressBar.setProgress(1.0 / 2.0); launchButton.setProgressText(bundle.getString("progress.installing")); } }); } @Override public void launchStarted() { Platform.runLater(new Runnable() { @Override public void run() { progressBar.setProgress(2.0 / 2.0); launchButton.setProgressText(bundle.getString("progress.launching")); } }); } public void showErrorMessage(String message) { showErrorMessage(message, false); } public void showErrorMessage(String message, boolean closeWhenDialogIsClosed) { Platform.runLater(new Runnable() { @Override public void run() { String finalMessage; if (closeWhenDialogIsClosed) { finalMessage = message + "\n\n" + "The app needs to close now."; } else { finalMessage = message; } Alert alert = new Alert(Alert.AlertType.ERROR, finalMessage); alert.show(); Thread t = new Thread() { @Override public void run() { while (alert.isShowing()) { // wait for dialog to be closed } if (closeWhenDialogIsClosed) { System.err.println("Closing app after exception, good 
bye..."); Platform.exit(); } } }; t.setName("showErrorThread"); t.start(); } }); } @Override public void operationCanceled() { log.getLogger().info("Operation cancelled."); Platform.setImplicitExit(true); appList.setDisable(false); progressBar.setVisible(false); Platform.runLater(new Runnable() { @Override public void run() { launchButton.setProgressText(""); settingsGridView.setDisable(false); updateLaunchButton(); } }); } @Override public void cancelRequested() { if (progressBar != null) { progressBar.setProgress(-1); launchButton.setProgressText(bundle.getString("cancelRequested")); launchButton.setDisable(true); log.getLogger().info("Requested to cancel the current operation, Cancel in progress..."); } } @Override public void downloadProgressChanged(double kilobytesDownloaded, double totalFileSizeInKB) { Platform.runLater(new Runnable() { @Override public void run() { progressBar.setProgress(kilobytesDownloaded / totalFileSizeInKB); String downloadedString; if (kilobytesDownloaded < 1024) { downloadedString = Double.toString(Math.round(kilobytesDownloaded * 100.0) / 100.0) + " " + bundle.getString("kilobyte"); } else if ((kilobytesDownloaded / 1024) < 1024) { downloadedString = Double.toString(Math.round((kilobytesDownloaded * 100.0) / 1024) / 100.0) + " " + bundle.getString("megabyte"); } else if (((kilobytesDownloaded / 1024) / 1024) < 1024) { downloadedString = Double .toString(Math.round(((kilobytesDownloaded * 100.0) / 1024) / 1024) / 100.0) + " " + bundle.getString("gigabyte"); } else { downloadedString = Double .toString(Math.round((((kilobytesDownloaded * 100.0) / 1024) / 1024) / 1024) / 100.0) + " " + bundle.getString("terabyte"); } String totalString; if (totalFileSizeInKB < 1024) { totalString = Double.toString(Math.round(totalFileSizeInKB * 100.0) / 100.0) + " " + bundle.getString("kilobyte"); } else if ((totalFileSizeInKB / 1024) < 1024) { totalString = Double.toString(Math.round((totalFileSizeInKB * 100.0) / 1024) / 100.0) + " " + bundle.getString("megabyte"); } else if (((totalFileSizeInKB / 1024) / 1024) < 1024) { totalString = Double.toString(Math.round(((totalFileSizeInKB * 100.0) / 1024) / 1024) / 100.0) + " " + bundle.getString("gigabyte"); } else { totalString = Double .toString(Math.round((((totalFileSizeInKB * 100.0) / 1024) / 1024) / 1024) / 100.0) + " " + bundle.getString("terabyte"); } launchButton.setProgressText( bundle.getString("progress.downloading") + "(" + downloadedString + "/" + totalString + ")"); } }); } }
src/main/java/view/MainWindow.java
package view; import java.awt.Desktop; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.ResourceBundle; import java.util.logging.Level; import applist.App; import applist.AppList; import common.Common; import common.AppConfig; import common.HidableUpdateProgressDialog; import common.Internet; import common.Prefs; import common.UpdateChecker; import common.UpdateInfo; import common.Version; import common.VersionList; import extended.CustomListCell; import extended.GuiLanguage; import extended.VersionMenuItem; import javafx.application.Application; import javafx.application.Platform; import javafx.beans.binding.Bindings; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.collections.transformation.FilteredList; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.control.Button; import javafx.scene.control.CheckBox; import javafx.scene.control.ComboBox; import javafx.scene.control.ContextMenu; import javafx.scene.control.Hyperlink; import javafx.scene.control.Label; import javafx.scene.control.ListView; import javafx.scene.control.Menu; import javafx.scene.control.MenuItem; import javafx.scene.control.ProgressBar; import javafx.scene.control.SelectionMode; import javafx.scene.control.TextField; import javafx.scene.image.Image; import javafx.scene.input.ClipboardContent; import javafx.scene.input.DragEvent; import javafx.scene.input.Dragboard; import javafx.scene.input.MouseEvent; import javafx.scene.input.TransferMode; import javafx.scene.layout.GridPane; import javafx.stage.FileChooser; import javafx.stage.Stage; import logging.FOKLogger; import view.motd.MOTD; import view.motd.MOTDDialog; import view.updateAvailableDialog.UpdateAvailableDialog; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.jdom2.JDOMException; import com.rometools.rome.io.FeedException; public class MainWindow extends Application implements HidableUpdateProgressDialog { private static FOKLogger log; public static AppConfig appConfig; public static void main(String[] args) { common.Common.setAppName("foklauncher"); log = new FOKLogger(MainWindow.class.getName()); prefs = new Prefs(MainWindow.class.getName()); // Complete the update UpdateChecker.completeUpdate(args); for (String arg : args) { if (arg.toLowerCase().matches("mockappversion=.*")) { // Set the mock version String version = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockAppVersion(version); } else if (arg.toLowerCase().matches("mockbuildnumber=.*")) { // Set the mock build number String buildnumber = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockBuildNumber(buildnumber); } else if (arg.toLowerCase().matches("mockpackaging=.*")) { // Set the mock packaging String packaging = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockPackaging(packaging); } } launch(args); } private static ResourceBundle bundle; private static Prefs prefs; private static final String enableSnapshotsPrefKey = "enableSnapshots"; private static final String showLauncherAgainPrefKey = 
"showLauncherAgain"; private static final String guiLanguagePrefKey = "guiLanguage"; private static AppList apps; private static Stage stage; private static Thread downloadAndLaunchThread = new Thread(); private static boolean launchSpecificVersionMenuCanceled = false; private static Locale systemDefaultLocale; private Runnable getAppListRunnable = new Runnable() { @Override public void run() { try { Platform.runLater(new Runnable() { @Override public void run() { appList.setPlaceholder(new Label(bundle.getString("WaitForAppList"))); } }); apps = App.getAppList(); ObservableList<App> items = FXCollections.observableArrayList(); FilteredList<App> filteredData = new FilteredList<>(items, s -> true); for (App app : apps) { items.add(app); } // Add filter functionality searchField.textProperty().addListener(obs -> { String filter = searchField.getText(); if (filter == null || filter.length() == 0) { filteredData.setPredicate(s -> true); } else { filteredData.setPredicate(s -> s.getName().toLowerCase().contains(filter.toLowerCase())); } }); // Build the context menu appList.setCellFactory(lv -> { CustomListCell<App> cell = new CustomListCell<App>(); ContextMenu contextMenu = new ContextMenu(); Menu launchSpecificVersionItem = new Menu(); launchSpecificVersionItem.textProperty() .bind(Bindings.format(bundle.getString("launchSpecificVersion"), cell.itemProperty())); MenuItem dummyVersion = new MenuItem(); dummyVersion.setText(bundle.getString("waitForVersionList")); launchSpecificVersionItem.getItems().add(dummyVersion); launchSpecificVersionItem.setOnHiding(event2 -> { launchSpecificVersionMenuCanceled = true; }); launchSpecificVersionItem.setOnShown(event -> { launchSpecificVersionMenuCanceled = false; Thread buildContextMenuThread = new Thread() { @Override public void run() { log.getLogger().info("Getting available online versions..."); App app = cell.getItem(); // Get available versions VersionList verList = new VersionList(); if (!workOfflineCheckbox.isSelected()) { // Online mode enabled try { verList = app.getAllOnlineVersions(); if (enableSnapshotsCheckbox.isSelected()) { verList.add(app.getLatestOnlineSnapshotVersion()); } } catch (Exception e) { // Something happened, pretend // offline mode verList = app.getCurrentlyInstalledVersions(); } } else { // Offline mode enabled verList = app.getCurrentlyInstalledVersions(); } // Sort the list Collections.sort(verList); // Clear previous list Platform.runLater(new Runnable() { @Override public void run() { launchSpecificVersionItem.getItems().clear(); } }); for (Version ver : verList) { VersionMenuItem menuItem = new VersionMenuItem(); menuItem.setVersion(ver); menuItem.setText(ver.toString(false)); menuItem.setOnAction(event2 -> { // Launch the download downloadAndLaunchThread = new Thread() { @Override public void run() { try { // Attach the on app // exit handler if // required if (launchLauncherAfterAppExitCheckbox.isSelected()) { Platform.setImplicitExit(false); currentlySelectedApp .addEventHandlerWhenLaunchedAppExits(showLauncherAgain); } else { Platform.setImplicitExit(true); currentlySelectedApp.removeEventHandlerWhenLaunchedAppExits( showLauncherAgain); } currentlySelectedApp.downloadIfNecessaryAndLaunch( currentMainWindowInstance, menuItem.getVersion(), workOfflineCheckbox.isSelected()); } catch (IOException | JDOMException e) { currentMainWindowInstance.showErrorMessage("An error occurred: \n" + ExceptionUtils.getStackTrace(e)); log.getLogger().log(Level.SEVERE, "An error occurred", e); } } }; 
downloadAndLaunchThread.setName("downloadAndLaunchThread"); downloadAndLaunchThread.start(); }); Platform.runLater(new Runnable() { @Override public void run() { launchSpecificVersionItem.getItems().add(menuItem); } }); } Platform.runLater(new Runnable() { @Override public void run() { if (!launchSpecificVersionMenuCanceled) { launchSpecificVersionItem.hide(); launchSpecificVersionItem.show(); } } }); } }; if (!cell.getItem().isSpecificVersionListLoaded()) { buildContextMenuThread.setName("buildContextMenuThread"); buildContextMenuThread.start(); cell.getItem().setSpecificVersionListLoaded(true); } }); Menu deleteItem = new Menu(); deleteItem.textProperty() .bind(Bindings.format(bundle.getString("deleteVersion"), cell.itemProperty())); MenuItem dummyVersion2 = new MenuItem(); dummyVersion2.setText(bundle.getString("waitForVersionList")); deleteItem.getItems().add(dummyVersion2); deleteItem.setOnShown(event -> { // App app = apps.get(cell.getIndex()); App app = cell.getItem(); if (!app.isDeletableVersionListLoaded()) { // Get deletable versions app.setDeletableVersionListLoaded(true); log.getLogger().info("Getting deletable versions..."); deleteItem.getItems().clear(); VersionList verList = new VersionList(); verList = app.getCurrentlyInstalledVersions(); Collections.sort(verList); for (Version ver : verList) { VersionMenuItem menuItem = new VersionMenuItem(); menuItem.setVersion(ver); menuItem.setText(ver.toString(false)); menuItem.setOnAction(event2 -> { // Delete the file try { currentlySelectedApp.delete(menuItem.getVersion()); } finally { updateLaunchButton(); } // Update the list the next time the // user opens it as it has changed app.setDeletableVersionListLoaded(false); }); Platform.runLater(new Runnable() { @Override public void run() { deleteItem.getItems().add(menuItem); } }); } Platform.runLater(new Runnable() { @Override public void run() { deleteItem.hide(); deleteItem.show(); } }); } }); MenuItem exportInfoItem = new MenuItem(); exportInfoItem.setText(bundle.getString("exportInfo")); exportInfoItem.setOnAction(event2 -> { FileChooser fileChooser = new FileChooser(); fileChooser.getExtensionFilters() .addAll(new FileChooser.ExtensionFilter("FOK-Launcher-File", "*.foklauncher")); fileChooser.setTitle("Save Image"); File file = fileChooser.showSaveDialog(stage); if (file != null) { log.getLogger().info("Exporting info..."); // App app = apps.get(cell.getIndex()); App app = cell.getItem(); try { log.getLogger().info("Exporting app info of app " + app.getName() + " to file: " + file.getAbsolutePath()); app.exportInfo(file); } catch (IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); currentMainWindowInstance.showErrorMessage(e.toString()); } } }); contextMenu.getItems().addAll(launchSpecificVersionItem, deleteItem, exportInfoItem); MenuItem removeImportedApp = new MenuItem(); contextMenu.setOnShowing(event5 -> { App app = cell.getItem(); if (app.isImported()) { removeImportedApp.setText("Remove this app from this list"); removeImportedApp.setOnAction(event3 -> { try { app.removeFromImportedAppList(); currentMainWindowInstance.loadAppList(); } catch (IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); currentMainWindowInstance.showErrorMessage(e.toString()); } }); contextMenu.getItems().add(removeImportedApp); } }); contextMenu.setOnHidden(event5 -> { // Remove the removeImportedApp-Item again if it exists if (contextMenu.getItems().contains(removeImportedApp)) { contextMenu.getItems().remove(removeImportedApp); } }); 
cell.emptyProperty().addListener((obs, wasEmpty, isNowEmpty) -> { if (isNowEmpty) { cell.setContextMenu(null); } else { cell.setContextMenu(contextMenu); } }); return cell; }); Platform.runLater(new Runnable() { @Override public void run() { appList.setItems(filteredData); appList.setPlaceholder(new Label(bundle.getString("emptyAppList"))); } }); } catch (JDOMException | IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); currentMainWindowInstance .showErrorMessage("An error occurred: \n" + e.getClass().getName() + "\n" + e.getMessage()); } } }; /** * The thread that gets the app list */ private Thread getAppListThread; /** * This reference always refers to the currently used instance of the * MainWidow. The purpose of this field that {@code this} can be accessed in * a convenient way in static methods. */ private static MainWindow currentMainWindowInstance; private static App currentlySelectedApp = null; @FXML // ResourceBundle that was given to the FXMLLoader private ResourceBundle resources; @FXML // URL location of the FXML file that was given to the FXMLLoader private URL location; @FXML // fx:id="appList" private ListView<App> appList; // Value injected by FXMLLoader @FXML // fx:id="searchField" private TextField searchField; // Value injected by FXMLLoader @FXML // fx:id="enableSnapshotsCheckbox" private CheckBox enableSnapshotsCheckbox; // Value injected by FXMLLoader @FXML // fx:id="launchButton" private ProgressButton launchButton; // Value injected by FXMLLoader @FXML // fx:id="launchLauncherAfterAppExitCheckbox" private CheckBox launchLauncherAfterAppExitCheckbox; // Value injected by // FXMLLoader @FXML // fx:id="languageSelector" private ComboBox<GuiLanguage> languageSelector; // Value injected by // FXMLLoader @FXML // fx:id="progressBar" private ProgressBar progressBar; // Value injected by FXMLLoader @FXML // fx:id="workOfflineCheckbox" private CheckBox workOfflineCheckbox; // Value injected by FXMLLoader @FXML /** * fx:id="updateLink" */ private Hyperlink updateLink; // Value injected by FXMLLoader @FXML /** * fx:id="versionLabel" */ private Label versionLabel; // Value injected by FXMLLoader @FXML // fx:id="settingsGridView" private GridPane settingsGridView; // Value injected by FXMLLoader @FXML // fx:id="appInfoButton" private Button appInfoButton; // Value injected by FXMLLoader // Handler for ListView[fx:id="appList"] onMouseClicked @FXML void appListOnMouseClicked(MouseEvent event) { // Currently not used } // Handler for AnchorPane[id="AnchorPane"] onDragDetected @FXML void appListOnDragDetected(MouseEvent event) { if (currentlySelectedApp != null) { File tempFile = new File( Common.getAndCreateAppDataPath() + currentlySelectedApp.getMavenArtifactID() + ".foklauncher"); try { currentlySelectedApp.exportInfo(tempFile); Dragboard db = appList.startDragAndDrop(TransferMode.MOVE); ClipboardContent content = new ClipboardContent(); content.putFiles(Arrays.asList(tempFile)); db.setContent(content); } catch (IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } } // Handler for ListView[fx:id="appList"] onDragOver @FXML void mainFrameOnDragOver(DragEvent event) { Dragboard db = event.getDragboard(); // Only allow drag'n'drop for files and if no app list is currently // loading if (db.hasFiles() && !getAppListThread.isAlive()) { // Don't accept the drag if any file contained in the drag does not // have the *.foklauncher extension for (File f : db.getFiles()) { if 
(!FilenameUtils.getExtension(f.getAbsolutePath()).equals("foklauncher")) { event.consume(); return; } } event.acceptTransferModes(TransferMode.LINK); } else { event.consume(); } } @FXML void mainFrameOnDragDropped(DragEvent event) { List<File> files = event.getDragboard().getFiles(); for (File f : files) { log.getLogger().info("Importing app from " + f.getAbsolutePath() + "..."); try { App.addImportedApp(f); currentMainWindowInstance.loadAppList(); } catch (IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); currentMainWindowInstance.showErrorMessage(e.toString(), false); } } } private static Runnable showLauncherAgain = new Runnable() { @Override public void run() { // reset the ui try { currentMainWindowInstance.start(stage); } catch (Exception e) { log.getLogger().log(Level.INFO, "An error occurred while firing a handler for the LaunchedAppExited event, trying to run the handler using Platform.runLater...", e); } Platform.setImplicitExit(true); } }; @FXML /** * Handler for Hyperlink[fx:id="updateLink"] onAction * * @param event * The event object that contains information about the event. */ void updateLinkOnAction(ActionEvent event) { // Check for new version ignoring ignored updates Thread updateThread = new Thread() { @Override public void run() { UpdateInfo update = UpdateChecker.isUpdateAvailableCompareAppVersion(AppConfig.getUpdateRepoBaseURL(), AppConfig.groupID, AppConfig.artifactID, AppConfig.getUpdateFileClassifier(), Common.getPackaging()); Platform.runLater(new Runnable() { @Override public void run() { new UpdateAvailableDialog(update); } }); } }; updateThread.setName("manualUpdateThread"); updateThread.start(); } @FXML void languageSelectorOnAction(ActionEvent event) { log.getLogger().info("Switching gui language to: " + languageSelector.getItems().get(languageSelector.getSelectionModel().getSelectedIndex())); prefs.setPreference(guiLanguagePrefKey, languageSelector.getItems() .get(languageSelector.getSelectionModel().getSelectedIndex()).getLocale().getLanguage()); // Restart gui boolean implicitExit = Platform.isImplicitExit(); Platform.setImplicitExit(false); stage.hide(); try { currentMainWindowInstance.start(stage); } catch (Exception e) { log.getLogger().log(Level.INFO, "An error occurred while setting a new gui language", e); } Platform.setImplicitExit(implicitExit); } // Handler for Button[fx:id="launchButton"] onAction @FXML void launchButtonOnAction(ActionEvent event) { MainWindow gui = this; if (!downloadAndLaunchThread.isAlive()) { // Launch the download downloadAndLaunchThread = new Thread() { @Override public void run() { try { // Attach the on app exit handler if required if (launchLauncherAfterAppExitCheckbox.isSelected()) { Platform.setImplicitExit(false); currentlySelectedApp.addEventHandlerWhenLaunchedAppExits(showLauncherAgain); } else { Platform.setImplicitExit(true); currentlySelectedApp.removeEventHandlerWhenLaunchedAppExits(showLauncherAgain); } currentlySelectedApp.downloadIfNecessaryAndLaunch(enableSnapshotsCheckbox.isSelected(), gui, workOfflineCheckbox.isSelected()); } catch (IOException | JDOMException e) { gui.showErrorMessage("An error occurred: \n" + e.getClass().getName() + "\n" + e.getMessage()); log.getLogger().log(Level.SEVERE, "An error occurred", e); } } }; downloadAndLaunchThread.setName("downloadAndLaunchThread"); downloadAndLaunchThread.start(); } else { currentlySelectedApp.cancelDownloadAndLaunch(gui); } } // Handler for CheckBox[fx:id="workOfflineCheckbox"] onAction @FXML void 
workOfflineCheckboxOnAction(ActionEvent event) { updateLaunchButton(); } // Handler for CheckBox[fx:id="launchLauncherAfterAppExitCheckbox"] onAction @FXML void launchLauncherAfterAppExitCheckboxOnAction(ActionEvent event) { prefs.setPreference(showLauncherAgainPrefKey, Boolean.toString(launchLauncherAfterAppExitCheckbox.isSelected())); } @FXML void appInfoButtonOnAction(ActionEvent event) { try { Desktop.getDesktop().browse(new URI(currentlySelectedApp.getAdditionalInfoURL().toString())); } catch (IOException | URISyntaxException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } @Override public void start(Stage primaryStage) throws Exception { // get the right resource bundle String guiLanguageCode = prefs.getPreference(guiLanguagePrefKey, ""); if (guiLanguageCode.equals("")) { if (systemDefaultLocale != null) { Locale.setDefault(systemDefaultLocale); } } else { // Get the specified bundle if (systemDefaultLocale == null) { systemDefaultLocale = Locale.getDefault(); } log.getLogger().info("Setting language: " + guiLanguageCode); Locale.setDefault(new Locale(guiLanguageCode)); } bundle = ResourceBundle.getBundle("view.MainWindow"); // appConfig = new Config(); stage = primaryStage; try { Thread updateThread = new Thread() { @Override public void run() { UpdateInfo update = UpdateChecker.isUpdateAvailable(AppConfig.getUpdateRepoBaseURL(), AppConfig.groupID, AppConfig.artifactID, AppConfig.getUpdateFileClassifier(), Common.getPackaging()); if (update.showAlert) { Platform.runLater(new Runnable() { @Override public void run() { new UpdateAvailableDialog(update); } }); } } }; updateThread.setName("updateThread"); updateThread.start(); Parent root = FXMLLoader.load(getClass().getResource("MainWindow.fxml"), bundle); Scene scene = new Scene(root); scene.getStylesheets().add(getClass().getResource("MainWindow.css").toExternalForm()); primaryStage.setTitle(bundle.getString("windowTitle")); primaryStage.setMinWidth(scene.getRoot().minWidth(0) + 70); primaryStage.setMinHeight(scene.getRoot().minHeight(0) + 70); primaryStage.setScene(scene); // Set Icon primaryStage.getIcons().add(new Image(MainWindow.class.getResourceAsStream("icon.png"))); primaryStage.show(); } catch (Exception e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } @Override public void stop() { try { UpdateChecker.cancelUpdateCompletion(); if (currentlySelectedApp != null) { currentlySelectedApp.cancelDownloadAndLaunch(this); } } catch (Exception e) { log.getLogger().log(Level.SEVERE, "An error occurred but is not relevant as we are currently in the shutdown process. 
Possible reasons for this exception are: You tried to modify a view but it is not shown any more on the screen; You tried to cancel the app download but no download was in progress.", e); } } @FXML // This method is called by the FXMLLoader when initialization is // complete void initialize() { assert launchButton != null : "fx:id=\"launchButton\" was not injected: check your FXML file 'MainWindow.fxml'."; assert launchLauncherAfterAppExitCheckbox != null : "fx:id=\"launchLauncherAfterAppExitCheckbox\" was not injected: check your FXML file 'MainWindow.fxml'."; assert languageSelector != null : "fx:id=\"languageSelector\" was not injected: check your FXML file 'MainWindow.fxml'."; assert versionLabel != null : "fx:id=\"versionLabel\" was not injected: check your FXML file 'MainWindow.fxml'."; assert searchField != null : "fx:id=\"searchField\" was not injected: check your FXML file 'MainWindow.fxml'."; assert appList != null : "fx:id=\"appList\" was not injected: check your FXML file 'MainWindow.fxml'."; assert appInfoButton != null : "fx:id=\"appInfoButton\" was not injected: check your FXML file 'MainWindow.fxml'."; assert progressBar != null : "fx:id=\"progressBar\" was not injected: check your FXML file 'MainWindow.fxml'."; assert enableSnapshotsCheckbox != null : "fx:id=\"enableSnapshotsCheckbox\" was not injected: check your FXML file 'MainWindow.fxml'."; assert workOfflineCheckbox != null : "fx:id=\"workOfflineCheckbox\" was not injected: check your FXML file 'MainWindow.fxml'."; assert updateLink != null : "fx:id=\"updateLink\" was not injected: check your FXML file 'MainWindow.fxml'."; assert settingsGridView != null : "fx:id=\"settingsGridView\" was not injected: check your FXML file 'MainWindow.fxml'."; // Initialize your logic here: all @FXML variables will have been // injected // Show messages of the day Thread motdThread = new Thread() { @Override public void run() { MOTD motd; try { motd = MOTD.getLatestMOTD(AppConfig.getMotdFeedUrl()); if (!motd.isMarkedAsRead()) { Platform.runLater(new Runnable() { @Override public void run() { new MOTDDialog(motd, motd.getEntry().getTitle()); } }); } } catch (IllegalArgumentException | FeedException | IOException | ClassNotFoundException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } }; motdThread.setName("motdThread"); motdThread.start(); currentMainWindowInstance = this; enableSnapshotsCheckbox.setSelected(Boolean.parseBoolean(prefs.getPreference(enableSnapshotsPrefKey, "false"))); launchLauncherAfterAppExitCheckbox .setSelected(Boolean.parseBoolean(prefs.getPreference(showLauncherAgainPrefKey, "false"))); try { versionLabel.setText(new Version(Common.getAppVersion(), Common.getBuildNumber()).toString(false)); } catch (IllegalArgumentException e) { versionLabel.setText(Common.UNKNOWN_APP_VERSION); } progressBar.setVisible(false); // Disable multiselect appList.getSelectionModel().setSelectionMode(SelectionMode.SINGLE); loadAvailableGuiLanguages(); // Selection change listener appList.getSelectionModel().selectedItemProperty().addListener(new ChangeListener<App>() { public void changed(ObservableValue<? 
extends App> observable, App oldValue, App newValue) { try { currentlySelectedApp = appList.getSelectionModel().getSelectedItem(); } catch (ArrayIndexOutOfBoundsException e) { currentlySelectedApp = null; } updateLaunchButton(); } }); if (!Internet.isConnected()) { workOfflineCheckbox.setSelected(true); workOfflineCheckbox.setDisable(true); } loadAppList(); } private void loadAvailableGuiLanguages() { List<Locale> supportedGuiLocales = Common.getLanguagesSupportedByResourceBundle(bundle); List<GuiLanguage> convertedList = new ArrayList<GuiLanguage>(supportedGuiLocales.size()); for (Locale lang : supportedGuiLocales) { convertedList.add(new GuiLanguage(lang, bundle.getString("langaugeSelector.chooseAutomatically"))); } ObservableList<GuiLanguage> items = FXCollections.observableArrayList(convertedList); languageSelector.setItems(items); if (Locale.getDefault() != systemDefaultLocale) { GuiLanguage langToSelect = null; for (GuiLanguage lang : convertedList) { if (Locale.getDefault().equals(lang.getLocale())) { langToSelect = lang; } } if (langToSelect != null) { languageSelector.getSelectionModel().select(langToSelect); } } } /** * Loads the app list using the {@link App#getAppList()}-method */ private void loadAppList() { if (getAppListThread != null) { // If thread is not null and running, quit if (getAppListThread.isAlive()) { return; } } // Thread is either null or not running anymore getAppListThread = new Thread(getAppListRunnable); getAppListThread.setName("getAppListThread"); getAppListThread.start(); } private void updateLaunchButton() { apps.reloadContextMenuEntriesOnShow(); Thread getAppStatus = new Thread() { @Override public void run() { App checkedApp = currentlySelectedApp; boolean progressVisibleBefore = progressBar.isVisible(); Platform.runLater(new Runnable() { @Override public void run() { launchButton.setDisable(true); launchButton.setDefaultButton(false); launchButton.setStyle("-fx-background-color: transparent;"); launchButton.setControlText(""); progressBar.setPrefHeight(launchButton.getHeight()); progressBar.setVisible(true); progressBar.setProgress(-1); launchButton.setProgressText(bundle.getString("progress.checkingVersionInfo")); appInfoButton.setDisable(true); } }); try { if (!workOfflineCheckbox.isSelected()) { // downloads are enabled // enable the additional info button if applicable Platform.runLater(new Runnable() { @Override public void run() { appInfoButton.setDisable(checkedApp.getAdditionalInfoURL() == null); } }); if (checkedApp.downloadRequired(enableSnapshotsCheckbox.isSelected())) { // download required setLaunchButtonText(checkedApp, false, bundle.getString("okButton.downloadAndLaunch")); } else if (checkedApp.updateAvailable(enableSnapshotsCheckbox.isSelected())) { // Update available setLaunchButtonText(checkedApp, false, bundle.getString("okButton.updateAndLaunch")); } else { // Can launch immediately setLaunchButtonText(checkedApp, false, bundle.getString("okButton.launch")); } } else { // downloads disabled if (checkedApp.downloadRequired(enableSnapshotsCheckbox.isSelected())) { // download required but disabled setLaunchButtonText(checkedApp, true, bundle.getString("okButton.downloadAndLaunch")); } else { // Can launch immediately setLaunchButtonText(checkedApp, false, bundle.getString("okButton.launch")); } } } catch (JDOMException | IOException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); // Switch to offline mode workOfflineCheckbox.setSelected(true); workOfflineCheckbox.setDisable(true); // update launch button 
accordingly updateLaunchButton(); // Show error message currentMainWindowInstance.showErrorMessage(bundle.getString("updateLaunchButtonException") + "\n\n" + ExceptionUtils.getStackTrace(e), false); } // Do finishing touches to gui only if checkedApp still equals // currentlySelectedApp (make sure the user did not change the // selection in the meanwhile) if (checkedApp == currentlySelectedApp) { Platform.runLater(new Runnable() { @Override public void run() { launchButton.setProgressText(""); progressBar.setVisible(progressVisibleBefore); } }); } } }; // Only update the button caption if no download is running and an app // is selected if (!downloadAndLaunchThread.isAlive() && currentlySelectedApp != null) { getAppStatus.setName("getAppStatus"); getAppStatus.start(); } else if (currentlySelectedApp == null) { // disable the button launchButton.setDisable(true); } } private void setLaunchButtonText(App checkedApp, boolean isDisabled, String text) { // Only update the button if the user did not change his selection if (checkedApp == currentlySelectedApp) { Platform.runLater(new Runnable() { @Override public void run() { launchButton.setDisable(isDisabled); launchButton.setDefaultButton(!isDisabled); launchButton.setStyle(""); launchButton.setControlText(text); } }); } } @Override public void hide() { Platform.runLater(new Runnable() { @Override public void run() { stage.hide(); } }); } // Handler for CheckBox[fx:id="enableSnapshotsCheckbox"] onAction @FXML void enableSnapshotsCheckboxOnAction(ActionEvent event) { updateLaunchButton(); prefs.setPreference(enableSnapshotsPrefKey, Boolean.toString(enableSnapshotsCheckbox.isSelected())); } @Override public void preparePhaseStarted() { Platform.runLater(new Runnable() { @Override public void run() { appList.setDisable(true); launchButton.setDisable(false); launchButton.setDefaultButton(false); progressBar.setPrefHeight(launchButton.getHeight()); launchButton.setStyle("-fx-background-color: transparent;"); launchButton.setControlText(bundle.getString("okButton.cancelLaunch")); progressBar.setVisible(true); progressBar.setProgress(0 / 4.0); launchButton.setProgressText(bundle.getString("progress.preparing")); settingsGridView.setDisable(true); } }); } @Override public void downloadStarted() { Platform.runLater(new Runnable() { @Override public void run() { progressBar.setProgress(-1); launchButton.setProgressText(bundle.getString("progress.downloading")); } }); } @Override public void installStarted() { Platform.runLater(new Runnable() { @Override public void run() { progressBar.setProgress(1.0 / 2.0); launchButton.setProgressText(bundle.getString("progress.installing")); } }); } @Override public void launchStarted() { Platform.runLater(new Runnable() { @Override public void run() { progressBar.setProgress(2.0 / 2.0); launchButton.setProgressText(bundle.getString("progress.launching")); } }); } public void showErrorMessage(String message) { showErrorMessage(message, false); } public void showErrorMessage(String message, boolean closeWhenDialogIsClosed) { Platform.runLater(new Runnable() { @Override public void run() { Alert alert = new Alert(Alert.AlertType.ERROR, message + "\n\n" + "The app needs to close now."); alert.show(); Thread t = new Thread() { @Override public void run() { while (alert.isShowing()) { // wait for dialog to be closed } System.err.println("Closing app after exception, good bye..."); Platform.exit(); } }; t.setName("showErrorThread"); t.start(); } }); } @Override public void operationCanceled() { 
log.getLogger().info("Operation cancelled."); Platform.setImplicitExit(true); appList.setDisable(false); progressBar.setVisible(false); Platform.runLater(new Runnable() { @Override public void run() { launchButton.setProgressText(""); settingsGridView.setDisable(false); updateLaunchButton(); } }); } @Override public void cancelRequested() { if (progressBar != null) { progressBar.setProgress(-1); launchButton.setProgressText(bundle.getString("cancelRequested")); launchButton.setDisable(true); log.getLogger().info("Requested to cancel the current operation, Cancel in progress..."); } } @Override public void downloadProgressChanged(double kilobytesDownloaded, double totalFileSizeInKB) { Platform.runLater(new Runnable() { @Override public void run() { progressBar.setProgress(kilobytesDownloaded / totalFileSizeInKB); String downloadedString; if (kilobytesDownloaded < 1024) { downloadedString = Double.toString(Math.round(kilobytesDownloaded * 100.0) / 100.0) + " " + bundle.getString("kilobyte"); } else if ((kilobytesDownloaded / 1024) < 1024) { downloadedString = Double.toString(Math.round((kilobytesDownloaded * 100.0) / 1024) / 100.0) + " " + bundle.getString("megabyte"); } else if (((kilobytesDownloaded / 1024) / 1024) < 1024) { downloadedString = Double .toString(Math.round(((kilobytesDownloaded * 100.0) / 1024) / 1024) / 100.0) + " " + bundle.getString("gigabyte"); } else { downloadedString = Double .toString(Math.round((((kilobytesDownloaded * 100.0) / 1024) / 1024) / 1024) / 100.0) + " " + bundle.getString("terabyte"); } String totalString; if (totalFileSizeInKB < 1024) { totalString = Double.toString(Math.round(totalFileSizeInKB * 100.0) / 100.0) + " " + bundle.getString("kilobyte"); } else if ((totalFileSizeInKB / 1024) < 1024) { totalString = Double.toString(Math.round((totalFileSizeInKB * 100.0) / 1024) / 100.0) + " " + bundle.getString("megabyte"); } else if (((totalFileSizeInKB / 1024) / 1024) < 1024) { totalString = Double.toString(Math.round(((totalFileSizeInKB * 100.0) / 1024) / 1024) / 100.0) + " " + bundle.getString("gigabyte"); } else { totalString = Double .toString(Math.round((((totalFileSizeInKB * 100.0) / 1024) / 1024) / 1024) / 100.0) + " " + bundle.getString("terabyte"); } launchButton.setProgressText( bundle.getString("progress.downloading") + "(" + downloadedString + "/" + totalString + ")"); } }); } }
Fixed #23
src/main/java/view/MainWindow.java
Fixed #23
Java
apache-2.0
1932e369273fc613171a5330155049250622aec0
0
jitsi/libjitsi,jitsi/libjitsi,jitsi/libjitsi,jitsi/libjitsi
/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jitsi.util; import java.lang.reflect.*; import java.util.*; /** * * @author Lyubomir Marinov */ public final class ArrayUtils { /** * Adds a specific element to a specific array with a specific component * type if the array does not contain the element yet. * * @param array the array to add <tt>element</tt> to * @param componentType the component type of <tt>array</tt> * @param element the element to add to <tt>array</tt> * @return an array with the specified <tt>componentType</tt> and * containing <tt>element</tt>. If <tt>array</tt> contained <tt>element</tt> * already, returns <tt>array</tt>. */ @SuppressWarnings("unchecked") public static <T> T[] add(T[] array, Class<T> componentType, T element) { if (element == null) throw new NullPointerException("element"); if (array == null) { array = (T[]) Array.newInstance(componentType, 1); } else { for (T a : array) { if (element.equals(a)) return array; } T[] newArray = (T[]) Array.newInstance(componentType, array.length + 1); System.arraycopy(array, 0, newArray, 0, array.length); array = newArray; } array[array.length - 1] = element; return array; } /** Prevents the initialization of new {@code ArrayUtils} instances. */ private ArrayUtils() { } /** * Concatenates two arrays. * * @param first * @param second * @param <T> * @return */ public static <T> T[] concat(T[] first, T[] second) { if (first == null || first.length == 0) { return second; } else if (second == null || second.length == 0) { return first; } else { T[] result = Arrays.copyOf(first, first.length + second.length); System.arraycopy(second, 0, result, first.length, second.length); return result; } } }
src/org/jitsi/util/ArrayUtils.java
/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jitsi.util; import java.lang.reflect.*; /** * * @author Lyubomir Marinov */ public final class ArrayUtils { /** * Adds a specific element to a specific array with a specific component * type if the array does not contain the element yet. * * @param array the array to add <tt>element</tt> to * @param componentType the component type of <tt>array</tt> * @param element the element to add to <tt>array</tt> * @return an array with the specified <tt>componentType</tt> and * containing <tt>element</tt>. If <tt>array</tt> contained <tt>element</tt> * already, returns <tt>array</tt>. */ @SuppressWarnings("unchecked") public static <T> T[] add(T[] array, Class<T> componentType, T element) { if (element == null) throw new NullPointerException("element"); if (array == null) { array = (T[]) Array.newInstance(componentType, 1); } else { for (T a : array) { if (element.equals(a)) return array; } T[] newArray = (T[]) Array.newInstance(componentType, array.length + 1); System.arraycopy(array, 0, newArray, 0, array.length); array = newArray; } array[array.length - 1] = element; return array; } /** Prevents the initialization of new {@code ArrayUtils} instances. */ private ArrayUtils() { } }
feat: Adds a method for array concatenation.
src/org/jitsi/util/ArrayUtils.java
feat: Adds a method for array concatenation.
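The record above adds a null- and empty-tolerant generic concat helper to org.jitsi.util.ArrayUtils. The following self-contained sketch illustrates the same pattern and how it behaves on null or empty inputs; the class name ConcatSketch and the sample arrays are editorial assumptions, not part of the libjitsi source.

import java.util.Arrays;

public class ConcatSketch {
    // Returns "first" or "second" unchanged when the other side is null/empty,
    // otherwise copies both into a fresh array with the same component type.
    public static <T> T[] concat(T[] first, T[] second) {
        if (first == null || first.length == 0) {
            return second;
        } else if (second == null || second.length == 0) {
            return first;
        }
        T[] result = Arrays.copyOf(first, first.length + second.length);
        System.arraycopy(second, 0, result, first.length, second.length);
        return result;
    }

    public static void main(String[] args) {
        String[] a = {"a", "b"};
        String[] b = {"c"};
        // Prints [a, b, c]; concat(a, null) would simply return a unchanged.
        System.out.println(Arrays.toString(concat(a, b)));
    }
}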
Java
apache-2.0
031c1a413bb21bbf93c0b8499b46f37de02c9f1c
0
dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android
package org.commcare.android.adapters; import android.content.Context; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import org.commcare.android.view.SquareButtonWithNotification; import org.commcare.dalvik.R; import java.util.HashMap; import java.util.LinkedList; /** * Sets up home screen buttons and gives accessors for setting their visibility and listeners * Created by dancluna on 3/19/15. */ public class HomeScreenAdapter extends BaseAdapter { public static final String TAG = "HomeScrnAdpt"; //region Buttons static final int[] buttonsResources = new int[]{ R.layout.home_start_button, R.layout.home_savedforms_button, R.layout.home_incompleteforms_button, R.layout.home_sync_button, R.layout.home_disconnect_button, }; static final HashMap<Integer, Integer> buttonsIDsToResources = new HashMap<Integer, Integer>() {{ put(R.id.home_start_sqbn,R.layout.home_start_button); put(R.id.home_savedforms_sqbn,R.layout.home_savedforms_button); put(R.id.home_sync_sqbn,R.layout.home_sync_button); put(R.id.home_disconnect_sqbn,R.layout.home_disconnect_button); put(R.id.home_incompleteforms_sqbn,R.layout.home_incompleteforms_button); }}; //endregion //region Private variables final View.OnClickListener[] buttonListeners = new View.OnClickListener[buttonsResources.length]; final SquareButtonWithNotification[] buttons = new SquareButtonWithNotification[buttonsResources.length]; private Context context; private boolean[] hiddenButtons = new boolean[buttonsResources.length]; private boolean isInitialized = false; private LinkedList<SquareButtonWithNotification> visibleButtons; //endregion //region Constructors public HomeScreenAdapter(Context c) { this.context = c; visibleButtons = new LinkedList<SquareButtonWithNotification>(); for (int i = 0; i < buttons.length; i++) { if (buttons[i] != null) { continue; } SquareButtonWithNotification button = (SquareButtonWithNotification)LayoutInflater.from(context) .inflate(buttonsResources[i], null, false); buttons[i] = button; Log.i(TAG, "Added button " + button + "to position " + i); View.OnClickListener listener = buttonListeners[i]; // creating now, but set a clickListener before, so we'll add it to this button... 
if (listener != null) { button.setOnClickListener(listener); Log.i(TAG, "Added onClickListener " + listener + " to button in position " + i); } if (!hiddenButtons[i]) { visibleButtons.add(button); } } isInitialized = true; } //endregion //region Public API /** * Sets the onClickListener for the given button * @param resourceCode Android resource code (R.id.$button or R.layout.$button) * @param lookupID If set, will search for the button with the given R.id * @param listener OnClickListener for the button */ public void setOnClickListenerForButton(int resourceCode, boolean lookupID, View.OnClickListener listener){ int buttonIndex = getButtonIndex(resourceCode, lookupID); buttonListeners[buttonIndex] = listener; SquareButtonWithNotification button = (SquareButtonWithNotification) getItem(buttonIndex); if(button != null){ button.setOnClickListener(listener); } } public SquareButtonWithNotification getButton(int resourceCode, boolean lookupID){ return buttons[getButtonIndex(resourceCode, lookupID)]; } public void setNotificationTextForButton(int resourceCode, boolean lookupID, String notificationText) { SquareButtonWithNotification button = getButton(resourceCode, lookupID); if (button != null) { button.setNotificationText(notificationText); notifyDataSetChanged(); } } @Override public int getCount() { return visibleButtons.size(); } @Override public Object getItem(int position) { return buttons[position]; } @Override public long getItemId(int position) { return buttonsResources[position]; } @Override public View getView(int position, View convertView, ViewGroup parent) { if (position < 0 || position >= getCount()) { return null; } if(convertView != null) { return convertView; } else { SquareButtonWithNotification btn = visibleButtons.get(position); if(btn == null) { Log.i(TAG,"Unexpected null button"); } return btn; } } /** * Sets visibility for the button with the given resource code * @param resourceCode Android resource code (R.id.$button or R.layout.$button) * @param lookupID If set, will search for the button with the given R.id * @param isButtonHidden Button visibility state (true for hidden, false for visible) */ public void setButtonVisibility(int resourceCode, boolean lookupID, boolean isButtonHidden){ int index = getButtonIndex(resourceCode, lookupID); boolean toggled = isButtonHidden ^ hiddenButtons[index]; // checking if the button visibility was changed in this call hiddenButtons[index] = isButtonHidden; if (!toggled) { return; } // if the visibility was not changed, we don't need to do anything if (isButtonHidden) { // if the visibility was changed, we add/remove the button from the visible buttons' list visibleButtons.remove(buttons[index]); } else { visibleButtons.add(index, buttons[index]); } } //endregion //region Private methods /** * Returns the index of the button with the given resource code. If lookupID is set, will search for the button with the given R.id; if not, will search for the button with the given R.layout code. 
* @param resourceCode * @param lookupID * @return * @throws java.lang.IllegalArgumentException If the given resourceCode is not found */ private int getButtonIndex(int resourceCode, boolean lookupID){ int code = resourceCode; // if lookupID is set, we are mapping from an int in R.id to one in R.layout if(lookupID){ Integer layoutCode = buttonsIDsToResources.get(resourceCode); if(layoutCode == null) throw new IllegalArgumentException("ID code not found: " + resourceCode); code = layoutCode; } Integer buttonIndex = null; for (int i = 0; i < buttonsResources.length; i++) { if(code == buttonsResources[i]){ buttonIndex = i; break; } } if (buttonIndex == null) { throw new IllegalArgumentException("Layout code not found: " + code); } return buttonIndex; } //endregion }
app/src/org/commcare/android/adapters/HomeScreenAdapter.java
package org.commcare.android.adapters; import android.content.Context; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import org.commcare.android.view.SquareButtonWithNotification; import org.commcare.dalvik.R; import java.util.HashMap; import java.util.LinkedList; /** * Sets up home screen buttons and gives accessors for setting their visibility and listeners * Created by dancluna on 3/19/15. */ public class HomeScreenAdapter extends BaseAdapter { //region Buttons static final int[] buttonsResources = new int[]{ R.layout.home_start_button, R.layout.home_savedforms_button, R.layout.home_incompleteforms_button, R.layout.home_sync_button, R.layout.home_disconnect_button, }; static final HashMap<Integer, Integer> buttonsIDsToResources = new HashMap<Integer, Integer>() {{ put(R.id.home_start_sqbn,R.layout.home_start_button); put(R.id.home_savedforms_sqbn,R.layout.home_savedforms_button); put(R.id.home_sync_sqbn,R.layout.home_sync_button); put(R.id.home_disconnect_sqbn,R.layout.home_disconnect_button); put(R.id.home_incompleteforms_sqbn,R.layout.home_incompleteforms_button); }}; //endregion //region Private variables final View.OnClickListener[] buttonListeners = new View.OnClickListener[buttonsResources.length]; final SquareButtonWithNotification[] buttons = new SquareButtonWithNotification[buttonsResources.length]; private Context context; private boolean[] hiddenButtons = new boolean[buttonsResources.length]; private boolean isInitialized = false; private LinkedList<SquareButtonWithNotification> visibleButtons; //endregion //region Constructors public HomeScreenAdapter(Context c) { this.context = c; } //endregion //region Public API /** * Sets the onClickListener for the given button * @param resourceCode Android resource code (R.id.$button or R.layout.$button) * @param lookupID If set, will search for the button with the given R.id * @param listener OnClickListener for the button */ public void setOnClickListenerForButton(int resourceCode, boolean lookupID, View.OnClickListener listener){ int buttonIndex = getButtonIndex(resourceCode, lookupID); buttonListeners[buttonIndex] = listener; SquareButtonWithNotification button = (SquareButtonWithNotification) getItem(buttonIndex); if(button != null){ button.setOnClickListener(listener); } } public SquareButtonWithNotification getButton(int resourceCode, boolean lookupID){ return buttons[getButtonIndex(resourceCode, lookupID)]; } public void setNotificationTextForButton(int resourceCode, boolean lookupID, String notificationText) { SquareButtonWithNotification button = getButton(resourceCode, lookupID); if (button != null) { button.setNotificationText(notificationText); notifyDataSetChanged(); } } @Override public int getCount() { // return buttonsResources.length; return visibleButtons == null ? 
buttonsResources.length : visibleButtons.size(); } @Override public Object getItem(int position) { return buttons[position]; } @Override public long getItemId(int position) { return buttonsResources[position]; } @Override public View getView(int position, View convertView, ViewGroup parent) { if(!isInitialized){ visibleButtons = new LinkedList<SquareButtonWithNotification>(); Log.i("HomeScrnAdpt","Creating all buttons because got a null in position " + position); for (int i = 0; i < buttons.length; i++) { if (buttons[i] != null) continue; SquareButtonWithNotification button = (SquareButtonWithNotification) LayoutInflater.from(context) .inflate(buttonsResources[i], parent, false); buttons[i] = button; Log.i("HomeScrnAdpt","Added button " + button + "to position " + i); View.OnClickListener listener = buttonListeners[i]; // creating now, but set a clickListener before, so we'll add it to this button... if(listener != null) { button.setOnClickListener(listener); Log.i("HomeScrnAdpt","Added onClickListener " + listener + " to button in position " + i); } if(!hiddenButtons[i]) visibleButtons.add(button); } isInitialized = true; } if(position < 0 || position >= getCount()) return null; if(convertView != null) { return convertView; } else { SquareButtonWithNotification btn = visibleButtons.get(position); if(btn == null) { Log.i("HomeScrnAdpt","Unexpected null button"); } return btn; } } /** * Sets visibility for the button with the given resource code * @param resourceCode Android resource code (R.id.$button or R.layout.$button) * @param lookupID If set, will search for the button with the given R.id * @param isButtonHidden Button visibility state (true for hidden, false for visible) */ public void setButtonVisibility(int resourceCode, boolean lookupID, boolean isButtonHidden){ int index = getButtonIndex(resourceCode, lookupID); boolean toggled = isButtonHidden ^ hiddenButtons[index]; // checking if the button visibility was changed in this call hiddenButtons[index] = isButtonHidden; if (visibleButtons != null) { if(!toggled) return; // if the visibility was not changed, we don't need to do anything if(isButtonHidden) { // if the visibility was changed, we add/remove the button from the visible buttons' list visibleButtons.remove(buttons[index]); } else { visibleButtons.add(index, buttons[index]); } } } //endregion //region Private methods /** * Returns the index of the button with the given resource code. If lookupID is set, will search for the button with the given R.id; if not, will search for the button with the given R.layout code. * @param resourceCode * @param lookupID * @return * @throws java.lang.IllegalArgumentException If the given resourceCode is not found */ private int getButtonIndex(int resourceCode, boolean lookupID){ int code = resourceCode; // if lookupID is set, we are mapping from an int in R.id to one in R.layout if(lookupID){ Integer layoutCode = buttonsIDsToResources.get(resourceCode); if(layoutCode == null) throw new IllegalArgumentException("ID code not found: " + resourceCode); code = layoutCode; } Integer buttonIndex = null; for (int i = 0; i < buttonsResources.length; i++) { if(code == buttonsResources[i]){ buttonIndex = i; break; } } if(buttonIndex == null) throw new IllegalArgumentException("Layout code not found: " + code); return buttonIndex; } //endregion }
Code review: removing complex logic from HomeScreenAdapter
app/src/org/commcare/android/adapters/HomeScreenAdapter.java
Code review: removing complex logic from HomeScreenAdapter
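The review change in this record moves the one-time button inflation out of getView() and into the HomeScreenAdapter constructor, so getView() only hands back already-built views. Below is a stripped-down sketch of that shape; PrebuiltViewAdapter and its fields are illustrative assumptions that compile against the Android SDK, not the CommCare adapter itself.

import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;

import java.util.ArrayList;
import java.util.List;

public class PrebuiltViewAdapter extends BaseAdapter {
    private final List<View> views = new ArrayList<>();

    // All inflation happens once, here, instead of lazily inside getView().
    public PrebuiltViewAdapter(Context context, int... layoutResIds) {
        LayoutInflater inflater = LayoutInflater.from(context);
        for (int layoutResId : layoutResIds) {
            views.add(inflater.inflate(layoutResId, null, false));
        }
    }

    @Override public int getCount() { return views.size(); }
    @Override public Object getItem(int position) { return views.get(position); }
    @Override public long getItemId(int position) { return position; }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        // Views are prebuilt; just return the one for this position.
        return views.get(position);
    }
}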
Java
apache-2.0
075922b90f5b0dee9ac5c5f7060b5cc54078f14a
0
mintern/gson
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gson; import com.google.gson.internal.LruCache; import com.google.gson.internal.Types; import com.google.gson.internal.UnsafeAllocator; import java.lang.reflect.Array; import java.lang.reflect.Constructor; import java.lang.reflect.Type; /** * This class contains a mapping of all the application specific * {@link InstanceCreator} instances. Registering an {@link InstanceCreator} * with this class will override the default object creation that is defined * by the ObjectConstructor that this class is wrapping. Using this class * with the JSON framework provides the application with "pluggable" modules * to customize framework to suit the application's needs. * * @author Joel Leitch */ final class MappedObjectConstructor implements ObjectConstructor { private static final UnsafeAllocator unsafeAllocator = UnsafeAllocator.create(); private static final LruCache<Class<?>, Constructor<?>> noArgsConstructorsCache = new LruCache<Class<?>, Constructor<?>>(500); private final ParameterizedTypeHandlerMap<InstanceCreator<?>> instanceCreatorMap; /** * We need a special null value to indicate that the class does not have a no-args constructor. * This helps avoid using reflection over and over again for such classes. For convenience, we * use the no-args constructor of this class itself since this class would never be * deserialized using Gson. */ private static final Constructor<MappedObjectConstructor> NULL_VALUE = getNoArgsConstructorUsingReflection(MappedObjectConstructor.class); @SuppressWarnings("unused") private MappedObjectConstructor() { this(null); } public MappedObjectConstructor( ParameterizedTypeHandlerMap<InstanceCreator<?>> instanceCreators) { instanceCreatorMap = instanceCreators; } @SuppressWarnings("unchecked") public <T> T construct(Type typeOfT) { InstanceCreator<T> creator = (InstanceCreator<T>) instanceCreatorMap.getHandlerFor(typeOfT); if (creator != null) { return creator.createInstance(typeOfT); } return (T) constructWithNoArgConstructor(typeOfT); } public Object constructArray(Type type, int length) { return Array.newInstance(Types.getRawType(type), length); } @SuppressWarnings({"unchecked", "cast"}) private <T> T constructWithNoArgConstructor(Type typeOfT) { try { Class<T> clazz = (Class<T>) Types.getRawType(typeOfT); Constructor<T> constructor = getNoArgsConstructor(clazz); return constructor == null ? unsafeAllocator.newInstance(clazz) : constructor.newInstance(); } catch (Exception e) { throw new RuntimeException(("Unable to invoke no-args constructor for " + typeOfT + ". 
" + "Register an InstanceCreator with Gson for this type may fix this problem."), e); } } private <T> Constructor<T> getNoArgsConstructor(Class<T> clazz) { @SuppressWarnings("unchecked") Constructor<T> constructor = (Constructor<T>)noArgsConstructorsCache.getElement(clazz); if (constructor == NULL_VALUE) { return null; } if (constructor == null) { constructor = getNoArgsConstructorUsingReflection(clazz); noArgsConstructorsCache.addElement(clazz, constructor); } return constructor == NULL_VALUE ? null : constructor; } @SuppressWarnings("unchecked") private static <T> Constructor<T> getNoArgsConstructorUsingReflection(Class<T> clazz) { try { Constructor<T> constructor = clazz.getDeclaredConstructor(); constructor.setAccessible(true); return constructor; } catch (Exception e) { return (Constructor<T>) NULL_VALUE; } } @Override public String toString() { return instanceCreatorMap.toString(); } }
src/main/java/com/google/gson/MappedObjectConstructor.java
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gson; import com.google.gson.internal.Types; import com.google.gson.internal.UnsafeAllocator; import java.lang.reflect.Array; import java.lang.reflect.Constructor; import java.lang.reflect.Type; /** * This class contains a mapping of all the application specific * {@link InstanceCreator} instances. Registering an {@link InstanceCreator} * with this class will override the default object creation that is defined * by the ObjectConstructor that this class is wrapping. Using this class * with the JSON framework provides the application with "pluggable" modules * to customize framework to suit the application's needs. * * @author Joel Leitch */ final class MappedObjectConstructor implements ObjectConstructor { private static final UnsafeAllocator unsafeAllocator = UnsafeAllocator.create(); private final ParameterizedTypeHandlerMap<InstanceCreator<?>> instanceCreatorMap; public MappedObjectConstructor( ParameterizedTypeHandlerMap<InstanceCreator<?>> instanceCreators) { instanceCreatorMap = instanceCreators; } @SuppressWarnings("unchecked") public <T> T construct(Type typeOfT) { InstanceCreator<T> creator = (InstanceCreator<T>) instanceCreatorMap.getHandlerFor(typeOfT); if (creator != null) { return creator.createInstance(typeOfT); } return (T) constructWithNoArgConstructor(typeOfT); } public Object constructArray(Type type, int length) { return Array.newInstance(Types.getRawType(type), length); } @SuppressWarnings({"unchecked", "cast"}) private <T> T constructWithNoArgConstructor(Type typeOfT) { try { Class<T> clazz = (Class<T>) Types.getRawType(typeOfT); Constructor<T> constructor = getNoArgsConstructor(clazz); return constructor == null ? unsafeAllocator.newInstance(clazz) : constructor.newInstance(); } catch (Exception e) { throw new RuntimeException(("Unable to invoke no-args constructor for " + typeOfT + ". " + "Register an InstanceCreator with Gson for this type may fix this problem."), e); } } private <T> Constructor<T> getNoArgsConstructor(Class<T> clazz) { try { Constructor<T> declaredConstructor = clazz.getDeclaredConstructor(); declaredConstructor.setAccessible(true); return declaredConstructor; } catch (Exception e) { return null; } } @Override public String toString() { return instanceCreatorMap.toString(); } }
Added a cache for no-args constructors to avoid expensive reflection every time an object needs to be instantiated. git-svn-id: 7b8be7b2f8bf58e8147c910303b95fa2b8d9948f@748 2534bb62-2c4b-0410-85e8-b5006b95c4ae
src/main/java/com/google/gson/MappedObjectConstructor.java
Added a cache for no-args constructors to avoid expensive reflection every time an object needs to be instantiated.
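The commit message describes caching no-args constructors so the reflection lookup (and its failure) happens at most once per class; the gson code in this record does that with an internal LruCache plus a NULL_VALUE sentinel. The sketch below shows the same idea; the class name ConstructorCacheSketch and the choice of a plain ConcurrentHashMap are editorial assumptions rather than gson internals.

import java.lang.reflect.Constructor;
import java.util.concurrent.ConcurrentHashMap;

public class ConstructorCacheSketch {
    // Sentinel stored when a class has no accessible no-args constructor,
    // so the failing reflection lookup is not repeated on every request.
    private static final Constructor<?> NONE;
    static {
        try {
            NONE = ConstructorCacheSketch.class.getDeclaredConstructor();
        } catch (NoSuchMethodException e) {
            throw new AssertionError(e);
        }
    }

    private static final ConcurrentHashMap<Class<?>, Constructor<?>> CACHE =
            new ConcurrentHashMap<>();

    @SuppressWarnings("unchecked")
    public static <T> Constructor<T> noArgsConstructor(Class<T> clazz) {
        Constructor<?> cached = CACHE.computeIfAbsent(clazz, c -> {
            try {
                Constructor<?> ctor = c.getDeclaredConstructor();
                ctor.setAccessible(true);
                return ctor;
            } catch (Exception e) {
                return NONE; // remember the failure as well
            }
        });
        return cached == NONE ? null : (Constructor<T>) cached;
    }

    public static void main(String[] args) {
        // The second call hits the cache and returns the identical Constructor object.
        System.out.println(noArgsConstructor(StringBuilder.class)
                == noArgsConstructor(StringBuilder.class));
    }
}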
Java
bsd-2-clause
8f562030388957c9807bbb71c792720d4a6660e0
0
JayH5/jbox2d,jbox2d/jbox2d,JayH5/jbox2d,wasimbeniwale/jbox2d,jbox2d/jbox2d,jbox2d/jbox2d,wasimbeniwale/jbox2d,wasimbeniwale/jbox2d,JayH5/jbox2d
/* * JBox2D - A Java Port of Erin Catto's Box2D * * JBox2D homepage: http://jbox2d.sourceforge.net/ * Box2D homepage: http://www.box2d.org * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. */ package org.jbox2d.dynamics.contacts; import java.util.ArrayList; import java.util.List; import org.jbox2d.collision.ContactID; import org.jbox2d.collision.Manifold; import org.jbox2d.collision.ManifoldPoint; import org.jbox2d.collision.shapes.CircleShape; import org.jbox2d.collision.shapes.PolygonShape; import org.jbox2d.collision.shapes.Shape; import org.jbox2d.collision.shapes.ShapeType; import org.jbox2d.common.Vec2; import org.jbox2d.dynamics.Body; import org.jbox2d.dynamics.ContactListener; import org.jbox2d.pooling.SingletonPool; import org.jbox2d.pooling.TLContactPoint; import org.jbox2d.pooling.TLManifold; import org.jbox2d.pooling.TLVec2; import org.jbox2d.pooling.arrays.BooleanArray; //Updated to rev 144 of b2PolyAndCircleContact.h/cpp class PolyAndCircleContact extends Contact implements ContactCreateFcn { public final Manifold m_manifold; public final ArrayList<Manifold> manifoldList = new ArrayList<Manifold>(); public PolyAndCircleContact(final Shape s1, final Shape s2) { super(s1, s2); assert (m_shape1.getType() == ShapeType.POLYGON_SHAPE); assert (m_shape2.getType() == ShapeType.CIRCLE_SHAPE); m_manifold = new Manifold(); manifoldList.add(m_manifold); m_manifoldCount = 0; // These should not be necessary, manifold was // just created... //m_manifold.points[0].normalImpulse = 0.0f; //m_manifold.points[0].tangentImpulse = 0.0f; } public PolyAndCircleContact() { super(); m_manifold = new Manifold(); m_manifoldCount = 0; } @Override public Contact clone() { final PolyAndCircleContact newC = new PolyAndCircleContact(this.m_shape1, this.m_shape2); newC.m_manifold.set(this.m_manifold); newC.m_manifoldCount = this.m_manifoldCount; // The parent world. newC.m_world = this.m_world; // World pool and list pointers. newC.m_prev = this.m_prev; newC.m_next = this.m_next; // Nodes for connecting bodies. 
newC.m_node1.set(m_node1); newC.m_node2.set(m_node2); // Combined friction newC.m_friction = this.m_friction; newC.m_restitution = this.m_restitution; newC.m_flags = this.m_flags; return newC; } public Contact create(final Shape shape1, final Shape shape2) { return new PolyAndCircleContact(shape1, shape2); } @Override public List<Manifold> getManifolds() { return manifoldList; } // djm pooling private static final TLManifold tlm0 = new TLManifold(); private static final TLVec2 tlV1 = new TLVec2(); private static final TLContactPoint tlCp = new TLContactPoint(); private static final BooleanArray tlPersisted = new BooleanArray(); @Override public void evaluate(final ContactListener listener) { final Body b1 = m_shape1.getBody(); final Body b2 = m_shape2.getBody(); final Manifold m0 = tlm0.get(); final Vec2 v1 = tlV1.get(); final ContactPoint cp = tlCp.get(); SingletonPool.getCollideCircle().collidePolygonAndCircle(m_manifold, (PolygonShape)m_shape1, b1.getMemberXForm(), (CircleShape)m_shape2, b2.getMemberXForm()); final Boolean[] persisted = tlPersisted.get(2); persisted[0] = false; persisted[1] = false; cp.shape1 = m_shape1; cp.shape2 = m_shape2; cp.friction = m_friction; cp.restitution = m_restitution; // Match contact ids to facilitate warm starting. if (m_manifold.pointCount > 0) { // Match old contact ids to new contact ids and copy the // stored impulses to warm start the solver. for (int i = 0; i < m_manifold.pointCount; ++i) { final ManifoldPoint mp = m_manifold.points[i]; mp.normalImpulse = 0.0f; mp.tangentImpulse = 0.0f; boolean found = false; final ContactID id = mp.id; for (int j = 0; j < m0.pointCount; ++j) { if (persisted[j] == true) { continue; } final ManifoldPoint mp0 = m0.points[j]; if (mp0.id.isEqual(id)) { persisted[j] = true; mp.normalImpulse = mp0.normalImpulse; mp.tangentImpulse = mp0.tangentImpulse; // A persistent point. found = true; // Report persistent point. if (listener != null) { b1.getWorldLocationToOut(mp.localPoint1, cp.position); //Vec2 v1 = b1.getLinearVelocityFromLocalPoint(mp.localPoint1); b1.getLinearVelocityFromLocalPointToOut(mp.localPoint1, v1); //Vec2 v2 = b2.getLinearVelocityFromLocalPoint(mp.localPoint2); b2.getLinearVelocityFromLocalPointToOut(mp.localPoint2, cp.velocity); //cp.velocity = v2.sub(v1); cp.velocity.subLocal(v1); cp.normal.set(m_manifold.normal); cp.separation = mp.separation; cp.id.set(id); listener.persist(cp); } break; } } // Report added point. if (found == false && listener != null) { b1.getWorldLocationToOut(mp.localPoint1, cp.position); //Vec2 v1 = b1.getLinearVelocityFromLocalPoint(mp.localPoint1); b1.getLinearVelocityFromLocalPointToOut(mp.localPoint1, v1); //Vec2 v2 = b2.getLinearVelocityFromLocalPoint(mp.localPoint2); b2.getLinearVelocityFromLocalPointToOut(mp.localPoint2, cp.velocity); //cp.velocity = v2.sub(v1); cp.velocity.subLocal(v1); cp.normal.set(m_manifold.normal); cp.separation = mp.separation; cp.id.set(id); listener.add(cp); } } m_manifoldCount = 1; } else { m_manifoldCount = 0; } if (listener == null) { return; } // Report removed points. 
for (int i = 0; i < m0.pointCount; ++i) { if (persisted[i]) { continue; } final ManifoldPoint mp0 = m0.points[i]; b1.getWorldLocationToOut(mp0.localPoint1, cp.position); //Vec2 v1 = b1.getLinearVelocityFromLocalPoint(mp.localPoint1); b1.getLinearVelocityFromLocalPointToOut(mp0.localPoint1, v1); //Vec2 v2 = b2.getLinearVelocityFromLocalPoint(mp.localPoint2); b2.getLinearVelocityFromLocalPointToOut(mp0.localPoint2, cp.velocity); //cp.velocity = v2.sub(v1); cp.velocity.subLocal(v1); cp.normal.set(m_manifold.normal); cp.separation = mp0.separation; cp.id.set(mp0.id); listener.remove(cp); } } }
src/org/jbox2d/dynamics/contacts/PolyAndCircleContact.java
/* * JBox2D - A Java Port of Erin Catto's Box2D * * JBox2D homepage: http://jbox2d.sourceforge.net/ * Box2D homepage: http://www.box2d.org * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. */ package org.jbox2d.dynamics.contacts; import java.util.ArrayList; import java.util.List; import org.jbox2d.collision.ContactID; import org.jbox2d.collision.Manifold; import org.jbox2d.collision.ManifoldPoint; import org.jbox2d.collision.shapes.CircleShape; import org.jbox2d.collision.shapes.PolygonShape; import org.jbox2d.collision.shapes.Shape; import org.jbox2d.collision.shapes.ShapeType; import org.jbox2d.common.Vec2; import org.jbox2d.dynamics.Body; import org.jbox2d.dynamics.ContactListener; import org.jbox2d.pooling.SingletonPool; import org.jbox2d.pooling.TLContactPoint; import org.jbox2d.pooling.TLManifold; import org.jbox2d.pooling.TLVec2; import org.jbox2d.pooling.arrays.BooleanArray; //Updated to rev 144 of b2PolyAndCircleContact.h/cpp class PolyAndCircleContact extends Contact implements ContactCreateFcn { public final Manifold m_manifold; public final ArrayList<Manifold> manifoldList = new ArrayList<Manifold>(); public PolyAndCircleContact(final Shape s1, final Shape s2) { super(s1, s2); assert (m_shape1.getType() == ShapeType.POLYGON_SHAPE); assert (m_shape2.getType() == ShapeType.CIRCLE_SHAPE); m_manifold = new Manifold(); manifoldList.add(m_manifold); m_manifoldCount = 0; // These should not be necessary, manifold was // just created... //m_manifold.points[0].normalImpulse = 0.0f; //m_manifold.points[0].tangentImpulse = 0.0f; } public PolyAndCircleContact() { super(); m_manifold = new Manifold(); m_manifoldCount = 0; } @Override public Contact clone() { final PolyAndCircleContact newC = new PolyAndCircleContact(this.m_shape1, this.m_shape2); newC.m_manifold.set(this.m_manifold); newC.m_manifoldCount = this.m_manifoldCount; // The parent world. newC.m_world = this.m_world; // World pool and list pointers. newC.m_prev = this.m_prev; newC.m_next = this.m_next; // Nodes for connecting bodies. 
newC.m_node1.set(m_node1); newC.m_node2.set(m_node2); // Combined friction newC.m_friction = this.m_friction; newC.m_restitution = this.m_restitution; newC.m_flags = this.m_flags; return newC; } public Contact create(final Shape shape1, final Shape shape2) { return new PolyAndCircleContact(shape1, shape2); } @Override public List<Manifold> getManifolds() { return manifoldList; } // djm pooling private static final TLManifold tlm0 = new TLManifold(); private static final TLVec2 tlV1 = new TLVec2(); private static final TLContactPoint tlCp = new TLContactPoint(); private static final BooleanArray tlPersisted = new BooleanArray(); @Override public void evaluate(final ContactListener listener) { final Body b1 = m_shape1.getBody(); final Body b2 = m_shape2.getBody(); final Manifold m0 = tlm0.get(); final Vec2 v1 = tlV1.get(); final ContactPoint cp = tlCp.get(); SingletonPool.getCollideCircle().collidePolygonAndCircle(m_manifold, (PolygonShape)m_shape1, b1.getMemberXForm(), (CircleShape)m_shape2, b2.getMemberXForm()); final Boolean[] persisted = tlPersisted.get(2); persisted[0] = false; persisted[1] = false; cp.shape1 = m_shape1; cp.shape2 = m_shape2; cp.friction = m_friction; cp.restitution = m_restitution; // Match contact ids to facilitate warm starting. if (m_manifold.pointCount > 0) { // Match old contact ids to new contact ids and copy the // stored impulses to warm start the solver. for (int i = 0; i < m_manifold.pointCount; ++i) { final ManifoldPoint mp = m_manifold.points[i]; mp.normalImpulse = 0.0f; mp.tangentImpulse = 0.0f; boolean found = false; final ContactID id = new ContactID(mp.id); for (int j = 0; j < m0.pointCount; ++j) { if (persisted[j] == true) { continue; } final ManifoldPoint mp0 = m0.points[j]; if (mp0.id.isEqual(id)) { persisted[j] = true; mp.normalImpulse = mp0.normalImpulse; mp.tangentImpulse = mp0.tangentImpulse; // A persistent point. found = true; // Report persistent point. if (listener != null) { b1.getWorldLocationToOut(mp.localPoint1, cp.position); //Vec2 v1 = b1.getLinearVelocityFromLocalPoint(mp.localPoint1); b1.getLinearVelocityFromLocalPointToOut(mp.localPoint1, v1); //Vec2 v2 = b2.getLinearVelocityFromLocalPoint(mp.localPoint2); b2.getLinearVelocityFromLocalPointToOut(mp.localPoint2, cp.velocity); //cp.velocity = v2.sub(v1); cp.velocity.subLocal(v1); cp.normal.set(m_manifold.normal); cp.separation = mp.separation; cp.id.set(id); listener.persist(cp); } break; } } // Report added point. if (found == false && listener != null) { b1.getWorldLocationToOut(mp.localPoint1, cp.position); //Vec2 v1 = b1.getLinearVelocityFromLocalPoint(mp.localPoint1); b1.getLinearVelocityFromLocalPointToOut(mp.localPoint1, v1); //Vec2 v2 = b2.getLinearVelocityFromLocalPoint(mp.localPoint2); b2.getLinearVelocityFromLocalPointToOut(mp.localPoint2, cp.velocity); //cp.velocity = v2.sub(v1); cp.velocity.subLocal(v1); cp.normal.set(m_manifold.normal); cp.separation = mp.separation; cp.id.set(id); listener.add(cp); } } m_manifoldCount = 1; } else { m_manifoldCount = 0; } if (listener == null) { return; } // Report removed points. 
for (int i = 0; i < m0.pointCount; ++i) { if (persisted[i]) { continue; } final ManifoldPoint mp0 = m0.points[i]; b1.getWorldLocationToOut(mp0.localPoint1, cp.position); //Vec2 v1 = b1.getLinearVelocityFromLocalPoint(mp.localPoint1); b1.getLinearVelocityFromLocalPointToOut(mp0.localPoint1, v1); //Vec2 v2 = b2.getLinearVelocityFromLocalPoint(mp.localPoint2); b2.getLinearVelocityFromLocalPointToOut(mp0.localPoint2, cp.velocity); //cp.velocity = v2.sub(v1); cp.velocity.subLocal(v1); cp.normal.set(m_manifold.normal); cp.separation = mp0.separation; cp.id.set(mp0.id); listener.remove(cp); } } }
optimization
src/org/jbox2d/dynamics/contacts/PolyAndCircleContact.java
optimization
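The "optimization" in this record is narrow: the new_contents reads the existing ManifoldPoint id (final ContactID id = mp.id;) instead of allocating new ContactID(mp.id) on every evaluate() call, consistent with the thread-local pooling (TLManifold, TLVec2, TLContactPoint) already used in the class. The generic allocation-avoidance sketch below uses a hypothetical scratch type; none of its names come from the jbox2d source.

public class ReuseSketch {
    // Minimal mutable value type; a stand-in for pooled math types such as jbox2d's Vec2.
    static final class Point2 {
        float x, y;
        Point2 set(float x, float y) { this.x = x; this.y = y; return this; }
    }

    // One scratch instance reused across iterations instead of "new Point2(...)" per step.
    private final Point2 scratch = new Point2();

    float sumOfX(float[] xs, float[] ys) {
        float total = 0f;
        for (int i = 0; i < xs.length; i++) {
            Point2 p = scratch.set(xs[i], ys[i]); // no allocation inside the hot loop
            total += p.x;
        }
        return total;
    }

    public static void main(String[] args) {
        // Prints 3.0; only one Point2 is ever allocated, regardless of array length.
        System.out.println(new ReuseSketch().sumOfX(new float[]{1, 2}, new float[]{3, 4}));
    }
}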
Java
bsd-3-clause
addf7eda0f2dc7be83bde8f2daecdea302db87da
0
arjunbm13/stevia,persado/stevia,persado/stevia,ptsiakos77/stevia,arjunbm13/stevia,ptsiakos77/stevia,persado/stevia,arjunbm13/stevia,ptsiakos77/stevia,ptsiakos77/stevia,arjunbm13/stevia,persado/stevia
src/main/java/com/persado/oss/quality/stevia/selenium/core/controllers/AppiumWebController.java
package com.persado.oss.quality.stevia.selenium.core.controllers; import com.persado.oss.quality.stevia.network.http.HttpCookie; import com.persado.oss.quality.stevia.selenium.core.WebController; import com.persado.oss.quality.stevia.selenium.core.controllers.commonapi.KeyInfo; import org.openqa.selenium.Point; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.interactions.Actions; import java.io.IOException; import java.util.List; /** * Created by gkogketsof on 12/10/13. */ public class AppiumWebController extends WebControllerBase implements WebController { private WebDriver driver; @Override public void enableActionsLogging() { } @Override public void disableActionsLogging() { } @Override public void close() { } @Override public void quit() { } @Override public WebElement waitForElement(String locator) { return null; } @Override public WebElement waitForElement(String locator, long waitSeconds) { return null; } @Override public void waitForElementInvisibility(String locator) { } @Override public void waitForElementInvisibility(String locator, long waitSeconds) { } @Override public WebElement waitForElementPresence(String locator) { return null; } @Override public WebElement waitForElementPresence(String locator, long waitSeconds) { return null; } @Override public List<WebElement> findElements(String locator) { return null; } @Override public void input(String locator, String value) { } @Override public void press(String locator) { } @Override public void pressAndWaitForPageToLoad(String locator) { } @Override public void pressAndWaitForElement(String pressLocator, String elementToWaitLocator, long waitSeconds) { } @Override public void pressAndWaitForElement(String pressLocator, String elementToWaitLocator) { } @Override public void pressAndClickOkInAlert(String locator) { } @Override public void pressAndClickOkInAlertNoPageLoad(String locator) { } @Override public void pressAndClickCancelInAlert(String locator) { } @Override public void select(String locator, String option) { } @Override public void selectByValue(String locator, String value) { } @Override public void multiSelectAdd(String locator, String option) { } @Override public Object executeJavascript(String js, Object... 
args) { return null; } @Override public void waitForCondition(String jscondition) { } @Override public void waitForCondition(String jscondition, long waitSeconds) { } @Override public void clear(String locator) { } @Override public Actions getBuilder() { return null; } @Override public void mouseOver(String locator) { } @Override public void mouseUp(String locator) { } @Override public void mouseDown(String locator) { } @Override public void click(String locator) { } @Override public void doubleClick(String locator) { } @Override public void highlight(String locator) { } @Override public void highlight(String locator, String color) { } @Override public void takeScreenShot() throws IOException { } @Override public String getText(String locator) { return null; } @Override public void getFocus(String locator) { } @Override public String getSelectedOption(String locator) { return null; } @Override public List<String> getSelectedOptions(String locator) { return null; } @Override public String getInputValue(String locator) { return null; } @Override public boolean isAlertPresent() { return false; } @Override public boolean isTextPresent(String value) { return false; } @Override public boolean isTextNotPresent(String value) { return false; } @Override public boolean isComponentEditable(String locator) { return false; } @Override public boolean isComponentDisabled(String locator) { return false; } @Override public boolean isComponentPresent(String locator) { return false; } @Override public boolean isComponentPresent(String locator, long seconds) { return false; } @Override public boolean isComponentNotPresent(String locator) { return false; } @Override public boolean isComponentVisible(String locator) { return false; } @Override public boolean isComponentVisible(String locator, long seconds) { return false; } @Override public boolean isComponentNotVisible(String locator) { return false; } @Override public boolean isComponentNotVisible(String locator, long seconds) { return false; } @Override public boolean isComponentSelected(String locator) { return false; } @Override public boolean isComponentNotSelected(String locator) { return false; } @Override public void pressLinkName(String linkName) { } @Override public void pressLinkNameAndWaitForPageToLoad(String linkName) { } @Override public void pressLinkNameAndClickOkInAlert(String linkName) { } @Override public void pressLinkNameAndClickOkInAlertNoPageLoad(String linkName) { } @Override public void pressLinkNameAndClickCancelInAlert(String linkName) { } @Override public void typeKeys(String locator, String value) { } @Override public void keyDown(String locator, KeyInfo thekey) { } @Override public void keyUp(String locator, KeyInfo thekey) { } @Override public void keyPress(String locator, KeyInfo thekey) { } @Override public void keyDown(KeyInfo thekey) { } @Override public void keyUp(KeyInfo thekey) { } @Override public void keyPress(KeyInfo thekey) { } @Override public void clickOkInAlert() { } @Override public void promptInputPressOK(String inputMessage) { } @Override public void promptInputPressCancel(String inputMessage) { } @Override public void clickCancelInAlert() { } @Override public void navigate(String url) { } @Override public void refresh() { } @Override public String getTableElementRowPosition(String locator, String elementName) { return null; } @Override public int getNumberOfTotalRows(String locator) { return 0; } @Override public int getNumberOfTotalColumns(String locator) { return 0; } @Override public List<List<String>> 
getTableInfoAsList(String locator) { return null; } @Override public String getTableElementTextUnderHeader(String locator, String elementName, String headerName) { return null; } @Override public String getTableElementTextForRowAndColumn(String locator, String row, String column) { return null; } @Override public String getTableHeaderPosition(String locator, String headerName) { return null; } @Override public String getTableElementColumnPosition(String locator, String elementName) { return null; } @Override public List<String> getTableRecordsUnderHeader(String locator, String headerName) { return null; } @Override public String[][] getTableElements2DArray(String locator) { return new String[0][]; } @Override public String getTableElementSpecificHeaderLocator(String locator, String elementName, String headerName) { return null; } @Override public String getTableElementSpecificRowAndColumnLocator(String locator, String row, String column) { return null; } @Override public String getAttributeValue(String locator, String attribute) { return null; } @Override public HttpCookie getCookieByName(String name) { return null; } @Override public List<HttpCookie> getAllCookies() { return null; } @Override public void dragAndDrop(String locatorFrom, String locatorTo) { } @Override public void switchToLatestWindow() { } @Override public String getAlertText() { return null; } @Override public List<String> getAllListOptions(String locator) { return null; } @Override public void selectFrame(String frameID) { } @Override public void selectFrameMain() { } @Override public void maximizeWindow() { } @Override public int getNumberOfElementsMatchLocator(String locator) { return 0; } @Override public void moveToElement(String locator, int x, int y) { } @Override public void moveToElement(String locator) { } @Override public void moveByOffset(int xOffset, int yOffset) { } @Override public void waitForAjaxComplete(long milliseconds) { } @Override public String getCurrentUrl() { return null; } @Override public void dragAndDrop(String locatorFrom, int xOffset, int yOffset) { } @Override public Point getElementPosition(String locator) { return null; } @Override public String getPageSource() { return null; } public void setDriver(WebDriver driver) { this.driver = driver; } }
RMV: wrong push
src/main/java/com/persado/oss/quality/stevia/selenium/core/controllers/AppiumWebController.java
RMV: wrong push
Java
isc
96312ae95bf106f9dfb8da38f6314f5e489894ba
0
garyttierney/apollo,ryleykimmel/apollo,garyttierney/apollo,apollo-rsps/apollo,apollo-rsps/apollo,SJ19/apollo,Major-/apollo,LegendSky/apollo,Major-/apollo,SJ19/apollo,apollo-rsps/apollo,LegendSky/apollo,ryleykimmel/apollo
package org.apollo.game.model; import java.io.BufferedInputStream; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; import java.util.logging.Logger; import org.apollo.Service; import org.apollo.fs.IndexedFileSystem; import org.apollo.fs.decoder.ItemDefinitionDecoder; import org.apollo.fs.decoder.NpcDefinitionDecoder; import org.apollo.fs.decoder.ObjectDefinitionDecoder; import org.apollo.fs.decoder.StaticObjectDecoder; import org.apollo.game.command.CommandDispatcher; import org.apollo.game.login.LoginDispatcher; import org.apollo.game.login.LogoutDispatcher; import org.apollo.game.model.def.EquipmentDefinition; import org.apollo.game.model.def.ItemDefinition; import org.apollo.game.model.def.NpcDefinition; import org.apollo.game.model.def.ObjectDefinition; import org.apollo.game.model.obj.GameObject; import org.apollo.game.model.sector.Sector; import org.apollo.game.model.sector.SectorCoordinates; import org.apollo.game.model.sector.SectorRepository; import org.apollo.game.scheduling.ScheduledTask; import org.apollo.game.scheduling.Scheduler; import org.apollo.io.EquipmentDefinitionParser; import org.apollo.util.MobRepository; import org.apollo.util.NameUtil; import org.apollo.util.plugin.PluginManager; /** * The world class is a singleton which contains objects like the {@link MobRepository} for players and NPCs. It should * only contain things relevant to the in-game world and not classes which deal with I/O and such (these may be better * off inside some custom {@link Service} or other code, however, the circumstances are rare). * * @author Graham */ public final class World { /** * Represents the different status codes for registering a player. * * @author Graham */ public enum RegistrationStatus { /** * Indicates that the player is already online. */ ALREADY_ONLINE, /** * Indicates that the player was registered successfully. */ OK, /** * Indicates the world is full. */ WORLD_FULL; } /** * The logger for this class. */ private static final Logger logger = Logger.getLogger(World.class.getName()); /** * The world. */ private static final World world = new World(); /** * Gets the world. * * @return The world. */ public static World getWorld() { return world; } /** * The command dispatcher. */ private final CommandDispatcher commandDispatcher = new CommandDispatcher(); /** * The login dispatcher. */ private final LoginDispatcher loginDispatcher = new LoginDispatcher(); /** * The logout dispatcher. */ private final LogoutDispatcher logoutDispatcher = new LogoutDispatcher(); /** * The {@link MobRepository} of {@link Npc}s. */ private final MobRepository<Npc> npcRepository = new MobRepository<>(WorldConstants.MAXIMUM_NPCS); /** * The {@link MobRepository} of {@link Player}s. */ private final MobRepository<Player> playerRepository = new MobRepository<>(WorldConstants.MAXIMUM_PLAYERS); /** * A {@link Map} of player usernames and the player objects. */ private final Map<Long, Player> players = new HashMap<>(); /** * The {@link PluginManager}. TODO: better place than here!! */ private PluginManager pluginManager; /** * The release number (i.e. version) of this world. */ private int releaseNumber; /** * The scheduler. */ private final Scheduler scheduler = new Scheduler(); /** * This world's {@link SectorRepository}. */ private final SectorRepository sectorRepository = new SectorRepository(false); /** * Creates the world. */ private World() { } /** * Gets the command dispatcher. 
* * @return The command dispatcher. */ public CommandDispatcher getCommandDispatcher() { return commandDispatcher; } /** * Gets the {@link LoginDispatcher}. * * @return The dispatcher. */ public LoginDispatcher getLoginDispatcher() { return loginDispatcher; } /** * Gets the {@link LogoutDispatcher}. * * @return The dispatcher. */ public LogoutDispatcher getLogoutDispatcher() { return logoutDispatcher; } /** * Gets the npc repository. * * @return The npc repository. */ public MobRepository<Npc> getNpcRepository() { return npcRepository; } /** * Gets the {@link Player} with the specified username. Note that this will return {@code null} if the player is * offline. * * @param username The username. * @return The player. */ public Player getPlayer(String username) { return players.get(NameUtil.encodeBase37(username.toLowerCase())); } /** * Gets the player repository. * * @return The player repository. */ public MobRepository<Player> getPlayerRepository() { return playerRepository; } /** * Gets the plugin manager. TODO should this be here? * * @return The plugin manager. */ public PluginManager getPluginManager() { return pluginManager; } /** * Gets the release number of this world. * * @return The release number. */ public int getReleaseNumber() { return releaseNumber; } /** * Gets this world's {@link SectorRepository}. * * @return The sector repository. */ public SectorRepository getSectorRepository() { return sectorRepository; } /** * Initialises the world by loading definitions from the specified file system. * * @param release The release number. * @param fs The file system. * @param manager The plugin manager. TODO move this. * @throws IOException If an I/O error occurs. */ public void init(int release, IndexedFileSystem fs, PluginManager manager) throws Exception { this.releaseNumber = release; ItemDefinitionDecoder itemDefDecoder = new ItemDefinitionDecoder(fs); ItemDefinition[] itemDefs = itemDefDecoder.decode(); ItemDefinition.init(itemDefs); logger.info("Loaded " + itemDefs.length + " item definitions."); try (InputStream is = new BufferedInputStream(new FileInputStream("data/equipment-" + release + ".dat"))) { EquipmentDefinitionParser parser = new EquipmentDefinitionParser(is); EquipmentDefinition[] defs = parser.parse(); EquipmentDefinition.init(defs); logger.info("Loaded " + defs.length + " equipment definitions."); } NpcDefinitionDecoder npcDecoder = new NpcDefinitionDecoder(fs); NpcDefinition[] npcDefs = npcDecoder.decode(); NpcDefinition.init(npcDefs); logger.info("Loaded " + npcDefs.length + " npc definitions."); ObjectDefinitionDecoder objectDecoder = new ObjectDefinitionDecoder(fs); ObjectDefinition[] objDefs = objectDecoder.decode(); ObjectDefinition.init(objDefs); logger.info("Loaded " + objDefs.length + " object definitions."); StaticObjectDecoder staticDecoder = new StaticObjectDecoder(fs); GameObject[] objects = staticDecoder.decode(); placeEntities(objects); logger.info("Loaded " + objects.length + " static objects."); manager.start(); pluginManager = manager; // TODO move!! } /** * Checks if the {@link Player} with the specified name is online. * * @param username The name. * @return {@code true} if the player is online, otherwise {@code false}. */ public boolean isPlayerOnline(String username) { return players.get(NameUtil.encodeBase37(username.toLowerCase())) != null; } /** * Adds entities to sectors in the {@link SectorRepository}. * * @param entities The entities. * @return {@code true} if all entities were added successfully, otherwise {@code false}. 
*/ private boolean placeEntities(Entity... entities) { boolean success = true; for (Entity entity : entities) { Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(entity.getPosition())); success &= sector.addEntity(entity); } return success; } /** * Pulses the world. */ public void pulse() { scheduler.pulse(); } /** * Registers the specified npc. * * @param npc The npc. * @return {@code true} if the npc registered successfully, otherwise {@code false}. */ public boolean register(final Npc npc) { boolean success = npcRepository.add(npc); if (success) { Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(npc.getPosition())); sector.addEntity(npc); } else { logger.warning("Failed to register npc, repository capacity reached: [count=" + npcRepository.size() + "]"); } return success; } /** * Registers the specified player. * * @param player The player. * @return A {@link RegistrationStatus}. */ public RegistrationStatus register(final Player player) { if (isPlayerOnline(player.getUsername())) { return RegistrationStatus.ALREADY_ONLINE; } boolean success = playerRepository.add(player) && players.put(NameUtil.encodeBase37(player.getUsername().toLowerCase()), player) == null; if (success) { logger.info("Registered player: " + player + " [count=" + playerRepository.size() + "]"); Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(player.getPosition())); sector.addEntity(player); return RegistrationStatus.OK; } logger.warning("Failed to register player: " + player + " [count=" + playerRepository.size() + "]"); return RegistrationStatus.WORLD_FULL; } /** * Schedules a new task. * * @param task The {@link ScheduledTask}. */ public boolean schedule(ScheduledTask task) { return scheduler.schedule(task); } /** * Unregisters the specified {@link Npc}. * * @param npc The npc. */ public void unregister(final Npc npc) { if (npcRepository.remove(npc)) { Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(npc.getPosition())); sector.removeEntity(npc); } else { logger.warning("Could not find npc " + npc + " to unregister!"); } } /** * Unregisters the specified player. * * @param player The player. */ public void unregister(final Player player) { if (playerRepository.remove(player) & players.remove(NameUtil.encodeBase37(player.getUsername().toLowerCase())) == player) { logger.info("Unregistered player: " + player + " [count=" + playerRepository.size() + "]"); Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(player.getPosition())); sector.removeEntity(player); logoutDispatcher.dispatch(player); } else { logger.warning("Could not find player " + player + " to unregister!"); } } }
src/org/apollo/game/model/World.java
package org.apollo.game.model; import java.io.BufferedInputStream; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; import java.util.logging.Logger; import org.apollo.Service; import org.apollo.fs.IndexedFileSystem; import org.apollo.fs.decoder.ItemDefinitionDecoder; import org.apollo.fs.decoder.NpcDefinitionDecoder; import org.apollo.fs.decoder.ObjectDefinitionDecoder; import org.apollo.fs.decoder.StaticObjectDecoder; import org.apollo.game.command.CommandDispatcher; import org.apollo.game.login.LoginDispatcher; import org.apollo.game.login.LogoutDispatcher; import org.apollo.game.model.def.EquipmentDefinition; import org.apollo.game.model.def.ItemDefinition; import org.apollo.game.model.def.NpcDefinition; import org.apollo.game.model.def.ObjectDefinition; import org.apollo.game.model.obj.GameObject; import org.apollo.game.model.sector.Sector; import org.apollo.game.model.sector.SectorCoordinates; import org.apollo.game.model.sector.SectorRepository; import org.apollo.game.scheduling.ScheduledTask; import org.apollo.game.scheduling.Scheduler; import org.apollo.io.EquipmentDefinitionParser; import org.apollo.util.MobRepository; import org.apollo.util.NameUtil; import org.apollo.util.plugin.PluginManager; /** * The world class is a singleton which contains objects like the {@link MobRepository} for players and NPCs. It should * only contain things relevant to the in-game world and not classes which deal with I/O and such (these may be better * off inside some custom {@link Service} or other code, however, the circumstances are rare). * * @author Graham */ public final class World { /** * Represents the different status codes for registering a player. * * @author Graham */ public enum RegistrationStatus { /** * Indicates that the player is already online. */ ALREADY_ONLINE, /** * Indicates that the player was registered successfully. */ OK, /** * Indicates the world is full. */ WORLD_FULL; } /** * The logger for this class. */ private static final Logger logger = Logger.getLogger(World.class.getName()); /** * The world. */ private static final World world = new World(); /** * Gets the world. * * @return The world. */ public static World getWorld() { return world; } /** * The command dispatcher. */ private final CommandDispatcher commandDispatcher = new CommandDispatcher(); /** * The login dispatcher. */ private final LoginDispatcher loginDispatcher = new LoginDispatcher(); /** * The logout dispatcher. */ private final LogoutDispatcher logoutDispatcher = new LogoutDispatcher(); /** * The {@link MobRepository} of {@link Npc}s. */ private final MobRepository<Npc> npcRepository = new MobRepository<>(WorldConstants.MAXIMUM_NPCS); /** * The {@link MobRepository} of {@link Player}s. */ private final MobRepository<Player> playerRepository = new MobRepository<>(WorldConstants.MAXIMUM_PLAYERS); /** * A {@link Map} of player usernames and the player objects. */ private final Map<Long, Player> players = new HashMap<>(); /** * The {@link PluginManager}. TODO: better place than here!! */ private PluginManager pluginManager; /** * The release number (i.e. version) of this world. */ private int releaseNumber; /** * The scheduler. */ private final Scheduler scheduler = new Scheduler(); /** * This world's {@link SectorRepository}. */ private final SectorRepository sectorRepository = new SectorRepository(false); /** * Creates the world. */ private World() { } /** * Gets the command dispatcher. 
* * @return The command dispatcher. */ public CommandDispatcher getCommandDispatcher() { return commandDispatcher; } /** * Gets the {@link LoginDispatcher}. * * @return The dispatcher. */ public LoginDispatcher getLoginDispatcher() { return loginDispatcher; } /** * Gets the {@link LogoutDispatcher}. * * @return The dispatcher. */ public LogoutDispatcher getLogoutDispatcher() { return logoutDispatcher; } /** * Gets the npc repository. * * @return The npc repository. */ public MobRepository<Npc> getNpcRepository() { return npcRepository; } /** * Gets the {@link Player} with the specified username. Note that this will return {@code null} if the player is * offline. * * @param username The username. * @return The player. */ public Player getPlayer(String username) { return players.get(NameUtil.encodeBase37(username.toLowerCase())); } /** * Gets the player repository. * * @return The player repository. */ public MobRepository<Player> getPlayerRepository() { return playerRepository; } /** * Gets the plugin manager. TODO should this be here? * * @return The plugin manager. */ public PluginManager getPluginManager() { return pluginManager; } /** * Gets the release number of this world. * * @return The release number. */ public int getReleaseNumber() { return releaseNumber; } /** * Gets this world's {@link SectorRepository}. * * @return The sector repository. */ public SectorRepository getSectorRepository() { return sectorRepository; } /** * Initialises the world by loading definitions from the specified file system. * * @param release The release number. * @param fs The file system. * @param manager The plugin manager. TODO move this. * @throws IOException If an I/O error occurs. */ public void init(int release, IndexedFileSystem fs, PluginManager manager) throws Exception { this.releaseNumber = release; ItemDefinitionDecoder itemDefDecoder = new ItemDefinitionDecoder(fs); ItemDefinition[] itemDefs = itemDefDecoder.decode(); ItemDefinition.init(itemDefs); logger.info("Loaded " + itemDefs.length + " item definitions."); try (InputStream is = new BufferedInputStream(new FileInputStream("data/equipment-" + release + ".dat"))) { EquipmentDefinitionParser parser = new EquipmentDefinitionParser(is); EquipmentDefinition[] defs = parser.parse(); EquipmentDefinition.init(defs); logger.info("Loaded " + defs.length + " equipment definitions."); } NpcDefinitionDecoder npcDecoder = new NpcDefinitionDecoder(fs); NpcDefinition[] npcDefs = npcDecoder.decode(); NpcDefinition.init(npcDefs); logger.info("Loaded " + npcDefs.length + " npc definitions."); ObjectDefinitionDecoder objectDecoder = new ObjectDefinitionDecoder(fs); ObjectDefinition[] objDefs = objectDecoder.decode(); ObjectDefinition.init(objDefs); logger.info("Loaded " + objDefs.length + " object definitions."); StaticObjectDecoder staticDecoder = new StaticObjectDecoder(fs); GameObject[] objects = staticDecoder.decode(); placeEntities(objects); logger.info("Loaded " + objects.length + " static objects."); manager.start(); pluginManager = manager; // TODO move!! } /** * Checks if the {@link Player} with the specified name is online. * * @param username The name. * @return {@code true} if the player is online, otherwise {@code false}. */ public boolean isPlayerOnline(String username) { return players.get(NameUtil.encodeBase37(username.toLowerCase())) != null; } /** * Adds entities to sectors in the {@link SectorRepository}. * * @param entities The entities. * @return {@code true} if all entities were added successfully, otherwise {@code false}. 
*/ private boolean placeEntities(Entity... entities) { boolean success = true; for (Entity entity : entities) { Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(entity.getPosition())); success &= sector.addEntity(entity); } return success; } /** * Pulses the world. */ public void pulse() { scheduler.pulse(); } /** * Registers the specified npc. * * @param npc The npc. * @return {@code true} if the npc registered successfully, otherwise {@code false}. */ public boolean register(final Npc npc) { boolean success = npcRepository.add(npc); if (success) { Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(npc.getPosition())); sector.addEntity(npc); } else { logger.warning("Failed to register npc, repository capacity reached: [count=" + npcRepository.size() + "]"); } return success; } /** * Registers the specified player. * * @param player The player. * @return A {@link RegistrationStatus}. */ public RegistrationStatus register(final Player player) { if (isPlayerOnline(player.getUsername())) { return RegistrationStatus.ALREADY_ONLINE; } boolean success = playerRepository.add(player) & players.put(NameUtil.encodeBase37(player.getUsername().toLowerCase()), player) == null; if (success) { logger.info("Registered player: " + player + " [count=" + playerRepository.size() + "]"); Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(player.getPosition())); sector.addEntity(player); return RegistrationStatus.OK; } logger.warning("Failed to register player: " + player + " [count=" + playerRepository.size() + "]"); return RegistrationStatus.WORLD_FULL; } /** * Schedules a new task. * * @param task The {@link ScheduledTask}. */ public boolean schedule(ScheduledTask task) { return scheduler.schedule(task); } /** * Unregisters the specified {@link Npc}. * * @param npc The npc. */ public void unregister(final Npc npc) { if (npcRepository.remove(npc)) { Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(npc.getPosition())); sector.removeEntity(npc); } else { logger.warning("Could not find npc " + npc + " to unregister!"); } } /** * Unregisters the specified player. * * @param player The player. */ public void unregister(final Player player) { if (playerRepository.remove(player) & players.remove(NameUtil.encodeBase37(player.getUsername().toLowerCase())) == player) { logger.info("Unregistered player: " + player + " [count=" + playerRepository.size() + "]"); Sector sector = sectorRepository.get(SectorCoordinates.fromPosition(player.getPosition())); sector.removeEntity(player); logoutDispatcher.dispatch(player); } else { logger.warning("Could not find player " + player + " to unregister!"); } } }
Update World.java. If the first operand is false (meaning the world is full), the non-short-circuiting & operator still evaluates the second operand, so the player is added to the online players map anyway. That player can then never be unregistered (removed from the player map) because they were never actually registered, and the server will keep reporting them as online and logged in even though this is not the case.
src/org/apollo/game/model/World.java
Update World.java.
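The commit message above describes a classic & versus && pitfall: for boolean operands, & is the non-short-circuiting AND, so its right-hand side is evaluated even when the left-hand side is already false. A minimal, self-contained sketch of the difference, using a plain HashMap as a stand-in for Apollo's player map rather than the actual repository classes:

import java.util.HashMap;
import java.util.Map;

public class ShortCircuitDemo {
    public static void main(String[] args) {
        Map<Long, String> players = new HashMap<>();
        boolean added = false; // stand-in for playerRepository.add(player) failing (world full)

        // Non-short-circuiting '&': the right operand still runs, so the player
        // ends up in the map even though registration failed -- the bug described above.
        boolean buggy = added & players.put(1L, "player") == null;
        System.out.println(buggy + ", map size = " + players.size()); // false, map size = 1

        players.clear();

        // Short-circuiting '&&': the right operand is skipped when the left is
        // false, so nothing is added on failure -- the behaviour after the fix.
        boolean fixed = added && players.put(1L, "player") == null;
        System.out.println(fixed + ", map size = " + players.size()); // false, map size = 0
    }
}

Note that unregister(Player) still combines its two conditions with &, presumably intentionally, since both the repository removal and the map removal should be attempted regardless of each other's result.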
Java
isc
14e509dfd287a1142ede2d745782b130d6416471
0
badlogic/avian,badlogic/avian,bgould/avian,lostdj/avian,dicej/avian,joshuawarner32/avian,MaartenR/avian,ucdseniordesign/avian,bigfatbrowncat/avian-pack.avian,lostdj/avian,joshuawarner32/avian,badlogic/avian,bigfatbrowncat/avian-pack.avian,marcinolawski/avian,lostdj/avian,joshuawarner32/avian,dicej/avian,joshuawarner32/avian,MaartenR/avian,ucdseniordesign/avian,lostdj/avian,marcinolawski/avian,dicej/avian,minor-jason/avian,lwahlmeier/avian,badlogic/avian,bgould/avian,minor-jason/avian,lwahlmeier/avian,bgould/avian,getlantern/avian,bigfatbrowncat/avian-pack.avian,bgould/avian,ucdseniordesign/avian,marcinolawski/avian,MaartenR/avian,lwahlmeier/avian,getlantern/avian,minor-jason/avian,getlantern/avian,dicej/avian,getlantern/avian,marcinolawski/avian,lwahlmeier/avian,MaartenR/avian,ucdseniordesign/avian,minor-jason/avian,bigfatbrowncat/avian-pack.avian
public class Subroutine { private static void expect(boolean v) { if (! v) throw new RuntimeException(); } // This test is intended to cover the jsr and ret instructions. // However, recent Sun javac versions avoid generating these // instructions by default, so we must compile this class using // -source 1.2 -target 1.1 -XDjsrlimit=0. // // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4381996 // private static void test(boolean throw_, boolean predicate) { int x = 42; int y = 99; int a = 0; try { try { int z = x + y; if (throw_) throw new DummyException(); if (predicate) { return; } Integer.valueOf(z).toString(); } finally { a = x + y; System.gc(); } expect(a == x + y); } catch (DummyException e) { e.printStackTrace(); } } public static void main(String[] args) { test(false, false); test(false, true); test(true, false); } private static class DummyException extends RuntimeException { } }
test/Subroutine.java
public class Subroutine { private static void expect(boolean v) { if (! v) throw new RuntimeException(); } // This test is intended to cover the jsr and ret instructions. // However, recent Sun javac versions avoid generating these // instructions by default, so we must compile this class using // -source 1.2 -target 1.1 -XDjsrlimit=0. // // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4381996 // private static void test(boolean throw_) { int x = 42; int y = 99; int a = 0; try { try { int z = x + y; if (throw_) throw new DummyException(); Integer.valueOf(z).toString(); } finally { a = x + y; System.gc(); } expect(a == x + y); } catch (DummyException e) { e.printStackTrace(); } } public static void main(String[] args) { test(false); test(true); } private static class DummyException extends RuntimeException { } }
add additional jsr test to Subroutine
test/Subroutine.java
add additional jsr test to Subroutine
Java
mit
437515fa2a203b0e826d02827ed18efc317ed703
0
Microsoft/ProjectOxford-ClientSDK,Microsoft/ProjectOxford-ClientSDK,Microsoft/ProjectOxford-ClientSDK,Microsoft/ProjectOxford-ClientSDK,Microsoft/ProjectOxford-ClientSDK,Microsoft/ProjectOxford-ClientSDK,Microsoft/ProjectOxford-ClientSDK,Microsoft/ProjectOxford-ClientSDK
// // Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. // // Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services // // Microsoft Cognitive Services (formerly Project Oxford) GitHub: // https://github.com/Microsoft/ProjectOxford-ClientSDK // // Copyright (c) Microsoft Corporation // All rights reserved. // // MIT License: // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // package com.microsoft.projectoxford.emotion.contract; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; public class Scores { public double anger; public double contempt; public double disgust; public double fear; public double happiness; public double neutral; public double sadness; public double surprise; public List<Map.Entry<String, Double>> ToRankedList(Order order) { // create a Map to store each entry Map<String, Double> collection = new HashMap<String, Double>() ; // add each entry with its own key and value collection.put("ANGER",anger); collection.put("CONTEMPT",contempt); collection.put("DISGUST",disgust); collection.put("FEAR",fear); collection.put("HAPPINESS",happiness); collection.put("NEUTRAL",neutral); collection.put("SADNESS",sadness); collection.put("SURPRISE",surprise); // create a list with the entries List<Map.Entry<String, Double>> list = new ArrayList<Map.Entry<String, Double>>(collection.entrySet()); // we are going to create a comparator according to the value of the enum order switch (order) { case ASCENDING: Collections.sort(list, new Comparator<Map.Entry<String, Double>>() { @Override public int compare(Entry<String, Double> first, Entry<String, Double> second) { // we should compare the value of the first entry and the value of the second entry return first.getValue().compareTo(second.getValue()); } }); break; case DESCENDING: // for ordering descending we should create a reverse order comparator Collections.sort(list, Collections.reverseOrder(new Comparator<Map.Entry<String, Double>>() { @Override public int compare(Entry<String, Double> first, Entry<String, Double> second) { return first.getValue().compareTo(second.getValue()); } })); break; default: break; } return list; } }
Emotion/Android/ClientLibrary/lib/src/main/java/com/microsoft/projectoxford/emotion/contract/Scores.java
// // Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. // // Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services // // Microsoft Cognitive Services (formerly Project Oxford) GitHub: // https://github.com/Microsoft/ProjectOxford-ClientSDK // // Copyright (c) Microsoft Corporation // All rights reserved. // // MIT License: // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // package com.microsoft.projectoxford.emotion.contract; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; public class Scores { public double anger; public double contempt; public double disgust; public double fear; public double happiness; public double neutral; public double sadness; public double surprise; public List<Map.Entry<String, Double>> ToRankedList(Order order) { // create a Map to store each entry Map<String, Double> collection = new HashMap<String, Double>() ; // add each entry with its own key and value collection.put("ANGER",anger); collection.put("CONTEMPT",contempt); collection.put("DISGUST",disgust); collection.put("FEAR",fear); collection.put("HAPPINESS",happiness); collection.put("NEUTRAL",neutral); collection.put("SADNESS",sadness); collection.put("SURPRISE",surprise); // create a list with the entries List<Map.Entry<String, Double>> list = new ArrayList<Map.Entry<String, Double>>(collection.entrySet()); // we are going to create a comparator according to the value of the enum order switch (order) { case ASCENDING: Collections.sort(list, new Comparator<Map.Entry<String, Double>>() { @Override public int compare(Entry<String, Double> first, Entry<String, Double> second) { // we should compare the value of the first entry and the value of the second entry return first.getValue().compareTo(second.getValue()); } }); break; case DESCENDING: // for ordering descending we should create a reverse order comparator Collections.sort(list, Collections.reverseOrder(new Comparator<Map.Entry<String, Double>>() { @Override public int compare(Entry<String, Double> first, Entry<String, Double> second) { return first.getValue().compareTo(second.getValue()); } })); break; default: break; } return list; } }
Indentation fixed
Emotion/Android/ClientLibrary/lib/src/main/java/com/microsoft/projectoxford/emotion/contract/Scores.java
Indentation fixed
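ToRankedList above flattens the eight public score fields into a map and sorts the entries by value with an anonymous Comparator (reversed for descending order). A short, hedged usage sketch; it assumes Order is an enum in the same com.microsoft.projectoxford.emotion.contract package with the ASCENDING and DESCENDING constants the switch statement implies, and the field values are made up for illustration.

import java.util.Map;

import com.microsoft.projectoxford.emotion.contract.Order;
import com.microsoft.projectoxford.emotion.contract.Scores;

public class ScoresDemo {
    public static void main(String[] args) {
        Scores scores = new Scores();
        scores.happiness = 0.90;
        scores.surprise = 0.06;
        scores.neutral = 0.04;

        // Highest-scoring emotion first.
        for (Map.Entry<String, Double> entry : scores.ToRankedList(Order.DESCENDING)) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}

On Java 8 and later the two anonymous comparators could also be replaced by Map.Entry.comparingByValue() and its reversed() form, which would remove the duplication between the two switch branches.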
Java
mit
b373b3b4588b022de390c40116b51145949304e5
0
EasyBatch/easybatch-framework,EasyBatch/easybatch-framework
package org.easybatch.extensions.stream; import org.easybatch.core.reader.RecordReader; import org.easybatch.core.record.GenericRecord; import org.easybatch.core.record.Header; import org.easybatch.core.record.Record; import java.util.Date; import java.util.Iterator; import java.util.stream.Stream; /** * Reader that reads records form a {@link Stream}. * * This reader produces {@link GenericRecord} instances. * * @param <T> Type of elements in the stream. * @author Charles Fleury * @since 5.0 */ public class StreamRecordReader<T> implements RecordReader { private static final String DEFAULT_DATASOURCE_NAME = "DATASOURCE"; protected String datasource; protected Stream<T> stream; protected Iterator<T> iterator; protected long currentRecordNumber; /** * Create a {@link StreamRecordReader} to read record from a {@link Stream}. * * @param stream to read record from */ public StreamRecordReader(final Stream<T> stream) { this(stream, DEFAULT_DATASOURCE_NAME); } /** * Create a {@link StreamRecordReader} to read record from a {@link Stream}. * * @param stream to read record from * @param datasource name (default to DEFAULT_DATASOURCE_NAME) */ public StreamRecordReader(final Stream<T> stream, final String datasource) { this.stream = stream; this.datasource = datasource; } /** * Open the reader. */ @Override public void open() throws Exception { if (stream == null) { throw new IllegalArgumentException("stream must not be null"); } if (datasource == null || datasource.isEmpty()) { datasource = DEFAULT_DATASOURCE_NAME; } currentRecordNumber = 0; iterator = stream.iterator(); } /** * Read next record from the data source. * * @return the next record from the data source. */ @Override public Record<T> readRecord() throws Exception { if (iterator.hasNext()) { Header header = new Header(++currentRecordNumber, datasource, new Date()); return new GenericRecord<>(header, iterator.next()); } else { return null; } } /** * Close the reader. */ @Override public void close() throws Exception { stream.close(); } }
easybatch-extensions/easybatch-stream/src/main/java/org/easybatch/extensions/stream/StreamRecordReader.java
package org.easybatch.extensions.stream; import org.easybatch.core.reader.RecordReader; import org.easybatch.core.record.GenericRecord; import org.easybatch.core.record.Header; import org.easybatch.core.record.Record; import java.util.Date; import java.util.Iterator; import java.util.stream.Stream; /** * Reader that reads records form a {@link Stream}. * * This reader produces {@link GenericRecord} instances. * * @param <T> Type of elements in the stream. * @author Charles Fleury * @since 5.0 */ public class StreamRecordReader<T> implements RecordReader { private static final String DEFAULT_DATASOURCE_NAME = "DATASOURCE"; protected String datasource; protected Stream<T> stream; protected Iterator<T> iterator; protected long currentRecordNumber; /** * Create a {@link StreamRecordReader} to read record from a {@link Stream}. * * @param stream to read record from */ public StreamRecordReader(final Stream<T> stream) { this(stream, DEFAULT_DATASOURCE_NAME); } /** * Create a {@link StreamRecordReader} to read record from a {@link Stream}. * * @param stream to read record from * @param datasource name (default to DEFAULT_DATASOURCE_NAME) */ public StreamRecordReader(final Stream<T> stream, final String datasource) { this.stream = stream; this.datasource = datasource; } /** * Open the reader. */ @Override public void open() throws Exception { if (stream == null) { throw new IllegalArgumentException("stream must not be null"); } if (datasource == null || datasource.isEmpty()) { datasource = DEFAULT_DATASOURCE_NAME; } currentRecordNumber = 0; iterator = stream.iterator(); } /** * Read next record from the data source. * * @return the next record from the data source. */ @Override public Record<T> readRecord() throws Exception { if (iterator.hasNext()) { Header header = new Header(++currentRecordNumber, datasource, new Date()); return new GenericRecord<>(header, iterator.next()); } else { return null; } } /** * Close the reader. */ @Override public void close() throws Exception { // no op } }
close the stream at the end of the job
easybatch-extensions/easybatch-stream/src/main/java/org/easybatch/extensions/stream/StreamRecordReader.java
close the stream at the end of the job
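The change above makes close() delegate to the wrapped Stream instead of being a no-op, so whatever backs the stream (for example a Files.lines handle) is released when the job finishes. A hedged usage sketch that drives the reader by hand; in a real job the Easy Batch engine calls open/readRecord/close itself, and the sketch assumes the Record interface exposes getHeader() and getPayload() as GenericRecord suggests.

import java.util.stream.Stream;

import org.easybatch.core.record.Record;
import org.easybatch.extensions.stream.StreamRecordReader;

public class StreamRecordReaderDemo {
    public static void main(String[] args) throws Exception {
        Stream<String> source = Stream.of("a", "b", "c");
        StreamRecordReader<String> reader = new StreamRecordReader<>(source, "letters");

        reader.open();
        Record<String> rec;
        while ((rec = reader.readRecord()) != null) {
            // Record numbers start at 1 and the datasource name is "letters".
            System.out.println(rec.getHeader() + " -> " + rec.getPayload());
        }
        reader.close(); // now also closes the underlying Stream
    }
}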
Java
mit
b8cfc2274706d2d74e32323b558c114fcea05f2b
0
kmdouglass/Micro-Manager,kmdouglass/Micro-Manager
/////////////////////////////////////////////////////////////////////////////// //FILE: MMStudioMainFrame.java //PROJECT: Micro-Manager //SUBSYSTEM: mmstudio //----------------------------------------------------------------------------- //AUTHOR: Nenad Amodaj, [email protected], Jul 18, 2005 // Modifications by Arthur Edelstein, Nico Stuurman, Henry Pinkard //COPYRIGHT: University of California, San Francisco, 2006-2013 // 100X Imaging Inc, www.100ximaging.com, 2008 //LICENSE: This file is distributed under the BSD license. // License text is included with the source distribution. // This file is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty // of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. // IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES. //CVS: $Id$ // package org.micromanager; import ij.IJ; import ij.ImageJ; import ij.ImagePlus; import ij.WindowManager; import ij.gui.Line; import ij.gui.Roi; import ij.process.ImageProcessor; import java.awt.Color; import java.awt.Component; import java.awt.Dimension; import java.awt.Font; import java.awt.Rectangle; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.FocusAdapter; import java.awt.event.FocusEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.awt.geom.Point2D; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.prefs.Preferences; import javax.swing.AbstractButton; import javax.swing.BorderFactory; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JCheckBoxMenuItem; import javax.swing.JComboBox; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JSplitPane; import javax.swing.JTextField; import javax.swing.JToggleButton; import javax.swing.SpringLayout; import javax.swing.SwingConstants; import javax.swing.SwingUtilities; import javax.swing.ToolTipManager; import javax.swing.UIManager; import mmcorej.CMMCore; import mmcorej.DeviceType; import mmcorej.MMCoreJ; import mmcorej.MMEventCallback; import mmcorej.StrVector; import org.json.JSONObject; import org.micromanager.acquisition.AcquisitionManager; import org.micromanager.api.Autofocus; import org.micromanager.api.DataProcessor; import org.micromanager.api.MMPlugin; import org.micromanager.api.MMProcessorPlugin; import org.micromanager.api.MMTags; import org.micromanager.api.PositionList; import org.micromanager.api.ScriptInterface; import org.micromanager.api.MMListenerInterface; import org.micromanager.api.SequenceSettings; import org.micromanager.conf2.ConfiguratorDlg2; import org.micromanager.conf2.MMConfigFileException; import org.micromanager.conf2.MicroscopeModel; import org.micromanager.events.EventManager; import org.micromanager.graph.GraphData; import org.micromanager.graph.GraphFrame; import org.micromanager.navigation.CenterAndDragListener; import org.micromanager.navigation.XYZKeyListener; import org.micromanager.navigation.ZWheelListener; import org.micromanager.pipelineUI.PipelinePanel; import org.micromanager.utils.AutofocusManager; import org.micromanager.utils.ContrastSettings; import org.micromanager.utils.GUIColors; import org.micromanager.utils.GUIUtils; 
import org.micromanager.utils.JavaUtils; import org.micromanager.utils.MMException; import org.micromanager.utils.MMScriptException; import org.micromanager.utils.NumberUtils; import org.micromanager.utils.TextUtils; import org.micromanager.utils.WaitDialog; import bsh.EvalError; import bsh.Interpreter; import com.swtdesigner.SwingResourceManager; import ij.gui.ImageCanvas; import ij.gui.ImageWindow; import ij.gui.Toolbar; import java.awt.*; import java.awt.dnd.DropTarget; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import mmcorej.TaggedImage; import org.json.JSONException; import org.micromanager.acquisition.*; import org.micromanager.api.ImageCache; import org.micromanager.api.IAcquisitionEngine2010; import org.micromanager.graph.HistogramSettings; import org.micromanager.internalinterfaces.LiveModeListener; import org.micromanager.utils.DragDropUtil; import org.micromanager.utils.FileDialogs; import org.micromanager.utils.FileDialogs.FileType; import org.micromanager.utils.HotKeysDialog; import org.micromanager.utils.ImageUtils; import org.micromanager.utils.MDUtils; import org.micromanager.utils.MMKeyDispatcher; import org.micromanager.utils.ReportingUtils; import org.micromanager.utils.UIMonitor; /* * Main panel and application class for the MMStudio. */ public class MMStudioMainFrame extends JFrame implements ScriptInterface { private static final String MICRO_MANAGER_TITLE = "Micro-Manager"; private static final long serialVersionUID = 3556500289598574541L; private static final String MAIN_FRAME_X = "x"; private static final String MAIN_FRAME_Y = "y"; private static final String MAIN_FRAME_WIDTH = "width"; private static final String MAIN_FRAME_HEIGHT = "height"; private static final String MAIN_FRAME_DIVIDER_POS = "divider_pos"; private static final String MAIN_EXPOSURE = "exposure"; private static final String MAIN_SAVE_METHOD = "saveMethod"; private static final String SYSTEM_CONFIG_FILE = "sysconfig_file"; private static final String OPEN_ACQ_DIR = "openDataDir"; private static final String SCRIPT_CORE_OBJECT = "mmc"; private static final String SCRIPT_ACQENG_OBJECT = "acq"; private static final String SCRIPT_GUI_OBJECT = "gui"; private static final String AUTOFOCUS_DEVICE = "autofocus_device"; private static final String MOUSE_MOVES_STAGE = "mouse_moves_stage"; private static final String EXPOSURE_SETTINGS_NODE = "MainExposureSettings"; private static final String CONTRAST_SETTINGS_NODE = "MainContrastSettings"; private static final int TOOLTIP_DISPLAY_DURATION_MILLISECONDS = 15000; private static final int TOOLTIP_DISPLAY_INITIAL_DELAY_MILLISECONDS = 2000; // cfg file saving private static final String CFGFILE_ENTRY_BASE = "CFGFileEntry"; // + {0, 1, 2, 3, 4} // GUI components private JComboBox comboBinning_; private JComboBox shutterComboBox_; private JTextField textFieldExp_; private JLabel labelImageDimensions_; private JToggleButton liveButton_; private JCheckBox autoShutterCheckBox_; private MMOptions options_; private boolean runsAsPlugin_; private JCheckBoxMenuItem centerAndDragMenuItem_; private JButton snapButton_; private JButton autofocusNowButton_; private JButton autofocusConfigureButton_; private JToggleButton toggleShutterButton_; private GUIColors guiColors_; private GraphFrame profileWin_; private PropertyEditor propertyBrowser_; private CalibrationListDlg 
calibrationListDlg_; private AcqControlDlg acqControlWin_; private JMenu pluginMenu_; private Map<String, JMenu> pluginSubMenus_; private List<MMListenerInterface> MMListeners_ = Collections.synchronizedList(new ArrayList<MMListenerInterface>()); private List<LiveModeListener> liveModeListeners_ = Collections.synchronizedList(new ArrayList<LiveModeListener>()); private List<Component> MMFrames_ = Collections.synchronizedList(new ArrayList<Component>()); private AutofocusManager afMgr_; private final static String DEFAULT_CONFIG_FILE_NAME = "MMConfig_demo.cfg"; private final static String DEFAULT_CONFIG_FILE_PROPERTY = "org.micromanager.default.config.file"; private ArrayList<String> MRUConfigFiles_; private static final int maxMRUCfgs_ = 5; private String sysConfigFile_; private String startupScriptFile_; private ConfigGroupPad configPad_; private LiveModeTimer liveModeTimer_; private GraphData lineProfileData_; // labels for standard devices private String cameraLabel_; private String zStageLabel_; private String shutterLabel_; private String xyStageLabel_; // applications settings private Preferences mainPrefs_; private Preferences systemPrefs_; private Preferences colorPrefs_; private Preferences exposurePrefs_; private Preferences contrastPrefs_; // MMcore private CMMCore core_; private AcquisitionWrapperEngine engine_; private PositionList posList_; private PositionListDlg posListDlg_; private String openAcqDirectory_ = ""; private boolean running_; private boolean configChanged_ = false; private StrVector shutters_ = null; private JButton saveConfigButton_; private ScriptPanel scriptPanel_; private PipelinePanel pipelinePanel_; private org.micromanager.utils.HotKeys hotKeys_; private CenterAndDragListener centerAndDragListener_; private ZWheelListener zWheelListener_; private XYZKeyListener xyzKeyListener_; private AcquisitionManager acqMgr_; private static VirtualAcquisitionDisplay simpleDisplay_; private Color[] multiCameraColors_ = {Color.RED, Color.GREEN, Color.BLUE, Color.YELLOW, Color.CYAN}; private boolean liveModeSuspended_; public Font defaultScriptFont_ = null; public static final String SIMPLE_ACQ = "Snap/Live Window"; public static FileType MM_CONFIG_FILE = new FileType("MM_CONFIG_FILE", "Micro-Manager Config File", "./MyScope.cfg", true, "cfg"); // Our instance private static MMStudioMainFrame gui_; // Callback private CoreEventCallback cb_; // Lock invoked while shutting down private final Object shutdownLock_ = new Object(); private JMenuBar menuBar_; private ConfigPadButtonPanel configPadButtonPanel_; private final JMenu switchConfigurationMenu_; private final MetadataPanel metadataPanel_; public static FileType MM_DATA_SET = new FileType("MM_DATA_SET", "Micro-Manager Image Location", System.getProperty("user.home") + "/Untitled", false, (String[]) null); private Thread acquisitionEngine2010LoadingThread_ = null; private Class<?> acquisitionEngine2010Class_ = null; private IAcquisitionEngine2010 acquisitionEngine2010_ = null; private final JSplitPane splitPane_; private volatile boolean ignorePropertyChanges_; private PluginLoader pluginLoader_; private AbstractButton setRoiButton_; private AbstractButton clearRoiButton_; /** * Simple class used to cache static info */ private class StaticInfo { public long width_; public long height_; public long bytesPerPixel_; public long imageBitDepth_; public double pixSizeUm_; public double zPos_; public double x_; public double y_; } private StaticInfo staticInfo_ = new StaticInfo(); /** * Main procedure for stand alone 
operation. */ public static void main(String args[]) { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); MMStudioMainFrame frame = new MMStudioMainFrame(false); frame.setVisible(true); frame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE); } catch (Throwable e) { ReportingUtils.showError(e, "A java error has caused Micro-Manager to exit."); System.exit(1); } } /** * MMStudioMainframe constructor * @param pluginStatus */ @SuppressWarnings("LeakingThisInConstructor") public MMStudioMainFrame(boolean pluginStatus) { org.micromanager.diagnostics.ThreadExceptionLogger.setUp(); // Set up event handling early, so following code can subscribe/publish // events as needed. EventManager manager = new EventManager(); startLoadingPipelineClass(); options_ = new MMOptions(); try { options_.loadSettings(); } catch (NullPointerException ex) { ReportingUtils.logError(ex); } UIMonitor.enable(options_.debugLogEnabled_); guiColors_ = new GUIColors(); pluginLoader_ = new PluginLoader(); // plugins_ = new ArrayList<PluginItem>(); gui_ = this; runsAsPlugin_ = pluginStatus; setIconImage(SwingResourceManager.getImage(MMStudioMainFrame.class, "icons/microscope.gif")); running_ = true; acqMgr_ = new AcquisitionManager(); sysConfigFile_ = new File(DEFAULT_CONFIG_FILE_NAME).getAbsolutePath(); sysConfigFile_ = System.getProperty(DEFAULT_CONFIG_FILE_PROPERTY, sysConfigFile_); if (options_.startupScript_.length() > 0) { startupScriptFile_ = new File(options_.startupScript_).getAbsolutePath(); } else { startupScriptFile_ = ""; } ReportingUtils.SetContainingFrame(gui_); // set the location for app preferences try { mainPrefs_ = Preferences.userNodeForPackage(this.getClass()); } catch (Exception e) { ReportingUtils.logError(e); } systemPrefs_ = mainPrefs_; colorPrefs_ = mainPrefs_.node(mainPrefs_.absolutePath() + "/" + AcqControlDlg.COLOR_SETTINGS_NODE); exposurePrefs_ = mainPrefs_.node(mainPrefs_.absolutePath() + "/" + EXPOSURE_SETTINGS_NODE); contrastPrefs_ = mainPrefs_.node(mainPrefs_.absolutePath() + "/" + CONTRAST_SETTINGS_NODE); // check system preferences try { Preferences p = Preferences.systemNodeForPackage(this.getClass()); if (null != p) { // if we can not write to the systemPrefs, use AppPrefs instead if (JavaUtils.backingStoreAvailable(p)) { systemPrefs_ = p; } } } catch (Exception e) { ReportingUtils.logError(e); } showRegistrationDialogMaybe(); // load application preferences // NOTE: only window size and position preferences are loaded, // not the settings for the camera and live imaging - // attempting to set those automatically on startup may cause problems // with the hardware int x = mainPrefs_.getInt(MAIN_FRAME_X, 100); int y = mainPrefs_.getInt(MAIN_FRAME_Y, 100); int width = mainPrefs_.getInt(MAIN_FRAME_WIDTH, 644); int height = mainPrefs_.getInt(MAIN_FRAME_HEIGHT, 570); openAcqDirectory_ = mainPrefs_.get(OPEN_ACQ_DIR, ""); try { ImageUtils.setImageStorageClass(Class.forName (mainPrefs_.get(MAIN_SAVE_METHOD, ImageUtils.getImageStorageClass().getName()) ) ); } catch (ClassNotFoundException ex) { ReportingUtils.logError(ex, "Class not found error. 
Should never happen"); } ToolTipManager ttManager = ToolTipManager.sharedInstance(); ttManager.setDismissDelay(TOOLTIP_DISPLAY_DURATION_MILLISECONDS); ttManager.setInitialDelay(TOOLTIP_DISPLAY_INITIAL_DELAY_MILLISECONDS); setBounds(x, y, width, height); setExitStrategy(options_.closeOnExit_); setTitle(MICRO_MANAGER_TITLE + " " + MMVersion.VERSION_STRING); setBackground(guiColors_.background.get((options_.displayBackground_))); setMinimumSize(new Dimension(605,480)); menuBar_ = new JMenuBar(); switchConfigurationMenu_ = new JMenu(); setJMenuBar(menuBar_); initializeFileMenu(); initializeToolsMenu(); splitPane_ = createSplitPane(mainPrefs_.getInt(MAIN_FRAME_DIVIDER_POS, 200)); getContentPane().add(splitPane_); createTopPanelWidgets((JPanel) splitPane_.getComponent(0)); metadataPanel_ = createMetadataPanel((JPanel) splitPane_.getComponent(1)); setupWindowHandlers(); // Add our own keyboard manager that handles Micro-Manager shortcuts MMKeyDispatcher mmKD = new MMKeyDispatcher(gui_); KeyboardFocusManager.getCurrentKeyboardFocusManager().addKeyEventDispatcher(mmKD); DropTarget dropTarget = new DropTarget(this, new DragDropUtil()); } private void setupWindowHandlers() { // add window listeners addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { closeSequence(false); } @Override public void windowOpened(WindowEvent e) { // ------------------- // initialize hardware // ------------------- try { core_ = new CMMCore(); } catch(UnsatisfiedLinkError ex) { ReportingUtils.showError(ex, "Failed to load the MMCoreJ_wrap native library"); return; } ReportingUtils.setCore(core_); core_.enableDebugLog(options_.debugLogEnabled_); logStartupProperties(); cameraLabel_ = ""; shutterLabel_ = ""; zStageLabel_ = ""; xyStageLabel_ = ""; engine_ = new AcquisitionWrapperEngine(acqMgr_); // processorStackManager_ = new ProcessorStackManager(engine_); // register callback for MMCore notifications, this is a global // to avoid garbage collection cb_ = new CoreEventCallback(); core_.registerCallback(cb_); try { core_.setCircularBufferMemoryFootprint(options_.circularBufferSizeMB_); } catch (Exception e2) { ReportingUtils.showError(e2); } MMStudioMainFrame parent = (MMStudioMainFrame) e.getWindow(); if (parent != null) { engine_.setParentGUI(parent); } loadMRUConfigFiles(); afMgr_ = new AutofocusManager(gui_); Thread pluginInitializer = initializePlugins(); toFront(); if (!options_.doNotAskForConfigFile_) { MMIntroDlg introDlg = new MMIntroDlg(MMVersion.VERSION_STRING, MRUConfigFiles_); introDlg.setConfigFile(sysConfigFile_); introDlg.setBackground(guiColors_.background.get((options_.displayBackground_))); introDlg.setVisible(true); if (!introDlg.okChosen()) { closeSequence(false); return; } sysConfigFile_ = introDlg.getConfigFile(); } saveMRUConfigFiles(); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); paint(MMStudioMainFrame.this.getGraphics()); engine_.setCore(core_, afMgr_); posList_ = new PositionList(); engine_.setPositionList(posList_); // load (but do no show) the scriptPanel createScriptPanel(); // Ditto with the image pipeline panel. 
            createPipelinePanel();

            // Create an instance of HotKeys so that they can be read in from prefs
            hotKeys_ = new org.micromanager.utils.HotKeys();
            hotKeys_.loadSettings();

            // before loading the system configuration, we need to wait
            // until the plugins are loaded
            try {
               pluginInitializer.join(2000);
            } catch (InterruptedException ex) {
               ReportingUtils.logError(ex, "Plugin loader thread was interrupted");
            }

            // if an error occurred during config loading,
            // do not display more errors than needed
            if (!loadSystemConfiguration())
               ReportingUtils.showErrorOn(false);

            executeStartupScript();

            // Create Multi-D window here but do not show it.
            // This window needs to be created in order to properly set the "ChannelGroup"
            // based on the Multi-D parameters
            acqControlWin_ = new AcqControlDlg(engine_, mainPrefs_, MMStudioMainFrame.this, options_);
            addMMBackgroundListener(acqControlWin_);

            configPad_.setCore(core_);
            if (parent != null) {
               configPad_.setParentGUI(parent);
            }
            configPadButtonPanel_.setCore(core_);

            // initialize controls
            initializeHelpMenu();

            String afDevice = mainPrefs_.get(AUTOFOCUS_DEVICE, "");
            if (afMgr_.hasDevice(afDevice)) {
               try {
                  afMgr_.selectDevice(afDevice);
               } catch (MMException e1) {
                  // this error should never happen
                  ReportingUtils.showError(e1);
               }
            }
            centerAndDragListener_ = new CenterAndDragListener(gui_);
            zWheelListener_ = new ZWheelListener(core_, gui_);
            gui_.addLiveModeListener(zWheelListener_);
            xyzKeyListener_ = new XYZKeyListener(core_, gui_);
            gui_.addLiveModeListener(xyzKeyListener_);

            // switch error reporting back on
            ReportingUtils.showErrorOn(true);
         }

         private Thread initializePlugins() {
            pluginMenu_ = GUIUtils.createMenuInMenuBar(menuBar_, "Plugins");
            Thread myThread = new ThreadPluginLoading("Plugin loading");
            myThread.start();
            return myThread;
         }

         class ThreadPluginLoading extends Thread {

            public ThreadPluginLoading(String string) {
               super(string);
            }

            @Override
            public void run() {
               // Needed for loading clojure-based jars:
               Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
               pluginLoader_.loadPlugins();
            }
         }
      });
   }

   /**
    * Callback to update GUI when a change happens in the MMCore.
    */
   public class CoreEventCallback extends MMEventCallback {

      public CoreEventCallback() {
         super();
      }

      @Override
      public void onPropertiesChanged() {
         // TODO: remove test once acquisition engine is fully multithreaded
         if (engine_ != null && engine_.isAcquisitionRunning()) {
            core_.logMessage("Notification from MMCore ignored because acquisition is running!", true);
         } else {
            if (ignorePropertyChanges_) {
               core_.logMessage("Notification from MMCore ignored since the system is still loading", true);
            } else {
               core_.updateSystemStateCache();
               updateGUI(true);
               // update all registered listeners
               for (MMListenerInterface mmIntf : MMListeners_) {
                  mmIntf.propertiesChangedAlert();
               }
               core_.logMessage("Notification from MMCore!", true);
            }
         }
      }

      @Override
      public void onPropertyChanged(String deviceName, String propName, String propValue) {
         core_.logMessage("Notification for Device: " + deviceName
               + " Property: " + propName + " changed to value: " + propValue, true);
         // update all registered listeners
         for (MMListenerInterface mmIntf : MMListeners_) {
            mmIntf.propertyChangedAlert(deviceName, propName, propValue);
         }
      }

      @Override
      public void onConfigGroupChanged(String groupName, String newConfig) {
         try {
            configPad_.refreshGroup(groupName, newConfig);
            for (MMListenerInterface mmIntf : MMListeners_) {
               mmIntf.configGroupChangedAlert(groupName, newConfig);
            }
         } catch (Exception e) {
         }
      }

      @Override
      public void onSystemConfigurationLoaded() {
         for (MMListenerInterface mmIntf : MMListeners_) {
            mmIntf.systemConfigurationLoaded();
         }
      }

      @Override
      public void onPixelSizeChanged(double newPixelSizeUm) {
         updatePixSizeUm(newPixelSizeUm);
         for (MMListenerInterface mmIntf : MMListeners_) {
            mmIntf.pixelSizeChangedAlert(newPixelSizeUm);
         }
      }

      @Override
      public void onStagePositionChanged(String deviceName, double pos) {
         if (deviceName.equals(zStageLabel_)) {
            updateZPos(pos);
            for (MMListenerInterface mmIntf : MMListeners_) {
               mmIntf.stagePositionChangedAlert(deviceName, pos);
            }
         }
      }

      @Override
      public void onStagePositionChangedRelative(String deviceName, double pos) {
         if (deviceName.equals(zStageLabel_))
            updateZPosRelative(pos);
      }

      @Override
      public void onXYStagePositionChanged(String deviceName, double xPos, double yPos) {
         if (deviceName.equals(xyStageLabel_)) {
            updateXYPos(xPos, yPos);
            for (MMListenerInterface mmIntf : MMListeners_) {
               mmIntf.xyStagePositionChanged(deviceName, xPos, yPos);
            }
         }
      }

      @Override
      public void onXYStagePositionChangedRelative(String deviceName, double xPos, double yPos) {
         if (deviceName.equals(xyStageLabel_))
            updateXYPosRelative(xPos, yPos);
      }

      @Override
      public void onExposureChanged(String deviceName, double exposure) {
         if (deviceName.equals(cameraLabel_)) {
            // update exposure in gui
            textFieldExp_.setText(NumberUtils.doubleToDisplayString(exposure));
         }
         for (MMListenerInterface mmIntf : MMListeners_) {
            mmIntf.exposureChanged(deviceName, exposure);
         }
      }
   }

   private void handleException(Exception e, String msg) {
      String errText = "Exception occurred: ";
      if (msg.length() > 0) {
         errText += msg + " -- ";
      }
      if (options_.debugLogEnabled_) {
         errText += e.getMessage();
      } else {
         errText += e.toString() + "\n";
         ReportingUtils.showError(e);
      }
      handleError(errText);
   }

   private void handleException(Exception e) {
      handleException(e, "");
   }

   private void handleError(String message) {
      if (isLiveModeOn()) {
         // Should we always stop live mode on any error?
enableLiveMode(false); } JOptionPane.showMessageDialog(this, message); core_.logMessage(message); } public ImageWindow getImageWin() { return getSnapLiveWin(); } public static VirtualAcquisitionDisplay getSimpleDisplay() { return simpleDisplay_; } public static void createSimpleDisplay(String name, ImageCache cache) throws MMScriptException { simpleDisplay_ = new VirtualAcquisitionDisplay(cache, name); } public void checkSimpleAcquisition() { if (core_.getCameraDevice().length() == 0) { ReportingUtils.showError("No camera configured"); return; } int width = (int) core_.getImageWidth(); int height = (int) core_.getImageHeight(); int depth = (int) core_.getBytesPerPixel(); int bitDepth = (int) core_.getImageBitDepth(); int numCamChannels = (int) core_.getNumberOfCameraChannels(); try { if (acquisitionExists(SIMPLE_ACQ)) { if ((getAcquisitionImageWidth(SIMPLE_ACQ) != width) || (getAcquisitionImageHeight(SIMPLE_ACQ) != height) || (getAcquisitionImageByteDepth(SIMPLE_ACQ) != depth) || (getAcquisitionImageBitDepth(SIMPLE_ACQ) != bitDepth) || (getAcquisitionMultiCamNumChannels(SIMPLE_ACQ) != numCamChannels)) { //Need to close and reopen simple window closeAcquisitionWindow(SIMPLE_ACQ); } } if (!acquisitionExists(SIMPLE_ACQ)) { openAcquisition(SIMPLE_ACQ, "", 1, numCamChannels, 1, true); if (numCamChannels > 1) { for (long i = 0; i < numCamChannels; i++) { String chName = core_.getCameraChannelName(i); int defaultColor = multiCameraColors_[(int) i % multiCameraColors_.length].getRGB(); setChannelColor(SIMPLE_ACQ, (int) i, getChannelColor(chName, defaultColor)); setChannelName(SIMPLE_ACQ, (int) i, chName); } } initializeSimpleAcquisition(SIMPLE_ACQ, width, height, depth, bitDepth, numCamChannels); getAcquisition(SIMPLE_ACQ).promptToSave(false); getAcquisition(SIMPLE_ACQ).getAcquisitionWindow().getHyperImage().getWindow().toFront(); this.updateCenterAndDragListener(); } } catch (Exception ex) { ReportingUtils.showError(ex); } } public void checkSimpleAcquisition(TaggedImage image) { try { JSONObject tags = image.tags; int width = MDUtils.getWidth(tags); int height = MDUtils.getHeight(tags); int depth = MDUtils.getDepth(tags); int bitDepth = MDUtils.getBitDepth(tags); int numCamChannels = (int) core_.getNumberOfCameraChannels(); if (acquisitionExists(SIMPLE_ACQ)) { if ((getAcquisitionImageWidth(SIMPLE_ACQ) != width) || (getAcquisitionImageHeight(SIMPLE_ACQ) != height) || (getAcquisitionImageByteDepth(SIMPLE_ACQ) != depth) || (getAcquisitionImageBitDepth(SIMPLE_ACQ) != bitDepth) || (getAcquisitionMultiCamNumChannels(SIMPLE_ACQ) != numCamChannels)) { //Need to close and reopen simple window closeAcquisitionWindow(SIMPLE_ACQ); // Seems that closeAcquisitionWindow also closes the acquisition... 
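               // ...so the explicit closeAcquisition(SIMPLE_ACQ) call below stays commented out.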
//closeAcquisition(SIMPLE_ACQ); } } if (!acquisitionExists(SIMPLE_ACQ)) { openAcquisition(SIMPLE_ACQ, "", 1, numCamChannels, 1, true); if (numCamChannels > 1) { for (long i = 0; i < numCamChannels; i++) { String chName = core_.getCameraChannelName(i); int defaultColor = multiCameraColors_[(int) i % multiCameraColors_.length].getRGB(); setChannelColor(SIMPLE_ACQ, (int) i, getChannelColor(chName, defaultColor)); setChannelName(SIMPLE_ACQ, (int) i, chName); } } initializeSimpleAcquisition(SIMPLE_ACQ, width, height, depth, bitDepth, numCamChannels); getAcquisition(SIMPLE_ACQ).promptToSave(false); getAcquisition(SIMPLE_ACQ).getAcquisitionWindow().getHyperImage().getWindow().toFront(); this.updateCenterAndDragListener(); } } catch (Exception ex) { ReportingUtils.showError(ex); } } public void saveChannelColor(String chName, int rgb) { if (colorPrefs_ != null) { colorPrefs_.putInt("Color_" + chName, rgb); } } public Color getChannelColor(String chName, int defaultColor) { if (colorPrefs_ != null) { defaultColor = colorPrefs_.getInt("Color_" + chName, defaultColor); } return new Color(defaultColor); } public void copyFromLiveModeToAlbum(VirtualAcquisitionDisplay display) throws MMScriptException, JSONException { ImageCache ic = display.getImageCache(); int channels = ic.getSummaryMetadata().getInt("Channels"); if (channels == 1) { //RGB or monchrome addToAlbum(ic.getImage(0, 0, 0, 0), ic.getDisplayAndComments()); } else { //multicamera for (int i = 0; i < channels; i++) { addToAlbum(ic.getImage(i, 0, 0, 0), ic.getDisplayAndComments()); } } } private void createActiveShutterChooser(JPanel topPanel) { createLabel("Shutter", false, topPanel, 111, 73, 158, 86); shutterComboBox_ = new JComboBox(); shutterComboBox_.setName("Shutter"); shutterComboBox_.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent arg0) { try { if (shutterComboBox_.getSelectedItem() != null) { core_.setShutterDevice((String) shutterComboBox_.getSelectedItem()); } } catch (Exception e) { ReportingUtils.showError(e); } } }); GUIUtils.addWithEdges(topPanel, shutterComboBox_, 170, 70, 275, 92); } private void createBinningChooser(JPanel topPanel) { createLabel("Binning", false, topPanel, 111, 43, 199, 64); comboBinning_ = new JComboBox(); comboBinning_.setName("Binning"); comboBinning_.setFont(new Font("Arial", Font.PLAIN, 10)); comboBinning_.setMaximumRowCount(4); comboBinning_.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { changeBinning(); } }); GUIUtils.addWithEdges(topPanel, comboBinning_, 200, 43, 275, 66); } private void createExposureField(JPanel topPanel) { createLabel("Exposure [ms]", false, topPanel, 111, 23, 198, 39); textFieldExp_ = new JTextField(); textFieldExp_.addFocusListener(new FocusAdapter() { @Override public void focusLost(FocusEvent fe) { synchronized(shutdownLock_) { if (core_ != null) setExposure(); } } }); textFieldExp_.setFont(new Font("Arial", Font.PLAIN, 10)); textFieldExp_.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setExposure(); } }); GUIUtils.addWithEdges(topPanel, textFieldExp_, 203, 21, 276, 40); } private void toggleAutoShutter() { shutterLabel_ = core_.getShutterDevice(); if (shutterLabel_.length() == 0) { toggleShutterButton_.setEnabled(false); } else { if (autoShutterCheckBox_.isSelected()) { try { core_.setAutoShutter(true); core_.setShutterOpen(false); toggleShutterButton_.setSelected(false); toggleShutterButton_.setText("Open"); 
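               // While auto-shutter is on the core drives the shutter itself,
               // so the manual toggle button stays disabled.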
toggleShutterButton_.setEnabled(false); } catch (Exception e2) { ReportingUtils.logError(e2); } } else { try { core_.setAutoShutter(false); core_.setShutterOpen(false); toggleShutterButton_.setEnabled(true); toggleShutterButton_.setText("Open"); } catch (Exception exc) { ReportingUtils.logError(exc); } } } } private void createShutterControls(JPanel topPanel) { autoShutterCheckBox_ = new JCheckBox(); autoShutterCheckBox_.setFont(new Font("Arial", Font.PLAIN, 10)); autoShutterCheckBox_.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { toggleAutoShutter(); } }); autoShutterCheckBox_.setIconTextGap(6); autoShutterCheckBox_.setHorizontalTextPosition(SwingConstants.LEADING); autoShutterCheckBox_.setText("Auto shutter"); GUIUtils.addWithEdges(topPanel, autoShutterCheckBox_, 107, 96, 199, 119); toggleShutterButton_ = (JToggleButton) GUIUtils.createButton(true, "toggleShutterButton", "Open", "Open/close the shutter", new Runnable() { public void run() { toggleShutter(); } }, null, topPanel, 203, 96, 275, 117); // Shutter button } private void createCameraSettingsWidgets(JPanel topPanel) { createLabel("Camera settings", true, topPanel, 109, 2, 211, 22); createExposureField(topPanel); createBinningChooser(topPanel); createActiveShutterChooser(topPanel); createShutterControls(topPanel); } private void createConfigurationControls(JPanel topPanel) { createLabel("Configuration settings", true, topPanel, 280, 2, 430, 22); saveConfigButton_ = (JButton) GUIUtils.createButton(false, "saveConfigureButton", "Save", "Save current presets to the configuration file", new Runnable() { public void run() { saveConfigPresets(); } }, null, topPanel, -80, 2, -5, 20); configPad_ = new ConfigGroupPad(); configPadButtonPanel_ = new ConfigPadButtonPanel(); configPadButtonPanel_.setConfigPad(configPad_); configPadButtonPanel_.setGUI(MMStudioMainFrame.getInstance()); configPad_.setFont(new Font("", Font.PLAIN, 10)); GUIUtils.addWithEdges(topPanel, configPad_, 280, 21, -4, -44); GUIUtils.addWithEdges(topPanel, configPadButtonPanel_, 280, -40, -4, -20); } private void createMainButtons(JPanel topPanel) { snapButton_ = (JButton) GUIUtils.createButton(false, "Snap", "Snap", "Snap single image", new Runnable() { public void run() { doSnap(); } }, "camera.png", topPanel, 7, 4, 95, 25); liveButton_ = (JToggleButton) GUIUtils.createButton(true, "Live", "Live", "Continuous live view", new Runnable() { public void run() { enableLiveMode(!isLiveModeOn()); } }, "camera_go.png", topPanel, 7, 26, 95, 47); /* toAlbumButton_ = (JButton) */ GUIUtils.createButton(false, "Album", "Album", "Acquire single frame and add to an album", new Runnable() { public void run() { snapAndAddToImage5D(); } }, "camera_plus_arrow.png", topPanel, 7, 48, 95, 69); /* MDA Button = */ GUIUtils.createButton(false, "Multi-D Acq.", "Multi-D Acq.", "Open multi-dimensional acquisition window", new Runnable() { public void run() { openAcqControlDialog(); } }, "film.png", topPanel, 7, 70, 95, 91); /* Refresh = */ GUIUtils.createButton(false, "Refresh", "Refresh", "Refresh all GUI controls directly from the hardware", new Runnable() { public void run() { core_.updateSystemStateCache(); updateGUI(true); } }, "arrow_refresh.png", topPanel, 7, 92, 95, 113); } private static MetadataPanel createMetadataPanel(JPanel bottomPanel) { MetadataPanel metadataPanel = new MetadataPanel(); GUIUtils.addWithEdges(bottomPanel, metadataPanel, 0, 0, 0, 0); metadataPanel.setBorder(BorderFactory.createEmptyBorder()); return metadataPanel; } 
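   /**
    * Adds the "Please cite Micro-Manager" label to the top panel and opens the
    * citation page in a background thread when the label is clicked.
    */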
private void createPleaLabel(JPanel topPanel) { JLabel citePleaLabel = new JLabel("<html>Please <a href=\"http://micro-manager.org\">cite Micro-Manager</a> so funding will continue!</html>"); citePleaLabel.setFont(new Font("Arial", Font.PLAIN, 11)); GUIUtils.addWithEdges(topPanel, citePleaLabel, 7, 119, 270, 139); class Pleader extends Thread{ Pleader(){ super("pleader"); } @Override public void run(){ try { ij.plugin.BrowserLauncher.openURL("https://micro-manager.org/wiki/Citing_Micro-Manager"); } catch (IOException e1) { ReportingUtils.showError(e1); } } } citePleaLabel.addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { Pleader p = new Pleader(); p.start(); } }); // add a listener to the main ImageJ window to catch it quitting out on us /* * The current version of ImageJ calls the command "Quit", which we * handle in MMStudioPlugin. Calling the closeSequence from here as well * leads to crashes since the core will be cleaned up by one of the two * threads doing the same thing. I do not know since which version of * ImageJ introduced this behavior - NS, 2014-04-26 if (ij.IJ.getInstance() != null) { ij.IJ.getInstance().addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { //closeSequence(true); }; }); } */ } private JSplitPane createSplitPane(int dividerPos) { JPanel topPanel = new JPanel(); JPanel bottomPanel = new JPanel(); topPanel.setLayout(new SpringLayout()); topPanel.setMinimumSize(new Dimension(580, 195)); bottomPanel.setLayout(new SpringLayout()); JSplitPane splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, true, topPanel, bottomPanel); splitPane.setBorder(BorderFactory.createEmptyBorder()); splitPane.setDividerLocation(dividerPos); splitPane.setResizeWeight(0.0); return splitPane; } private void createTopPanelWidgets(JPanel topPanel) { createMainButtons(topPanel); createCameraSettingsWidgets(topPanel); createPleaLabel(topPanel); createUtilityButtons(topPanel); createConfigurationControls(topPanel); labelImageDimensions_ = createLabel("", false, topPanel, 5, -20, 0, 0); } private void createUtilityButtons(JPanel topPanel) { // ROI createLabel("ROI", true, topPanel, 8, 140, 71, 154); setRoiButton_ = GUIUtils.createButton(false, "setRoiButton", null, "Set Region Of Interest to selected rectangle", new Runnable() { public void run() { setROI(); } }, "shape_handles.png", topPanel, 7, 154, 37, 174); clearRoiButton_ = GUIUtils.createButton(false, "clearRoiButton", null, "Reset Region of Interest to full frame", new Runnable() { public void run() { clearROI(); } }, "arrow_out.png", topPanel, 40, 154, 70, 174); // Zoom createLabel("Zoom", true, topPanel, 81, 140, 139, 154); GUIUtils.createButton(false, "zoomInButton", null, "Zoom in", new Runnable() { public void run() { zoomIn(); } }, "zoom_in.png", topPanel, 80, 154, 110, 174); GUIUtils.createButton(false, "zoomOutButton", null, "Zoom out", new Runnable() { public void run() { zoomOut(); } }, "zoom_out.png", topPanel, 113, 154, 143, 174); // Profile createLabel("Profile", true, topPanel, 154, 140, 217, 154); GUIUtils.createButton(false, "lineProfileButton", null, "Open line profile window (requires line selection)", new Runnable() { public void run() { openLineProfileWindow(); } }, "chart_curve.png", topPanel, 153, 154, 183, 174); // Autofocus createLabel("Autofocus", true, topPanel, 194, 140, 276, 154); autofocusNowButton_ = (JButton) GUIUtils.createButton(false, "autofocusNowButton", null, "Autofocus now", new Runnable() { public void run() { 
autofocusNow(); } }, "find.png", topPanel, 193, 154, 223, 174); autofocusConfigureButton_ = (JButton) GUIUtils.createButton(false, "autofocusConfigureButton", null, "Set autofocus options", new Runnable() { public void run() { showAutofocusDialog(); } }, "wrench_orange.png", topPanel, 226, 154, 256, 174); } private void initializeFileMenu() { JMenu fileMenu = GUIUtils.createMenuInMenuBar(menuBar_, "File"); GUIUtils.addMenuItem(fileMenu, "Open (Virtual)...", null, new Runnable() { public void run() { new Thread() { @Override public void run() { openAcquisitionData(false); } }.start(); } }); GUIUtils.addMenuItem(fileMenu, "Open (RAM)...", null, new Runnable() { public void run() { new Thread() { @Override public void run() { openAcquisitionData(true); } }.start(); } }); fileMenu.addSeparator(); GUIUtils.addMenuItem(fileMenu, "Exit", null, new Runnable() { public void run() { closeSequence(false); } }); } private void initializeHelpMenu() { final JMenu helpMenu = GUIUtils.createMenuInMenuBar(menuBar_, "Help"); GUIUtils.addMenuItem(helpMenu, "User's Guide", null, new Runnable() { public void run() { try { ij.plugin.BrowserLauncher.openURL("http://micro-manager.org/wiki/Micro-Manager_User%27s_Guide"); } catch (IOException e1) { ReportingUtils.showError(e1); } } }); GUIUtils.addMenuItem(helpMenu, "Configuration Guide", null, new Runnable() { public void run() { try { ij.plugin.BrowserLauncher.openURL("http://micro-manager.org/wiki/Micro-Manager_Configuration_Guide"); } catch (IOException e1) { ReportingUtils.showError(e1); } } }); if (!systemPrefs_.getBoolean(RegistrationDlg.REGISTRATION, false)) { GUIUtils.addMenuItem(helpMenu, "Register your copy of Micro-Manager...", null, new Runnable() { public void run() { try { RegistrationDlg regDlg = new RegistrationDlg(systemPrefs_); regDlg.setVisible(true); } catch (Exception e1) { ReportingUtils.showError(e1); } } }); } GUIUtils.addMenuItem(helpMenu, "Report Problem...", null, new Runnable() { @Override public void run() { org.micromanager.diagnostics.gui.ProblemReportController.start(core_, options_); } }); GUIUtils.addMenuItem(helpMenu, "About Micromanager", null, new Runnable() { public void run() { MMAboutDlg dlg = new MMAboutDlg(); String versionInfo = "MM Studio version: " + MMVersion.VERSION_STRING; versionInfo += "\n" + core_.getVersionInfo(); versionInfo += "\n" + core_.getAPIVersionInfo(); versionInfo += "\nUser: " + core_.getUserId(); versionInfo += "\nHost: " + core_.getHostName(); dlg.setVersionInfo(versionInfo); dlg.setVisible(true); } }); menuBar_.validate(); } private void initializeToolsMenu() { // Tools menu final JMenu toolsMenu = GUIUtils.createMenuInMenuBar(menuBar_, "Tools"); GUIUtils.addMenuItem(toolsMenu, "Refresh GUI", "Refresh all GUI controls directly from the hardware", new Runnable() { public void run() { core_.updateSystemStateCache(); updateGUI(true); } }, "arrow_refresh.png"); GUIUtils.addMenuItem(toolsMenu, "Rebuild GUI", "Regenerate Micro-Manager user interface", new Runnable() { public void run() { initializeGUI(); core_.updateSystemStateCache(); } }); toolsMenu.addSeparator(); GUIUtils.addMenuItem(toolsMenu, "Image Pipeline...", "Display the image processing pipeline", new Runnable() { public void run() { pipelinePanel_.setVisible(true); } }); GUIUtils.addMenuItem(toolsMenu, "Script Panel...", "Open Micro-Manager script editor window", new Runnable() { public void run() { scriptPanel_.setVisible(true); } }); GUIUtils.addMenuItem(toolsMenu, "Shortcuts...", "Create keyboard shortcuts to activate image acquisition, 
mark positions, or run custom scripts", new Runnable() { public void run() { HotKeysDialog hk = new HotKeysDialog(guiColors_.background.get((options_.displayBackground_))); //hk.setBackground(guiColors_.background.get((options_.displayBackground_))); } }); GUIUtils.addMenuItem(toolsMenu, "Device/Property Browser...", "Open new window to view and edit property values in current configuration", new Runnable() { public void run() { createPropertyEditor(); } }); toolsMenu.addSeparator(); GUIUtils.addMenuItem(toolsMenu, "XY List...", "Open position list manager window", new Runnable() { public void run() { showXYPositionList(); } }, "application_view_list.png"); GUIUtils.addMenuItem(toolsMenu, "Multi-Dimensional Acquisition...", "Open multi-dimensional acquisition setup window", new Runnable() { public void run() { openAcqControlDialog(); } }, "film.png"); centerAndDragMenuItem_ = GUIUtils.addCheckBoxMenuItem(toolsMenu, "Mouse Moves Stage (use Hand Tool)", "When enabled, double clicking or dragging in the snap/live\n" + "window moves the XY-stage. Requires the hand tool.", new Runnable() { public void run() { updateCenterAndDragListener(); IJ.setTool(Toolbar.HAND); mainPrefs_.putBoolean(MOUSE_MOVES_STAGE, centerAndDragMenuItem_.isSelected()); } }, mainPrefs_.getBoolean(MOUSE_MOVES_STAGE, false)); GUIUtils.addMenuItem(toolsMenu, "Pixel Size Calibration...", "Define size calibrations specific to each objective lens. " + "When the objective in use has a calibration defined, " + "micromanager will automatically use it when " + "calculating metadata", new Runnable() { public void run() { createCalibrationListDlg(); } }); /* GUIUtils.addMenuItem(toolsMenu, "Image Processor Manager", "Control the order in which Image Processor plugins" + "are applied to incoming images.", new Runnable() { public void run() { processorStackManager_.show(); } }); */ toolsMenu.addSeparator(); GUIUtils.addMenuItem(toolsMenu, "Hardware Configuration Wizard...", "Open wizard to create new hardware configuration", new Runnable() { public void run() { runHardwareWizard(); } }); GUIUtils.addMenuItem(toolsMenu, "Load Hardware Configuration...", "Un-initialize current configuration and initialize new one", new Runnable() { public void run() { loadConfiguration(); initializeGUI(); } }); GUIUtils.addMenuItem(toolsMenu, "Reload Hardware Configuration", "Shutdown current configuration and initialize most recently loaded configuration", new Runnable() { public void run() { loadSystemConfiguration(); initializeGUI(); } }); for (int i=0; i<5; i++) { JMenuItem configItem = new JMenuItem(); configItem.setText(Integer.toString(i)); switchConfigurationMenu_.add(configItem); } switchConfigurationMenu_.setText("Switch Hardware Configuration"); toolsMenu.add(switchConfigurationMenu_); switchConfigurationMenu_.setToolTipText("Switch between recently used configurations"); GUIUtils.addMenuItem(toolsMenu, "Save Configuration Settings as...", "Save current configuration settings as new configuration file", new Runnable() { public void run() { saveConfigPresets(); updateChannelCombos(); } }); toolsMenu.addSeparator(); final MMStudioMainFrame thisInstance = this; GUIUtils.addMenuItem(toolsMenu, "Options...", "Set a variety of Micro-Manager configuration options", new Runnable() { public void run() { final int oldBufsize = options_.circularBufferSizeMB_; OptionsDlg dlg = new OptionsDlg(options_, core_, mainPrefs_, thisInstance); dlg.setVisible(true); // adjust memory footprint if necessary if (oldBufsize != options_.circularBufferSizeMB_) { try { 
core_.setCircularBufferMemoryFootprint(options_.circularBufferSizeMB_); } catch (Exception exc) { ReportingUtils.showError(exc); } } } }); } private void showRegistrationDialogMaybe() { // show registration dialog if not already registered // first check user preferences (for legacy compatibility reasons) boolean userReg = mainPrefs_.getBoolean(RegistrationDlg.REGISTRATION, false) || mainPrefs_.getBoolean(RegistrationDlg.REGISTRATION_NEVER, false); if (!userReg) { boolean systemReg = systemPrefs_.getBoolean( RegistrationDlg.REGISTRATION, false) || systemPrefs_.getBoolean(RegistrationDlg.REGISTRATION_NEVER, false); if (!systemReg) { // prompt for registration info RegistrationDlg dlg = new RegistrationDlg(systemPrefs_); dlg.setVisible(true); } } } private void updateSwitchConfigurationMenu() { switchConfigurationMenu_.removeAll(); for (final String configFile : MRUConfigFiles_) { if (!configFile.equals(sysConfigFile_)) { GUIUtils.addMenuItem(switchConfigurationMenu_, configFile, null, new Runnable() { public void run() { sysConfigFile_ = configFile; loadSystemConfiguration(); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); } }); } } } public final void addLiveModeListener (LiveModeListener listener) { if (liveModeListeners_.contains(listener)) { return; } liveModeListeners_.add(listener); } public void removeLiveModeListener(LiveModeListener listener) { liveModeListeners_.remove(listener); } public void callLiveModeListeners(boolean enable) { for (LiveModeListener listener : liveModeListeners_) { listener.liveModeEnabled(enable); } } /** * Part of ScriptInterface * Manipulate acquisition so that it looks like a burst */ public void runBurstAcquisition() throws MMScriptException { double interval = engine_.getFrameIntervalMs(); int nr = engine_.getNumFrames(); boolean doZStack = engine_.isZSliceSettingEnabled(); boolean doChannels = engine_.isChannelsSettingEnabled(); engine_.enableZSliceSetting(false); engine_.setFrames(nr, 0); engine_.enableChannelsSetting(false); try { engine_.acquire(); } catch (MMException e) { throw new MMScriptException(e); } engine_.setFrames(nr, interval); engine_.enableZSliceSetting(doZStack); engine_.enableChannelsSetting(doChannels); } public void runBurstAcquisition(int nr) throws MMScriptException { int originalNr = engine_.getNumFrames(); double interval = engine_.getFrameIntervalMs(); engine_.setFrames(nr, 0); this.runBurstAcquisition(); engine_.setFrames(originalNr, interval); } public void runBurstAcquisition(int nr, String name, String root) throws MMScriptException { String originalRoot = engine_.getRootName(); engine_.setDirName(name); engine_.setRootName(root); this.runBurstAcquisition(nr); engine_.setRootName(originalRoot); } /** * @Deprecated * @throws MMScriptException */ public void startBurstAcquisition() throws MMScriptException { runAcquisition(); } public boolean isBurstAcquisitionRunning() throws MMScriptException { if (engine_ == null) return false; return engine_.isAcquisitionRunning(); } private void startLoadingPipelineClass() { Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); acquisitionEngine2010LoadingThread_ = new Thread("Pipeline Class loading thread") { @Override public void run() { try { acquisitionEngine2010Class_ = Class.forName("org.micromanager.AcquisitionEngine2010"); } catch (Exception ex) { ReportingUtils.logError(ex); acquisitionEngine2010Class_ = null; } } }; acquisitionEngine2010LoadingThread_.start(); } /** * Shows images as they appear in the default display window. 
Uses * the default processor stack to process images as they arrive on * the rawImageQueue. */ public void runDisplayThread(BlockingQueue<TaggedImage> rawImageQueue, final DisplayImageRoutine displayImageRoutine) { final BlockingQueue<TaggedImage> processedImageQueue = ProcessorStack.run(rawImageQueue, getAcquisitionEngine().getImageProcessors()); new Thread("Display thread") { @Override public void run() { try { TaggedImage image; do { image = processedImageQueue.take(); if (image != TaggedImageQueue.POISON) { displayImageRoutine.show(image); } } while (image != TaggedImageQueue.POISON); } catch (InterruptedException ex) { ReportingUtils.logError(ex); } } }.start(); } private static JLabel createLabel(String text, boolean big, JPanel parentPanel, int west, int north, int east, int south) { final JLabel label = new JLabel(); label.setFont(new Font("Arial", big ? Font.BOLD : Font.PLAIN, big ? 11 : 10)); label.setText(text); GUIUtils.addWithEdges(parentPanel, label, west, north, east, south); return label; } public interface DisplayImageRoutine { public void show(TaggedImage image); } /** * used to store contrast settings to be later used for initialization of contrast of new windows. * Shouldn't be called by loaded data sets, only * ones that have been acquired */ public void saveChannelHistogramSettings(String channelGroup, String channel, boolean mda, HistogramSettings settings) { String type = mda ? "MDA_" : "SnapLive_"; if (options_.syncExposureMainAndMDA_) { type = ""; //only one group of contrast settings } contrastPrefs_.putInt("ContrastMin_" + channelGroup + "_" + type + channel, settings.min_); contrastPrefs_.putInt("ContrastMax_" + channelGroup + "_" + type + channel, settings.max_); contrastPrefs_.putDouble("ContrastGamma_" + channelGroup + "_" + type + channel, settings.gamma_); contrastPrefs_.putInt("ContrastHistMax_" + channelGroup + "_" + type + channel, settings.histMax_); contrastPrefs_.putInt("ContrastHistDisplayMode_" + channelGroup + "_" + type + channel, settings.displayMode_); } public HistogramSettings loadStoredChannelHisotgramSettings(String channelGroup, String channel, boolean mda) { String type = mda ? 
"MDA_" : "SnapLive_"; if (options_.syncExposureMainAndMDA_) { type = ""; //only one group of contrast settings } return new HistogramSettings( contrastPrefs_.getInt("ContrastMin_" + channelGroup + "_" + type + channel,0), contrastPrefs_.getInt("ContrastMax_" + channelGroup + "_" + type + channel, 65536), contrastPrefs_.getDouble("ContrastGamma_" + channelGroup + "_" + type + channel, 1.0), contrastPrefs_.getInt("ContrastHistMax_" + channelGroup + "_" + type + channel, -1), contrastPrefs_.getInt("ContrastHistDisplayMode_" + channelGroup + "_" + type + channel, 1) ); } private void setExposure() { try { if (!isLiveModeOn()) { core_.setExposure(NumberUtils.displayStringToDouble( textFieldExp_.getText())); } else { liveModeTimer_.stop(); core_.setExposure(NumberUtils.displayStringToDouble( textFieldExp_.getText())); try { liveModeTimer_.begin(); } catch (Exception e) { ReportingUtils.showError("Couldn't restart live mode"); liveModeTimer_.stop(); } } // Display the new exposure time double exposure = core_.getExposure(); textFieldExp_.setText(NumberUtils.doubleToDisplayString(exposure)); // update current channel in MDA window with this exposure String channelGroup = core_.getChannelGroup(); String channel = core_.getCurrentConfigFromCache(channelGroup); if (!channel.equals("") ) { exposurePrefs_.putDouble("Exposure_" + channelGroup + "_" + channel, exposure); if (options_.syncExposureMainAndMDA_) { getAcqDlg().setChannelExposureTime(channelGroup, channel, exposure); } } } catch (Exception exp) { // Do nothing. } } public double getPreferredWindowMag() { return options_.windowMag_; } public boolean getMetadataFileWithMultipageTiff() { return options_.mpTiffMetadataFile_; } public boolean getSeparateFilesForPositionsMPTiff() { return options_.mpTiffSeparateFilesForPositions_; } @Override public boolean getHideMDADisplayOption() { return options_.hideMDADisplay_; } private void updateTitle() { this.setTitle(MICRO_MANAGER_TITLE + " " + MMVersion.VERSION_STRING + " - " + sysConfigFile_); } public void updateLineProfile() { if (WindowManager.getCurrentWindow() == null || profileWin_ == null || !profileWin_.isShowing()) { return; } calculateLineProfileData(WindowManager.getCurrentImage()); profileWin_.setData(lineProfileData_); } private void openLineProfileWindow() { if (WindowManager.getCurrentWindow() == null || WindowManager.getCurrentWindow().isClosed()) { return; } calculateLineProfileData(WindowManager.getCurrentImage()); if (lineProfileData_ == null) { return; } profileWin_ = new GraphFrame(); profileWin_.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); profileWin_.setData(lineProfileData_); profileWin_.setAutoScale(); profileWin_.setTitle("Live line profile"); profileWin_.setBackground(guiColors_.background.get((options_.displayBackground_))); addMMBackgroundListener(profileWin_); profileWin_.setVisible(true); } @Override public Rectangle getROI() throws MMScriptException { // ROI values are given as x,y,w,h in individual one-member arrays (pointers in C++): int[][] a = new int[4][1]; try { core_.getROI(a[0], a[1], a[2], a[3]); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } // Return as a single array with x,y,w,h: return new Rectangle(a[0][0], a[1][0], a[2][0], a[3][0]); } private void calculateLineProfileData(ImagePlus imp) { // generate line profile Roi roi = imp.getRoi(); if (roi == null || !roi.isLine()) { // if there is no line ROI, create one Rectangle r = imp.getProcessor().getRoi(); int iWidth = r.width; int iHeight = r.height; int iXROI = r.x; int iYROI = 
r.y; if (roi == null) { iXROI += iWidth / 2; iYROI += iHeight / 2; } roi = new Line(iXROI - iWidth / 4, iYROI - iWidth / 4, iXROI + iWidth / 4, iYROI + iHeight / 4); imp.setRoi(roi); roi = imp.getRoi(); } ImageProcessor ip = imp.getProcessor(); ip.setInterpolate(true); Line line = (Line) roi; if (lineProfileData_ == null) { lineProfileData_ = new GraphData(); } lineProfileData_.setData(line.getPixels()); } private void setROI() { ImagePlus curImage = WindowManager.getCurrentImage(); if (curImage == null) { return; } Roi roi = curImage.getRoi(); try { if (roi == null) { // if there is no ROI, create one Rectangle r = curImage.getProcessor().getRoi(); int iWidth = r.width; int iHeight = r.height; int iXROI = r.x; int iYROI = r.y; if (roi == null) { iWidth /= 2; iHeight /= 2; iXROI += iWidth / 2; iYROI += iHeight / 2; } curImage.setRoi(iXROI, iYROI, iWidth, iHeight); roi = curImage.getRoi(); } if (roi.getType() != Roi.RECTANGLE) { handleError("ROI must be a rectangle.\nUse the ImageJ rectangle tool to draw the ROI."); return; } Rectangle r = roi.getBounds(); // if we already had an ROI defined, correct for the offsets Rectangle cameraR = getROI(); r.x += cameraR.x; r.y += cameraR.y; // Stop (and restart) live mode if it is running setROI(r); } catch (Exception e) { ReportingUtils.showError(e); } } private void clearROI() { try { boolean liveRunning = false; if (isLiveModeOn()) { liveRunning = true; enableLiveMode(false); } core_.clearROI(); updateStaticInfo(); if (liveRunning) { enableLiveMode(true); } } catch (Exception e) { ReportingUtils.showError(e); } } /** * Returns instance of the core uManager object; */ @Override public CMMCore getMMCore() { return core_; } /** * Returns singleton instance of MMStudioMainFrame */ public static MMStudioMainFrame getInstance() { return gui_; } public MetadataPanel getMetadataPanel() { return metadataPanel_; } public final void setExitStrategy(boolean closeOnExit) { if (closeOnExit) { setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); } else { setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); } } @Override public void saveConfigPresets() { MicroscopeModel model = new MicroscopeModel(); try { model.loadFromFile(sysConfigFile_); model.createSetupConfigsFromHardware(core_); model.createResolutionsFromHardware(core_); File f = FileDialogs.save(this, "Save the configuration file", MM_CONFIG_FILE); if (f != null) { model.saveToFile(f.getAbsolutePath()); sysConfigFile_ = f.getAbsolutePath(); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); configChanged_ = false; setConfigSaveButtonStatus(configChanged_); updateTitle(); } } catch (MMConfigFileException e) { ReportingUtils.showError(e); } } protected void setConfigSaveButtonStatus(boolean changed) { saveConfigButton_.setEnabled(changed); } public String getAcqDirectory() { return openAcqDirectory_; } /** * Get currently used configuration file * @return - Path to currently used configuration file */ public String getSysConfigFile() { return sysConfigFile_; } public void setAcqDirectory(String dir) { openAcqDirectory_ = dir; } /** * Open an existing acquisition directory and build viewer window. 
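    * Prompts the user to select a data set directory; with inRAM set to true the
    * data set is loaded into memory ("Open (RAM)" in the File menu), otherwise it
    * is opened as a disk-backed virtual set ("Open (Virtual)").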
* */ public void openAcquisitionData(boolean inRAM) { // choose the directory // -------------------- File f = FileDialogs.openDir(this, "Please select an image data set", MM_DATA_SET); if (f != null) { if (f.isDirectory()) { openAcqDirectory_ = f.getAbsolutePath(); } else { openAcqDirectory_ = f.getParent(); } String acq = null; try { acq = openAcquisitionData(openAcqDirectory_, inRAM); } catch (MMScriptException ex) { ReportingUtils.showError(ex); } finally { try { acqMgr_.closeAcquisition(acq); } catch (MMScriptException ex) { ReportingUtils.logError(ex); } } } } @Override public String openAcquisitionData(String dir, boolean inRAM, boolean show) throws MMScriptException { String rootDir = new File(dir).getAbsolutePath(); String name = new File(dir).getName(); rootDir = rootDir.substring(0, rootDir.length() - (name.length() + 1)); name = acqMgr_.getUniqueAcquisitionName(name); acqMgr_.openAcquisition(name, rootDir, show, !inRAM, true); try { getAcquisition(name).initialize(); } catch (MMScriptException mex) { acqMgr_.closeAcquisition(name); throw (mex); } return name; } /** * Opens an existing data set. Shows the acquisition in a window. * @return The acquisition object. */ @Override public String openAcquisitionData(String dir, boolean inRam) throws MMScriptException { return openAcquisitionData(dir, inRam, true); } protected void zoomOut() { ImageWindow curWin = WindowManager.getCurrentWindow(); if (curWin != null) { ImageCanvas canvas = curWin.getCanvas(); Rectangle r = canvas.getBounds(); canvas.zoomOut(r.width / 2, r.height / 2); VirtualAcquisitionDisplay vad = VirtualAcquisitionDisplay.getDisplay(curWin.getImagePlus()); if (vad != null) { vad.storeWindowSizeAfterZoom(curWin); vad.updateWindowTitleAndStatus(); } } } protected void zoomIn() { ImageWindow curWin = WindowManager.getCurrentWindow(); if (curWin != null) { ImageCanvas canvas = curWin.getCanvas(); Rectangle r = canvas.getBounds(); canvas.zoomIn(r.width / 2, r.height / 2); VirtualAcquisitionDisplay vad = VirtualAcquisitionDisplay.getDisplay(curWin.getImagePlus()); if (vad != null) { vad.storeWindowSizeAfterZoom(curWin); vad.updateWindowTitleAndStatus(); } } } protected void changeBinning() { try { boolean liveRunning = false; if (isLiveModeOn() ) { liveRunning = true; enableLiveMode(false); } if (isCameraAvailable()) { Object item = comboBinning_.getSelectedItem(); if (item != null) { core_.setProperty(cameraLabel_, MMCoreJ.getG_Keyword_Binning(), item.toString()); } } updateStaticInfo(); if (liveRunning) { enableLiveMode(true); } } catch (Exception e) { ReportingUtils.showError(e); } } private void createPropertyEditor() { if (propertyBrowser_ != null) { propertyBrowser_.dispose(); } propertyBrowser_ = new PropertyEditor(); propertyBrowser_.setGui(this); propertyBrowser_.setVisible(true); propertyBrowser_.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); propertyBrowser_.setCore(core_); } private void createCalibrationListDlg() { if (calibrationListDlg_ != null) { calibrationListDlg_.dispose(); } calibrationListDlg_ = new CalibrationListDlg(core_); calibrationListDlg_.setVisible(true); calibrationListDlg_.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); calibrationListDlg_.setParentGUI(this); } public CalibrationListDlg getCalibrationListDlg() { if (calibrationListDlg_ == null) { createCalibrationListDlg(); } return calibrationListDlg_; } private void createScriptPanel() { if (scriptPanel_ == null) { scriptPanel_ = new ScriptPanel(core_, options_, this); scriptPanel_.insertScriptingObject(SCRIPT_CORE_OBJECT, core_); 
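      // Scripts typed into the Script Panel can use the core and engine bindings
      // registered here and just below. Illustration only: the actual binding
      // names come from the SCRIPT_*_OBJECT constants (assumed here to be
      // "mmc" and "acq"):
      //    mmc.snapImage();
      //    print(acq.isAcquisitionRunning());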
scriptPanel_.insertScriptingObject(SCRIPT_ACQENG_OBJECT, engine_); scriptPanel_.setParentGUI(this); scriptPanel_.setBackground(guiColors_.background.get((options_.displayBackground_))); addMMBackgroundListener(scriptPanel_); } } private void createPipelinePanel() { if (pipelinePanel_ == null) { pipelinePanel_ = new PipelinePanel(this, engine_); pipelinePanel_.setBackground(guiColors_.background.get((options_.displayBackground_))); addMMBackgroundListener(pipelinePanel_); } } /** * Updates Status line in main window from cached values */ private void updateStaticInfoFromCache() { String dimText = "Image info (from camera): " + staticInfo_.width_ + " X " + staticInfo_.height_ + " X " + staticInfo_.bytesPerPixel_ + ", Intensity range: " + staticInfo_.imageBitDepth_ + " bits"; dimText += ", " + TextUtils.FMT0.format(staticInfo_.pixSizeUm_ * 1000) + "nm/pix"; if (zStageLabel_.length() > 0) { dimText += ", Z=" + TextUtils.FMT2.format(staticInfo_.zPos_) + "um"; } if (xyStageLabel_.length() > 0) { dimText += ", XY=(" + TextUtils.FMT2.format(staticInfo_.x_) + "," + TextUtils.FMT2.format(staticInfo_.y_) + ")um"; } labelImageDimensions_.setText(dimText); } public void updateXYPos(double x, double y) { staticInfo_.x_ = x; staticInfo_.y_ = y; updateStaticInfoFromCache(); } public void updateZPos(double z) { staticInfo_.zPos_ = z; updateStaticInfoFromCache(); } public void updateXYPosRelative(double x, double y) { staticInfo_.x_ += x; staticInfo_.y_ += y; updateStaticInfoFromCache(); } public void updateZPosRelative(double z) { staticInfo_.zPos_ += z; updateStaticInfoFromCache(); } public void updateXYStagePosition(){ double x[] = new double[1]; double y[] = new double[1]; try { if (xyStageLabel_.length() > 0) core_.getXYPosition(xyStageLabel_, x, y); } catch (Exception e) { ReportingUtils.showError(e); } staticInfo_.x_ = x[0]; staticInfo_.y_ = y[0]; updateStaticInfoFromCache(); } private void updatePixSizeUm (double pixSizeUm) { staticInfo_.pixSizeUm_ = pixSizeUm; updateStaticInfoFromCache(); } private void updateStaticInfo() { double zPos = 0.0; double x[] = new double[1]; double y[] = new double[1]; try { if (zStageLabel_.length() > 0) { zPos = core_.getPosition(zStageLabel_); } if (xyStageLabel_.length() > 0) { core_.getXYPosition(xyStageLabel_, x, y); } } catch (Exception e) { handleException(e); } staticInfo_.width_ = core_.getImageWidth(); staticInfo_.height_ = core_.getImageHeight(); staticInfo_.bytesPerPixel_ = core_.getBytesPerPixel(); staticInfo_.imageBitDepth_ = core_.getImageBitDepth(); staticInfo_.pixSizeUm_ = core_.getPixelSizeUm(); staticInfo_.zPos_ = zPos; staticInfo_.x_ = x[0]; staticInfo_.y_ = y[0]; updateStaticInfoFromCache(); } public void toggleShutter() { try { if (!toggleShutterButton_.isEnabled()) return; toggleShutterButton_.requestFocusInWindow(); if (toggleShutterButton_.getText().equals("Open")) { setShutterButton(true); core_.setShutterOpen(true); } else { core_.setShutterOpen(false); setShutterButton(false); } } catch (Exception e1) { ReportingUtils.showError(e1); } } private void updateCenterAndDragListener() { if (centerAndDragMenuItem_.isSelected()) { centerAndDragListener_.start(); } else { centerAndDragListener_.stop(); } } private void setShutterButton(boolean state) { if (state) { toggleShutterButton_.setText("Close"); } else { toggleShutterButton_.setText("Open"); } } private void checkPosListDlg() { if (posListDlg_ == null) { posListDlg_ = new PositionListDlg(core_, this, posList_, acqControlWin_,options_); GUIUtils.recallPosition(posListDlg_); 
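         // Keep the dialog's colors in sync with the main window background
         // (registered via addMMBackgroundListener below).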
posListDlg_.setBackground(gui_.getBackgroundColor()); gui_.addMMBackgroundListener(posListDlg_); posListDlg_.addListeners(); } } // ////////////////////////////////////////////////////////////////////////// // public interface available for scripting access // ////////////////////////////////////////////////////////////////////////// @Override public void snapSingleImage() { doSnap(); } public Object getPixels() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null) { return ip.getProcessor().getPixels(); } return null; } public void setPixels(Object obj) { ImagePlus ip = WindowManager.getCurrentImage(); if (ip == null) { return; } ip.getProcessor().setPixels(obj); } public int getImageHeight() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null) return ip.getHeight(); return 0; } public int getImageWidth() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null) return ip.getWidth(); return 0; } public int getImageDepth() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null) return ip.getBitDepth(); return 0; } public ImageProcessor getImageProcessor() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip == null) return null; return ip.getProcessor(); } private boolean isCameraAvailable() { return cameraLabel_.length() > 0; } /** * Part of ScriptInterface API * Opens the XYPositionList when it is not opened * Adds the current position to the list (same as pressing the "Mark" button) */ @Override public void markCurrentPosition() { if (posListDlg_ == null) { showXYPositionList(); } if (posListDlg_ != null) { posListDlg_.markPosition(); } } /** * Implements ScriptInterface */ @Override @Deprecated public AcqControlDlg getAcqDlg() { return acqControlWin_; } /** * Implements ScriptInterface */ @Override @Deprecated public PositionListDlg getXYPosListDlg() { checkPosListDlg(); return posListDlg_; } /** * Implements ScriptInterface */ @Override public boolean isAcquisitionRunning() { if (engine_ == null) return false; return engine_.isAcquisitionRunning(); } /** * Implements ScriptInterface */ @Override public boolean versionLessThan(String version) throws MMScriptException { try { String[] v = MMVersion.VERSION_STRING.split(" ", 2); String[] m = v[0].split("\\.", 3); String[] v2 = version.split(" ", 2); String[] m2 = v2[0].split("\\.", 3); for (int i=0; i < 3; i++) { if (Integer.parseInt(m[i]) < Integer.parseInt(m2[i])) { ReportingUtils.showError("This code needs Micro-Manager version " + version + " or greater"); return true; } if (Integer.parseInt(m[i]) > Integer.parseInt(m2[i])) { return false; } } if (v2.length < 2 || v2[1].equals("") ) return false; if (v.length < 2 ) { ReportingUtils.showError("This code needs Micro-Manager version " + version + " or greater"); return true; } if (Integer.parseInt(v[1]) < Integer.parseInt(v2[1])) { ReportingUtils.showError("This code needs Micro-Manager version " + version + " or greater"); return false; } return true; } catch (Exception ex) { throw new MMScriptException ("Format of version String should be \"a.b.c\""); } } @Override public boolean isLiveModeOn() { return liveModeTimer_ != null && liveModeTimer_.isRunning(); } public LiveModeTimer getLiveModeTimer() { if (liveModeTimer_ == null) { liveModeTimer_ = new LiveModeTimer(); } return liveModeTimer_; } public void updateButtonsForLiveMode(boolean enable) { autoShutterCheckBox_.setEnabled(!enable); if (core_.getAutoShutter()) { toggleShutterButton_.setText(enable ? 
"Close" : "Open" ); } snapButton_.setEnabled(!enable); //toAlbumButton_.setEnabled(!enable); liveButton_.setIcon(enable ? SwingResourceManager.getIcon(MMStudioMainFrame.class, "/org/micromanager/icons/cancel.png") : SwingResourceManager.getIcon(MMStudioMainFrame.class, "/org/micromanager/icons/camera_go.png")); liveButton_.setSelected(false); liveButton_.setText(enable ? "Stop Live" : "Live"); } public boolean getLiveMode() { return isLiveModeOn(); } public boolean updateImage() { try { if (isLiveModeOn()) { enableLiveMode(false); return true; // nothing to do, just show the last image } if (WindowManager.getCurrentWindow() == null) { return false; } ImagePlus ip = WindowManager.getCurrentImage(); core_.snapImage(); Object img = core_.getImage(); ip.getProcessor().setPixels(img); ip.updateAndRepaintWindow(); if (!isCurrentImageFormatSupported()) { return false; } updateLineProfile(); } catch (Exception e) { ReportingUtils.showError(e); return false; } return true; } public boolean displayImage(final Object pixels) { if (pixels instanceof TaggedImage) { return displayTaggedImage((TaggedImage) pixels, true); } else { return displayImage(pixels, true); } } public boolean displayImage(final Object pixels, boolean wait) { checkSimpleAcquisition(); try { int width = getAcquisition(SIMPLE_ACQ).getWidth(); int height = getAcquisition(SIMPLE_ACQ).getHeight(); int byteDepth = getAcquisition(SIMPLE_ACQ).getByteDepth(); TaggedImage ti = ImageUtils.makeTaggedImage(pixels, 0, 0, 0,0, width, height, byteDepth); simpleDisplay_.getImageCache().putImage(ti); simpleDisplay_.showImage(ti, wait); return true; } catch (Exception ex) { ReportingUtils.showError(ex); return false; } } public boolean displayImageWithStatusLine(Object pixels, String statusLine) { boolean ret = displayImage(pixels); simpleDisplay_.displayStatusLine(statusLine); return ret; } public void displayStatusLine(String statusLine) { ImagePlus ip = WindowManager.getCurrentImage(); if (!(ip.getWindow() instanceof VirtualAcquisitionDisplay.DisplayWindow)) { return; } VirtualAcquisitionDisplay.getDisplay(ip).displayStatusLine(statusLine); } private boolean isCurrentImageFormatSupported() { boolean ret = false; long channels = core_.getNumberOfComponents(); long bpp = core_.getBytesPerPixel(); if (channels > 1 && channels != 4 && bpp != 1) { handleError("Unsupported image format."); } else { ret = true; } return ret; } public void doSnap() { doSnap(false); } public void doSnap(final boolean album) { if (core_.getCameraDevice().length() == 0) { ReportingUtils.showError("No camera configured"); return; } BlockingQueue<TaggedImage> snapImageQueue = new LinkedBlockingQueue<TaggedImage>(); try { core_.snapImage(); long c = core_.getNumberOfCameraChannels(); runDisplayThread(snapImageQueue, new DisplayImageRoutine() { @Override public void show(final TaggedImage image) { if (album) { try { addToAlbum(image); } catch (MMScriptException ex) { ReportingUtils.showError(ex); } } else { displayImage(image); } } }); for (int i = 0; i < c; ++i) { TaggedImage img = core_.getTaggedImage(i); img.tags.put("Channels", c); snapImageQueue.put(img); } snapImageQueue.put(TaggedImageQueue.POISON); if (simpleDisplay_ != null) { ImagePlus imgp = simpleDisplay_.getImagePlus(); if (imgp != null) { ImageWindow win = imgp.getWindow(); if (win != null) { win.toFront(); } } } } catch (Exception ex) { ReportingUtils.showError(ex); } } /** * Is this function still needed? It does some magic with tags. 
I found * it to do harmful thing with tags when a Multi-Camera device is * present (that issue is now fixed). */ public void normalizeTags(TaggedImage ti) { if (ti != TaggedImageQueue.POISON) { int channel = 0; try { if (ti.tags.has("ChannelIndex")) { channel = MDUtils.getChannelIndex(ti.tags); } MDUtils.setChannelIndex(ti.tags, channel); MDUtils.setPositionIndex(ti.tags, 0); MDUtils.setSliceIndex(ti.tags, 0); MDUtils.setFrameIndex(ti.tags, 0); } catch (JSONException ex) { ReportingUtils.logError(ex); } } } private boolean displayTaggedImage(TaggedImage ti, boolean update) { try { checkSimpleAcquisition(ti); setCursor(new Cursor(Cursor.WAIT_CURSOR)); ti.tags.put("Summary", getAcquisition(SIMPLE_ACQ).getSummaryMetadata()); addStagePositionToTags(ti); addImage(SIMPLE_ACQ, ti, update, true); } catch (Exception ex) { ReportingUtils.logError(ex); return false; } if (update) { setCursor(new Cursor(Cursor.DEFAULT_CURSOR)); updateLineProfile(); } return true; } public void addStagePositionToTags(TaggedImage ti) throws JSONException { if (gui_.xyStageLabel_.length() > 0) { ti.tags.put("XPositionUm", gui_.staticInfo_.x_); ti.tags.put("YPositionUm", gui_.staticInfo_.y_); } if (gui_.zStageLabel_.length() > 0) { ti.tags.put("ZPositionUm", gui_.staticInfo_.zPos_); } } private void configureBinningCombo() throws Exception { if (cameraLabel_.length() > 0) { ActionListener[] listeners; // binning combo if (comboBinning_.getItemCount() > 0) { comboBinning_.removeAllItems(); } StrVector binSizes = core_.getAllowedPropertyValues( cameraLabel_, MMCoreJ.getG_Keyword_Binning()); listeners = comboBinning_.getActionListeners(); for (int i = 0; i < listeners.length; i++) { comboBinning_.removeActionListener(listeners[i]); } for (int i = 0; i < binSizes.size(); i++) { comboBinning_.addItem(binSizes.get(i)); } comboBinning_.setMaximumRowCount((int) binSizes.size()); if (binSizes.isEmpty()) { comboBinning_.setEditable(true); } else { comboBinning_.setEditable(false); } for (int i = 0; i < listeners.length; i++) { comboBinning_.addActionListener(listeners[i]); } } } public void initializeGUI() { try { // establish device roles cameraLabel_ = core_.getCameraDevice(); shutterLabel_ = core_.getShutterDevice(); zStageLabel_ = core_.getFocusDevice(); xyStageLabel_ = core_.getXYStageDevice(); engine_.setZStageDevice(zStageLabel_); configureBinningCombo(); // active shutter combo try { shutters_ = core_.getLoadedDevicesOfType(DeviceType.ShutterDevice); } catch (Exception e) { ReportingUtils.logError(e); } if (shutters_ != null) { String items[] = new String[(int) shutters_.size()]; for (int i = 0; i < shutters_.size(); i++) { items[i] = shutters_.get(i); } GUIUtils.replaceComboContents(shutterComboBox_, items); String activeShutter = core_.getShutterDevice(); if (activeShutter != null) { shutterComboBox_.setSelectedItem(activeShutter); } else { shutterComboBox_.setSelectedItem(""); } } // Autofocus autofocusConfigureButton_.setEnabled(afMgr_.getDevice() != null); autofocusNowButton_.setEnabled(afMgr_.getDevice() != null); // Rebuild stage list in XY PositinList if (posListDlg_ != null) { posListDlg_.rebuildAxisList(); } updateGUI(true); } catch (Exception e) { ReportingUtils.showError(e); } } /** * Adds plugin_ items to the plugins menu * Adds submenus (currently only 1 level deep) * @param plugin_ - plugin_ to be added to the menu */ public void addPluginToMenu(final PluginLoader.PluginItem plugin) { List<String> path = plugin.getMenuPath(); if (path.size() == 1) { GUIUtils.addMenuItem(pluginMenu_, plugin.getMenuItem(), 
plugin.getTooltip(), new Runnable() { public void run() { displayPlugin(plugin); } }); } if (path.size() == 2) { if (pluginSubMenus_ == null) { pluginSubMenus_ = new HashMap<String, JMenu>(); } String groupName = path.get(0); JMenu submenu = pluginSubMenus_.get(groupName); if (submenu == null) { submenu = new JMenu(groupName); pluginSubMenus_.put(groupName, submenu); submenu.validate(); pluginMenu_.add(submenu); } GUIUtils.addMenuItem(submenu, plugin.getMenuItem(), plugin.getTooltip(), new Runnable() { public void run() { displayPlugin(plugin); } }); } pluginMenu_.validate(); menuBar_.validate(); } // Handle a plugin being selected from the Plugins menu. private static void displayPlugin(final PluginLoader.PluginItem plugin) { ReportingUtils.logMessage("Plugin command: " + plugin.getMenuItem()); plugin.instantiate(); switch (plugin.getPluginType()) { case PLUGIN_STANDARD: // Standard plugin; create its UI. ((MMPlugin) plugin.getPlugin()).show(); break; case PLUGIN_PROCESSOR: // Processor plugin; check for existing processor of // this type and show its UI if applicable; otherwise // create a new one. MMProcessorPlugin procPlugin = (MMProcessorPlugin) plugin.getPlugin(); String procName = PluginLoader.getNameForPluginClass(procPlugin.getClass()); DataProcessor<TaggedImage> pipelineProcessor = gui_.engine_.getProcessorRegisteredAs(procName); if (pipelineProcessor == null) { // No extant processor of this type; make a new one, // which automatically adds it to the pipeline. pipelineProcessor = gui_.engine_.makeProcessor(procName, gui_); } if (pipelineProcessor != null) { // Show the GUI for this processor. The extra null check is // because making the processor (above) could have failed. pipelineProcessor.makeConfigurationGUI(); } break; default: // Unrecognized plugin type; just skip it. ReportingUtils.logError("Unrecognized plugin type " + plugin.getPluginType()); } } public void updateGUI(boolean updateConfigPadStructure) { updateGUI(updateConfigPadStructure, false); } public void updateGUI(boolean updateConfigPadStructure, boolean fromCache) { try { // establish device roles cameraLabel_ = core_.getCameraDevice(); shutterLabel_ = core_.getShutterDevice(); zStageLabel_ = core_.getFocusDevice(); xyStageLabel_ = core_.getXYStageDevice(); afMgr_.refresh(); // camera settings if (isCameraAvailable()) { double exp = core_.getExposure(); textFieldExp_.setText(NumberUtils.doubleToDisplayString(exp)); configureBinningCombo(); String binSize; if (fromCache) { binSize = core_.getPropertyFromCache(cameraLabel_, MMCoreJ.getG_Keyword_Binning()); } else { binSize = core_.getProperty(cameraLabel_, MMCoreJ.getG_Keyword_Binning()); } GUIUtils.setComboSelection(comboBinning_, binSize); } if (liveModeTimer_ == null || !liveModeTimer_.isRunning()) { autoShutterCheckBox_.setSelected(core_.getAutoShutter()); boolean shutterOpen = core_.getShutterOpen(); setShutterButton(shutterOpen); if (autoShutterCheckBox_.isSelected()) { toggleShutterButton_.setEnabled(false); } else { toggleShutterButton_.setEnabled(true); } } // active shutter combo if (shutters_ != null) { String activeShutter = core_.getShutterDevice(); if (activeShutter != null) { shutterComboBox_.setSelectedItem(activeShutter); } else { shutterComboBox_.setSelectedItem(""); } } // state devices if (updateConfigPadStructure && (configPad_ != null)) { configPad_.refreshStructure(fromCache); // Needed to update read-only properties. May slow things down... 
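            // only hit the hardware when the caller did not ask for a cache-based refresh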
if (!fromCache) core_.updateSystemStateCache(); } // update Channel menus in Multi-dimensional acquisition dialog updateChannelCombos(); // update list of pixel sizes in pixel size configuration window if (calibrationListDlg_ != null) { calibrationListDlg_.refreshCalibrations(); } if (propertyBrowser_ != null) { propertyBrowser_.refresh(); } } catch (Exception e) { ReportingUtils.logError(e); } updateStaticInfo(); updateTitle(); } //TODO: Deprecated @Override public boolean okToAcquire() { return !isLiveModeOn(); } //TODO: Deprecated @Override public void stopAllActivity() { if (this.acquisitionEngine2010_ != null) { this.acquisitionEngine2010_.stop(); } enableLiveMode(false); } /** * Cleans up resources while shutting down * * @param calledByImageJ * @return flag indicating success. Shut down should abort when flag is false */ private boolean cleanupOnClose(boolean calledByImageJ) { // Save config presets if they were changed. if (configChanged_) { Object[] options = {"Yes", "No"}; int n = JOptionPane.showOptionDialog(null, "Save Changed Configuration?", "Micro-Manager", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0]); if (n == JOptionPane.YES_OPTION) { saveConfigPresets(); // if the configChanged_ flag did not become false, the user // must have cancelled the configuration saving and we should cancel // quitting as well if (configChanged_) { return false; } } } if (liveModeTimer_ != null) liveModeTimer_.stop(); // check needed to avoid deadlock if (!calledByImageJ) { if (!WindowManager.closeAllWindows()) { core_.logMessage("Failed to close some windows"); } } if (profileWin_ != null) { removeMMBackgroundListener(profileWin_); profileWin_.dispose(); } if (scriptPanel_ != null) { removeMMBackgroundListener(scriptPanel_); scriptPanel_.closePanel(); } if (pipelinePanel_ != null) { removeMMBackgroundListener(pipelinePanel_); pipelinePanel_.dispose(); } if (propertyBrowser_ != null) { removeMMBackgroundListener(propertyBrowser_); propertyBrowser_.dispose(); } if (acqControlWin_ != null) { removeMMBackgroundListener(acqControlWin_); acqControlWin_.close(); } if (engine_ != null) { engine_.shutdown(); } if (afMgr_ != null) { afMgr_.closeOptionsDialog(); } engine_.disposeProcessors(); pluginLoader_.disposePlugins(); synchronized (shutdownLock_) { try { if (core_ != null) { ReportingUtils.setCore(null); core_.delete(); core_ = null; } } catch (Exception err) { ReportingUtils.showError(err); } } return true; } private void saveSettings() { Rectangle r = this.getBounds(); mainPrefs_.putInt(MAIN_FRAME_X, r.x); mainPrefs_.putInt(MAIN_FRAME_Y, r.y); mainPrefs_.putInt(MAIN_FRAME_WIDTH, r.width); mainPrefs_.putInt(MAIN_FRAME_HEIGHT, r.height); mainPrefs_.putInt(MAIN_FRAME_DIVIDER_POS, this.splitPane_.getDividerLocation()); mainPrefs_.put(OPEN_ACQ_DIR, openAcqDirectory_); mainPrefs_.put(MAIN_SAVE_METHOD, ImageUtils.getImageStorageClass().getName()); // save field values from the main window // NOTE: automatically restoring these values on startup may cause // problems mainPrefs_.put(MAIN_EXPOSURE, textFieldExp_.getText()); // NOTE: do not save auto shutter state if (afMgr_ != null && afMgr_.getDevice() != null) { mainPrefs_.put(AUTOFOCUS_DEVICE, afMgr_.getDevice().getDeviceName()); } } private void loadConfiguration() { File f = FileDialogs.openFile(this, "Load a config file",MM_CONFIG_FILE); if (f != null) { sysConfigFile_ = f.getAbsolutePath(); configChanged_ = false; setConfigSaveButtonStatus(configChanged_); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); 
loadSystemConfiguration(); } } public synchronized boolean closeSequence(boolean calledByImageJ) { if (!this.isRunning()) { if (core_ != null) { core_.logMessage("MMStudioMainFrame::closeSequence called while running_ is false"); } return true; } if (engine_ != null && engine_.isAcquisitionRunning()) { int result = JOptionPane.showConfirmDialog( this, "Acquisition in progress. Are you sure you want to exit and discard all data?", "Micro-Manager", JOptionPane.YES_NO_OPTION, JOptionPane.INFORMATION_MESSAGE); if (result == JOptionPane.NO_OPTION) { return false; } } stopAllActivity(); try { // Close all image windows associated with MM. Canceling saving of // any of these should abort shutdown if (!acqMgr_.closeAllImageWindows()) { return false; } } catch (MMScriptException ex) { // Not sure what to do here... } if (!cleanupOnClose(calledByImageJ)) { return false; } running_ = false; saveSettings(); try { configPad_.saveSettings(); options_.saveSettings(); hotKeys_.saveSettings(); } catch (NullPointerException e) { if (core_ != null) this.logError(e); } // disposing sometimes hangs ImageJ! // this.dispose(); if (options_.closeOnExit_) { if (!runsAsPlugin_) { System.exit(0); } else { ImageJ ij = IJ.getInstance(); if (ij != null) { ij.quit(); } } } else { this.dispose(); } return true; } /* public void applyContrastSettings(ContrastSettings contrast8, ContrastSettings contrast16) { ImagePlus img = WindowManager.getCurrentImage(); if (img == null|| VirtualAcquisitionDisplay.getDisplay(img) == null ) return; if (img.getBytesPerPixel() == 1) VirtualAcquisitionDisplay.getDisplay(img).setChannelContrast(0, contrast8.min, contrast8.max, contrast8.gamma); else VirtualAcquisitionDisplay.getDisplay(img).setChannelContrast(0, contrast16.min, contrast16.max, contrast16.gamma); } */ //TODO: Deprecated @Override public ContrastSettings getContrastSettings() { ImagePlus img = WindowManager.getCurrentImage(); if (img == null || VirtualAcquisitionDisplay.getDisplay(img) == null ) return null; return VirtualAcquisitionDisplay.getDisplay(img).getChannelContrastSettings(0); } /* public boolean is16bit() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null && ip.getProcessor() instanceof ShortProcessor) { return true; } return false; } * */ public boolean isRunning() { return running_; } /** * Executes the startup Beanshell script. The script interpreter has the core (mmc), * acquisition engine (acq), and GUI (gui) objects bound. */ private void executeStartupScript() { // execute startup script File f = new File(startupScriptFile_); if (startupScriptFile_.length() > 0 && f.exists()) { WaitDialog waitDlg = new WaitDialog( "Executing startup script, please wait..."); waitDlg.showDialog(); Interpreter interp = new Interpreter(); try { // bind the core, acquisition engine, and GUI objects interp.set(SCRIPT_CORE_OBJECT, core_); interp.set(SCRIPT_ACQENG_OBJECT, engine_); interp.set(SCRIPT_GUI_OBJECT, this); // read text file and evaluate interp.eval(TextUtils.readTextFile(startupScriptFile_)); } catch (IOException exc) { ReportingUtils.logError(exc, "Unable to read the startup script (" + startupScriptFile_ + ")."); } catch (EvalError exc) { ReportingUtils.logError(exc); } finally { waitDlg.closeDialog(); } } else { if (startupScriptFile_.length() > 0) ReportingUtils.logMessage("Startup script file ("+startupScriptFile_+") not present."); } } /** * Loads system configuration from the cfg file. 
*/ private boolean loadSystemConfiguration() { boolean result = true; saveMRUConfigFiles(); final WaitDialog waitDlg = new WaitDialog( "Loading system configuration, please wait..."); waitDlg.setAlwaysOnTop(true); waitDlg.showDialog(); this.setEnabled(false); try { if (sysConfigFile_.length() > 0) { GUIUtils.preventDisplayAdapterChangeExceptions(); core_.waitForSystem(); ignorePropertyChanges_ = true; core_.loadSystemConfiguration(sysConfigFile_); ignorePropertyChanges_ = false; GUIUtils.preventDisplayAdapterChangeExceptions(); } } catch (final Exception err) { GUIUtils.preventDisplayAdapterChangeExceptions(); ReportingUtils.showError(err); result = false; } finally { waitDlg.closeDialog(); } setEnabled(true); initializeGUI(); updateSwitchConfigurationMenu(); FileDialogs.storePath(MM_CONFIG_FILE, new File(sysConfigFile_)); return result; } private void saveMRUConfigFiles() { if (0 < sysConfigFile_.length()) { if (MRUConfigFiles_.contains(sysConfigFile_)) { MRUConfigFiles_.remove(sysConfigFile_); } if (maxMRUCfgs_ <= MRUConfigFiles_.size()) { MRUConfigFiles_.remove(maxMRUCfgs_ - 1); } MRUConfigFiles_.add(0, sysConfigFile_); // save the MRU list to the preferences for (Integer icfg = 0; icfg < MRUConfigFiles_.size(); ++icfg) { String value = ""; if (null != MRUConfigFiles_.get(icfg)) { value = MRUConfigFiles_.get(icfg).toString(); } mainPrefs_.put(CFGFILE_ENTRY_BASE + icfg.toString(), value); } } } private void loadMRUConfigFiles() { sysConfigFile_ = mainPrefs_.get(SYSTEM_CONFIG_FILE, sysConfigFile_); // startupScriptFile_ = mainPrefs_.get(STARTUP_SCRIPT_FILE, // startupScriptFile_); MRUConfigFiles_ = new ArrayList<String>(); for (Integer icfg = 0; icfg < maxMRUCfgs_; ++icfg) { String value = ""; value = mainPrefs_.get(CFGFILE_ENTRY_BASE + icfg.toString(), value); if (0 < value.length()) { File ruFile = new File(value); if (ruFile.exists()) { if (!MRUConfigFiles_.contains(value)) { MRUConfigFiles_.add(value); } } } } // initialize MRU list from old persistent data containing only SYSTEM_CONFIG_FILE if (0 < sysConfigFile_.length()) { if (!MRUConfigFiles_.contains(sysConfigFile_)) { // in case persistent data is inconsistent if (maxMRUCfgs_ <= MRUConfigFiles_.size()) { MRUConfigFiles_.remove(maxMRUCfgs_ - 1); } MRUConfigFiles_.add(0, sysConfigFile_); } } } /** * Opens Acquisition dialog. 
*/ private void openAcqControlDialog() { try { if (acqControlWin_ == null) { acqControlWin_ = new AcqControlDlg(engine_, mainPrefs_, this, options_); } if (acqControlWin_.isActive()) { acqControlWin_.setTopPosition(); } acqControlWin_.setVisible(true); acqControlWin_.repaint(); } catch (Exception exc) { ReportingUtils.showError(exc, "\nAcquisition window failed to open due to invalid or corrupted settings.\n" + "Try resetting registry settings to factory defaults (Menu Tools|Options)."); } } private void updateChannelCombos() { if (this.acqControlWin_ != null) { this.acqControlWin_.updateChannelAndGroupCombo(); } } private void runHardwareWizard() { try { if (configChanged_) { Object[] options = {"Yes", "No"}; int n = JOptionPane.showOptionDialog(null, "Save Changed Configuration?", "Micro-Manager", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0]); if (n == JOptionPane.YES_OPTION) { saveConfigPresets(); } configChanged_ = false; } boolean liveRunning = false; if (isLiveModeOn()) { liveRunning = true; enableLiveMode(false); } // unload all devices before starting configurator core_.reset(); GUIUtils.preventDisplayAdapterChangeExceptions(); // run Configurator ConfiguratorDlg2 cfg2 = null; try { setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); cfg2 = new ConfiguratorDlg2(core_, sysConfigFile_); } finally { setCursor(Cursor.getDefaultCursor()); } if (cfg2 == null) { ReportingUtils.showError("Failed to launch Hardware Configuration Wizard"); return; } cfg2.setVisible(true); GUIUtils.preventDisplayAdapterChangeExceptions(); // re-initialize the system with the new configuration file sysConfigFile_ = cfg2.getFileName(); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); loadSystemConfiguration(); GUIUtils.preventDisplayAdapterChangeExceptions(); if (liveRunning) { enableLiveMode(liveRunning); } } catch (Exception e) { ReportingUtils.showError(e); } } private void autofocusNow() { if (afMgr_.getDevice() != null) { new Thread() { @Override public void run() { try { boolean lmo = isLiveModeOn(); if (lmo) { enableLiveMode(false); } afMgr_.getDevice().fullFocus(); if (lmo) { enableLiveMode(true); } } catch (MMException ex) { ReportingUtils.logError(ex); } } }.start(); // or any other method from Autofocus.java API } } private class ExecuteAcq implements Runnable { public ExecuteAcq() { } @Override public void run() { if (acqControlWin_ != null) { acqControlWin_.runAcquisition(); } } } private void testForAbortRequests() throws MMScriptException { if (scriptPanel_ != null) { if (scriptPanel_.stopRequestPending()) { throw new MMScriptException("Script interrupted by the user!"); } } } // ////////////////////////////////////////////////////////////////////////// // Script interface // ////////////////////////////////////////////////////////////////////////// @Override public String getVersion() { return MMVersion.VERSION_STRING; } /** * Inserts version info for various components in the CoreLog */ @Override public void logStartupProperties() { core_.logMessage("MM Studio version: " + getVersion()); core_.logMessage(core_.getVersionInfo()); core_.logMessage(core_.getAPIVersionInfo()); core_.logMessage("Operating System: " + System.getProperty("os.name") + " (" + System.getProperty("os.arch") + ") " + System.getProperty("os.version")); core_.logMessage("JVM: " + System.getProperty("java.vm.name") + ", version " + System.getProperty("java.version") + ", " + System.getProperty("sun.arch.data.model") + "-bit"); } @Override public void makeActive() { toFront(); } 
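// Usage sketch for the Script interface section (hypothetical Beanshell fragment; the
// acquisition name and root directory below are invented for illustration). The startup
// script interpreter created in executeStartupScript() binds this frame as "gui" and the
// core as "mmc", so these methods are typically driven from a script, e.g.:
//
//   gui.message("Running Micro-Manager " + gui.getVersion());
//   acqName = gui.getUniqueAcquisitionName("demo");
//   gui.openAcquisition(acqName, "/tmp/data", 10, 1, 1, 1, true, true);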
@Override public boolean displayImage(TaggedImage ti) { normalizeTags(ti); return displayTaggedImage(ti, true); } /** * Opens a dialog to record stage positions */ @Override public void showXYPositionList() { checkPosListDlg(); posListDlg_.setVisible(true); } @Override public void setConfigChanged(boolean status) { configChanged_ = status; setConfigSaveButtonStatus(configChanged_); } /** * Lets JComponents register themselves so that their background can be * manipulated */ @Override public void addMMBackgroundListener(Component comp) { if (MMFrames_.contains(comp)) return; MMFrames_.add(comp); } /** * Lets JComponents remove themselves from the list whose background gets * changed */ @Override public void removeMMBackgroundListener(Component comp) { if (!MMFrames_.contains(comp)) return; MMFrames_.remove(comp); } /** * Returns exposure time for the desired preset in the given channelgroup * Acquires its info from the preferences * Same thing is used in MDA window, but this class keeps its own copy * * @param channelGroup * @param channel - * @param defaultExp - default value * @return exposure time */ @Override public double getChannelExposureTime(String channelGroup, String channel, double defaultExp) { return exposurePrefs_.getDouble("Exposure_" + channelGroup + "_" + channel, defaultExp); } /** * Updates the exposure time in the given preset * Will also update the current exposure if the given channel and channelgroup * are the current ones * * @param channelGroup - * * @param channel - preset for which to change exposure time * @param exposure - desired exposure time */ @Override public void setChannelExposureTime(String channelGroup, String channel, double exposure) { try { exposurePrefs_.putDouble("Exposure_" + channelGroup + "_" + channel, exposure); if (channelGroup != null && channelGroup.equals(core_.getChannelGroup())) { if (channel != null && !channel.equals("") && channel.equals(core_.getCurrentConfigFromCache(channelGroup))) { textFieldExp_.setText(NumberUtils.doubleToDisplayString(exposure)); setExposure(); } } } catch (Exception ex) { ReportingUtils.logError("Failed to set Exposure prefs using Channelgroup: " + channelGroup + ", channel: " + channel + ", exposure: " + exposure); } } @Override public void enableRoiButtons(final boolean enabled) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { setRoiButton_.setEnabled(enabled); clearRoiButton_.setEnabled(enabled); } }); } @Override public boolean getAutoreloadOption() { return options_.autoreloadDevices_; } /** * Returns the current background color * @return current background color */ @Override public Color getBackgroundColor() { return guiColors_.background.get((options_.displayBackground_)); } /* * Changes background color of this window and all other MM windows */ @Override public void setBackgroundStyle(String backgroundType) { setBackground(guiColors_.background.get((backgroundType))); paint(MMStudioMainFrame.this.getGraphics()); // sets background of all registered Components for (Component comp:MMFrames_) { if (comp != null) comp.setBackground(guiColors_.background.get(backgroundType)); } } @Override public String getBackgroundStyle() { return options_.displayBackground_; } @Override public ImageWindow getSnapLiveWin() { if (simpleDisplay_ == null) { return null; } return simpleDisplay_.getHyperImage().getWindow(); } /** * @Deprecated - used to be in api/AcquisitionEngine */ public void startAcquisition() throws MMScriptException { testForAbortRequests(); SwingUtilities.invokeLater(new 
ExecuteAcq()); } @Override public String runAcquisition() throws MMScriptException { if (SwingUtilities.isEventDispatchThread()) { throw new MMScriptException("Acquisition can not be run from this (EDT) thread"); } testForAbortRequests(); if (acqControlWin_ != null) { String name = acqControlWin_.runAcquisition(); try { while (acqControlWin_.isAcquisitionRunning()) { Thread.sleep(50); } } catch (InterruptedException e) { ReportingUtils.showError(e); } return name; } else { throw new MMScriptException( "Acquisition setup window must be open for this command to work."); } } @Override public String runAcquisition(String name, String root) throws MMScriptException { testForAbortRequests(); if (acqControlWin_ != null) { String acqName = acqControlWin_.runAcquisition(name, root); try { while (acqControlWin_.isAcquisitionRunning()) { Thread.sleep(100); } // ensure that the acquisition has finished. // This does not seem to work, needs something better MMAcquisition acq = acqMgr_.getAcquisition(acqName); boolean finished = false; while (!finished) { ImageCache imCache = acq.getImageCache(); if (imCache != null) { if (imCache.isFinished()) { finished = true; } else { Thread.sleep(100); } } } } catch (InterruptedException e) { ReportingUtils.showError(e); } return acqName; } else { throw new MMScriptException( "Acquisition setup window must be open for this command to work."); } } /** * @Deprecated used to be part of api */ public String runAcqusition(String name, String root) throws MMScriptException { return runAcquisition(name, root); } /** * Loads acquisition settings from file * @param path file containing previously saved acquisition settings * @throws MMScriptException */ @Override public void loadAcquisition(String path) throws MMScriptException { testForAbortRequests(); try { engine_.shutdown(); // load protocol if (acqControlWin_ != null) { acqControlWin_.loadAcqSettingsFromFile(path); } } catch (Exception ex) { throw new MMScriptException(ex.getMessage()); } } @Override public void setPositionList(PositionList pl) throws MMScriptException { testForAbortRequests(); // use serialization to clone the PositionList object posList_ = pl; // PositionList.newInstance(pl); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { if (posListDlg_ != null) posListDlg_.setPositionList(posList_); if (engine_ != null) engine_.setPositionList(posList_); if (acqControlWin_ != null) acqControlWin_.updateGUIContents(); } }); } @Override public PositionList getPositionList() throws MMScriptException { testForAbortRequests(); // use serialization to clone the PositionList object return posList_; //PositionList.newInstance(posList_); } @Override public void sleep(long ms) throws MMScriptException { if (scriptPanel_ != null) { if (scriptPanel_.stopRequestPending()) { throw new MMScriptException("Script interrupted by the user!"); } scriptPanel_.sleep(ms); } } @Override public String getUniqueAcquisitionName(String stub) { return acqMgr_.getUniqueAcquisitionName(stub); } //@Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, int nrPositions) throws MMScriptException { this.openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, nrPositions, true, false); } //@Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices) throws MMScriptException { openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, 0); } //@Override public void openAcquisition(String name, String rootDir, 
int nrFrames, int nrChannels, int nrSlices, int nrPositions, boolean show) throws MMScriptException { this.openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, nrPositions, show, false); } //@Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, boolean show) throws MMScriptException { this.openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, 0, show, false); } @Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, int nrPositions, boolean show, boolean save) throws MMScriptException { acqMgr_.openAcquisition(name, rootDir, show, save); MMAcquisition acq = acqMgr_.getAcquisition(name); acq.setDimensions(nrFrames, nrChannels, nrSlices, nrPositions); } //@Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, boolean show, boolean virtual) throws MMScriptException { this.openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, 0, show, virtual); } //@Override public String createAcquisition(JSONObject summaryMetadata, boolean diskCached) { return createAcquisition(summaryMetadata, diskCached, false); } @Override @Deprecated public String createAcquisition(JSONObject summaryMetadata, boolean diskCached, boolean displayOff) { return acqMgr_.createAcquisition(summaryMetadata, diskCached, engine_, displayOff); } //@Override public void initializeSimpleAcquisition(String name, int width, int height, int byteDepth, int bitDepth, int multiCamNumCh) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); acq.setImagePhysicalDimensions(width, height, byteDepth, bitDepth, multiCamNumCh); acq.initializeSimpleAcq(); } @Override public void initializeAcquisition(String name, int width, int height, int byteDepth, int bitDepth) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); //number of multi-cam cameras is set to 1 here for backwards compatibility //might want to change this later acq.setImagePhysicalDimensions(width, height, byteDepth, bitDepth, 1); acq.initialize(); } @Override public int getAcquisitionImageWidth(String acqName) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getWidth(); } @Override public int getAcquisitionImageHeight(String acqName) throws MMScriptException{ MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getHeight(); } @Override public int getAcquisitionImageBitDepth(String acqName) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getBitDepth(); } @Override public int getAcquisitionImageByteDepth(String acqName) throws MMScriptException{ MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getByteDepth(); } @Override public int getAcquisitionMultiCamNumChannels(String acqName) throws MMScriptException{ MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getMultiCameraNumChannels(); } @Override public Boolean acquisitionExists(String name) { return acqMgr_.acquisitionExists(name); } @Override public void closeAcquisition(String name) throws MMScriptException { acqMgr_.closeAcquisition(name); } /** * @Deprecated use closeAcquisitionWindow instead * @Deprecated - used to be in api/AcquisitionEngine */ public void closeAcquisitionImage5D(String acquisitionName) throws MMScriptException { acqMgr_.closeImageWindow(acquisitionName); } @Override public void closeAcquisitionWindow(String acquisitionName) throws MMScriptException { 
acqMgr_.closeImageWindow(acquisitionName); } /** * @Deprecated - used to be in api/AcquisitionEngine * Since Burst and normal acquisition are now carried out by the same engine, * loadBurstAcquisition simply calls loadAcquisition * * @param path - path to file specifying acquisition settings */ public void loadBurstAcquisition(String path) throws MMScriptException { this.loadAcquisition(path); } @Override public void refreshGUI() { updateGUI(true); } @Override public void refreshGUIFromCache() { updateGUI(true, true); } @Override public void setAcquisitionProperty(String acqName, String propertyName, String value) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(acqName); acq.setProperty(propertyName, value); } public void setAcquisitionSystemState(String acqName, JSONObject md) throws MMScriptException { // acqMgr_.getAcquisition(acqName).setSystemState(md); setAcquisitionSummary(acqName, md); } //@Override public void setAcquisitionSummary(String acqName, JSONObject md) throws MMScriptException { acqMgr_.getAcquisition(acqName).setSummaryProperties(md); } @Override public void setImageProperty(String acqName, int frame, int channel, int slice, String propName, String value) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(acqName); acq.setProperty(frame, channel, slice, propName, value); } @Override public String getCurrentAlbum() { return acqMgr_.getCurrentAlbum(); } @Override public void enableLiveMode(boolean enable) { if (core_ == null) { return; } if (enable == isLiveModeOn()) { return; } if (enable) { try { if (core_.getCameraDevice().length() == 0) { ReportingUtils.showError("No camera configured"); updateButtonsForLiveMode(false); return; } if (liveModeTimer_ == null) { liveModeTimer_ = new LiveModeTimer(); } liveModeTimer_.begin(); callLiveModeListeners(enable); } catch (Exception e) { ReportingUtils.showError(e); liveModeTimer_.stop(); callLiveModeListeners(false); updateButtonsForLiveMode(false); return; } } else { liveModeTimer_.stop(); callLiveModeListeners(enable); } updateButtonsForLiveMode(enable); } public String createNewAlbum() { return acqMgr_.createNewAlbum(); } public void appendImage(String name, TaggedImage taggedImg) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); int f = 1 + acq.getLastAcquiredFrame(); try { MDUtils.setFrameIndex(taggedImg.tags, f); } catch (JSONException e) { throw new MMScriptException("Unable to set the frame index."); } acq.insertTaggedImage(taggedImg, f, 0, 0); } @Override public void addToAlbum(TaggedImage taggedImg) throws MMScriptException { addToAlbum(taggedImg, null); } public void addToAlbum(TaggedImage taggedImg, JSONObject displaySettings) throws MMScriptException { normalizeTags(taggedImg); acqMgr_.addToAlbum(taggedImg,displaySettings); } public void addImage(String name, Object img, int frame, int channel, int slice) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); acq.insertImage(img, frame, channel, slice); } //@Override public void addImage(String name, TaggedImage taggedImg) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); if (!acq.isInitialized()) { JSONObject tags = taggedImg.tags; // initialize physical dimensions of the image try { int width = tags.getInt(MMTags.Image.WIDTH); int height = tags.getInt(MMTags.Image.HEIGHT); int byteDepth = MDUtils.getDepth(tags); int bitDepth = tags.getInt(MMTags.Image.BIT_DEPTH); initializeAcquisition(name, width, height, byteDepth, bitDepth); } catch 
(JSONException e) { throw new MMScriptException(e); } } acq.insertImage(taggedImg); } @Override /** * The basic method for adding images to an existing data set. * If the acquisition was not previously initialized, it will attempt to initialize it from the available image data */ public void addImageToAcquisition(String name, int frame, int channel, int slice, int position, TaggedImage taggedImg) throws MMScriptException { // TODO: complete the tag set and initialize the acquisition MMAcquisition acq = acqMgr_.getAcquisition(name); int positions = acq.getPositions(); // check position, for multi-position data set the number of declared positions should be at least 2 if (acq.getPositions() <= 1 && position > 0) { throw new MMScriptException("The acquisition was opened as a single position data set.\n" + "Open the acquisition with two or more positions in order to create a multi-position data set."); } // check channel, the channel index must stay below the declared number of channels if (acq.getChannels() <= channel) { throw new MMScriptException("This acquisition was opened with " + acq.getChannels() + " channels.\n" + "The channel number must not exceed the declared number of channels."); } JSONObject tags = taggedImg.tags; // if the acquisition was not previously initialized, set physical dimensions of the image if (!acq.isInitialized()) { // automatically initialize physical dimensions of the image try { int width = tags.getInt(MMTags.Image.WIDTH); int height = tags.getInt(MMTags.Image.HEIGHT); int byteDepth = MDUtils.getDepth(tags); int bitDepth = byteDepth * 8; if (tags.has(MMTags.Image.BIT_DEPTH)) { bitDepth = tags.getInt(MMTags.Image.BIT_DEPTH); } initializeAcquisition(name, width, height, byteDepth, bitDepth); } catch (JSONException e) { throw new MMScriptException(e); } } // create required coordinate tags try { tags.put(MMTags.Image.FRAME_INDEX, frame); tags.put(MMTags.Image.FRAME, frame); tags.put(MMTags.Image.CHANNEL_INDEX, channel); tags.put(MMTags.Image.SLICE_INDEX, slice); tags.put(MMTags.Image.POS_INDEX, position); if (!tags.has(MMTags.Summary.SLICES_FIRST) && !tags.has(MMTags.Summary.TIME_FIRST)) { // add default setting tags.put(MMTags.Summary.SLICES_FIRST, true); tags.put(MMTags.Summary.TIME_FIRST, false); } if (acq.getPositions() > 1) { // if no position name is defined we need to insert a default one if (tags.has(MMTags.Image.POS_NAME)) { tags.put(MMTags.Image.POS_NAME, "Pos" + position); } } // update frames if necessary if (acq.getFrames() <= frame) { acq.setProperty(MMTags.Summary.FRAMES, Integer.toString(frame + 1)); } } catch (JSONException e) { throw new MMScriptException(e); } // System.out.println("Inserting frame: " + frame + ", channel: " + channel + ", slice: " + slice + ", pos: " + position); acq.insertImage(taggedImg); } @Override /** * A quick way to implicitly snap an image and add it to the data set. Works * in the same way as above. 
*/ public void snapAndAddImage(String name, int frame, int channel, int slice, int position) throws MMScriptException { TaggedImage ti; try { if (core_.isSequenceRunning()) { ti = core_.getLastTaggedImage(); } else { core_.snapImage(); ti = core_.getTaggedImage(); } MDUtils.setChannelIndex(ti.tags, channel); MDUtils.setFrameIndex(ti.tags, frame); MDUtils.setSliceIndex(ti.tags, slice); MDUtils.setPositionIndex(ti.tags, position); MMAcquisition acq = acqMgr_.getAcquisition(name); if (!acq.isInitialized()) { long width = core_.getImageWidth(); long height = core_.getImageHeight(); long depth = core_.getBytesPerPixel(); long bitDepth = core_.getImageBitDepth(); int multiCamNumCh = (int) core_.getNumberOfCameraChannels(); acq.setImagePhysicalDimensions((int) width, (int) height, (int) depth, (int) bitDepth, multiCamNumCh); acq.initialize(); } if (acq.getPositions() > 1) { MDUtils.setPositionName(ti.tags, "Pos" + position); } addImageToAcquisition(name, frame, channel, slice, position, ti); } catch (Exception e) { throw new MMScriptException(e); } } //@Override public void addImage(String name, TaggedImage img, boolean updateDisplay) throws MMScriptException { acqMgr_.getAcquisition(name).insertImage(img, updateDisplay); } //@Override public void addImage(String name, TaggedImage taggedImg, boolean updateDisplay, boolean waitForDisplay) throws MMScriptException { acqMgr_.getAcquisition(name).insertImage(taggedImg, updateDisplay, waitForDisplay); } //@Override public void addImage(String name, TaggedImage taggedImg, int frame, int channel, int slice, int position) throws MMScriptException { try { acqMgr_.getAcquisition(name).insertImage(taggedImg, frame, channel, slice, position); } catch (JSONException ex) { ReportingUtils.showError(ex); } } //@Override public void addImage(String name, TaggedImage taggedImg, int frame, int channel, int slice, int position, boolean updateDisplay) throws MMScriptException { try { acqMgr_.getAcquisition(name).insertImage(taggedImg, frame, channel, slice, position, updateDisplay); } catch (JSONException ex) { ReportingUtils.showError(ex); } } //@Override public void addImage(String name, TaggedImage taggedImg, int frame, int channel, int slice, int position, boolean updateDisplay, boolean waitForDisplay) throws MMScriptException { try { acqMgr_.getAcquisition(name).insertImage(taggedImg, frame, channel, slice, position, updateDisplay, waitForDisplay); } catch (JSONException ex) { ReportingUtils.showError(ex); } } /** * Closes all acquisitions */ @Override public void closeAllAcquisitions() { acqMgr_.closeAll(); } @Override public String[] getAcquisitionNames() { return acqMgr_.getAcqusitionNames(); } @Override @Deprecated public MMAcquisition getAcquisition(String name) throws MMScriptException { return acqMgr_.getAcquisition(name); } @Override public ImageCache getAcquisitionImageCache(String acquisitionName) throws MMScriptException { return getAcquisition(acquisitionName).getImageCache(); } private class ScriptConsoleMessage implements Runnable { String msg_; public ScriptConsoleMessage(String text) { msg_ = text; } @Override public void run() { if (scriptPanel_ != null) scriptPanel_.message(msg_); } } @Override public void message(String text) throws MMScriptException { if (scriptPanel_ != null) { if (scriptPanel_.stopRequestPending()) { throw new MMScriptException("Script interrupted by the user!"); } SwingUtilities.invokeLater(new ScriptConsoleMessage(text)); } } @Override public void clearMessageWindow() throws MMScriptException { if (scriptPanel_ != null) { 
if (scriptPanel_.stopRequestPending()) { throw new MMScriptException("Script interrupted by the user!"); } scriptPanel_.clearOutput(); } } public void clearOutput() throws MMScriptException { clearMessageWindow(); } public void clear() throws MMScriptException { clearMessageWindow(); } @Override public void setChannelContrast(String title, int channel, int min, int max) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(title); acq.setChannelContrast(channel, min, max); } @Override public void setChannelName(String title, int channel, String name) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(title); acq.setChannelName(channel, name); } @Override public void setChannelColor(String title, int channel, Color color) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(title); acq.setChannelColor(channel, color.getRGB()); } @Override public void setContrastBasedOnFrame(String title, int frame, int slice) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(title); acq.setContrastBasedOnFrame(frame, slice); } @Override public void setStagePosition(double z) throws MMScriptException { try { core_.setPosition(core_.getFocusDevice(),z); core_.waitForDevice(core_.getFocusDevice()); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public void setRelativeStagePosition(double z) throws MMScriptException { try { core_.setRelativePosition(core_.getFocusDevice(), z); core_.waitForDevice(core_.getFocusDevice()); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public void setXYStagePosition(double x, double y) throws MMScriptException { try { core_.setXYPosition(core_.getXYStageDevice(), x, y); core_.waitForDevice(core_.getXYStageDevice()); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public void setRelativeXYStagePosition(double x, double y) throws MMScriptException { try { core_.setRelativeXYPosition(core_.getXYStageDevice(), x, y); core_.waitForDevice(core_.getXYStageDevice()); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public Point2D.Double getXYStagePosition() throws MMScriptException { String stage = core_.getXYStageDevice(); if (stage.length() == 0) { throw new MMScriptException("XY Stage device is not available"); } double x[] = new double[1]; double y[] = new double[1]; try { core_.getXYPosition(stage, x, y); Point2D.Double pt = new Point2D.Double(x[0], y[0]); return pt; } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public String getXYStageName() { return core_.getXYStageDevice(); } @Override public void setXYOrigin(double x, double y) throws MMScriptException { String xyStage = core_.getXYStageDevice(); try { core_.setAdapterOriginXY(xyStage, x, y); } catch (Exception e) { throw new MMScriptException(e); } } public AcquisitionWrapperEngine getAcquisitionEngine() { return engine_; } @Override public String installAutofocusPlugin(String className) { try { return installAutofocusPlugin(Class.forName(className)); } catch (ClassNotFoundException e) { String msg = "Internal error: AF manager not instantiated."; ReportingUtils.logError(e, msg); return msg; } } public String installAutofocusPlugin(Class<?> autofocus) { String msg = autofocus.getSimpleName() + " module loaded."; if (afMgr_ != null) { afMgr_.setAFPluginClassName(autofocus.getSimpleName()); try { afMgr_.refresh(); } catch (MMException e) { msg = e.getMessage(); 
ReportingUtils.logError(e); } } else { msg = "Internal error: AF manager not instantiated."; } return msg; } public CMMCore getCore() { return core_; } @Override public IAcquisitionEngine2010 getAcquisitionEngine2010() { try { acquisitionEngine2010LoadingThread_.join(); if (acquisitionEngine2010_ == null) { acquisitionEngine2010_ = (IAcquisitionEngine2010) acquisitionEngine2010Class_.getConstructor(ScriptInterface.class).newInstance(this); } return acquisitionEngine2010_; } catch (Exception e) { ReportingUtils.logError(e); return null; } } @Override public void addImageProcessor(DataProcessor<TaggedImage> processor) { getAcquisitionEngine().addImageProcessor(processor); } @Override public void removeImageProcessor(DataProcessor<TaggedImage> processor) { getAcquisitionEngine().removeImageProcessor(processor); } @Override public ArrayList<DataProcessor<TaggedImage>> getImageProcessorPipeline() { return getAcquisitionEngine().getImageProcessorPipeline(); } public void registerProcessorClass(Class<?> processorClass, String name) { getAcquisitionEngine().registerProcessorClass(processorClass, name); } // NB will need @Override tags once these functions are exposed in the // ScriptInterface. @Override public void setImageProcessorPipeline(List<DataProcessor<TaggedImage>> pipeline) { getAcquisitionEngine().setImageProcessorPipeline(pipeline); } @Override public void setPause(boolean state) { getAcquisitionEngine().setPause(state); } @Override public boolean isPaused() { return getAcquisitionEngine().isPaused(); } @Override public void attachRunnable(int frame, int position, int channel, int slice, Runnable runnable) { getAcquisitionEngine().attachRunnable(frame, position, channel, slice, runnable); } @Override public void clearRunnables() { getAcquisitionEngine().clearRunnables(); } @Override public SequenceSettings getAcquisitionSettings() { if (engine_ == null) return new SequenceSettings(); return engine_.getSequenceSettings(); } // Deprecated; use correctly spelled version. (Used to be part of API.) public SequenceSettings getAcqusitionSettings() { return getAcquisitionSettings(); } @Override public void setAcquisitionSettings(SequenceSettings ss) { if (engine_ == null) return; engine_.setSequenceSettings(ss); acqControlWin_.updateGUIContents(); } // Deprecated; use correctly spelled version. (Used to be part of API.) public void setAcqusitionSettings(SequenceSettings ss) { setAcquisitionSettings(ss); } @Override public String getAcquisitionPath() { if (engine_ == null) return null; return engine_.getImageCache().getDiskLocation(); } @Override public void promptToSaveAcquisition(String name, boolean prompt) throws MMScriptException { getAcquisition(name).promptToSave(prompt); } // Deprecated; use correctly spelled version. (Used to be part of API.) 
public void promptToSaveAcqusition(String name, boolean prompt) throws MMScriptException { promptToSaveAcquisition(name, prompt); } @Override public void setROI(Rectangle r) throws MMScriptException { boolean liveRunning = false; if (isLiveModeOn()) { liveRunning = true; enableLiveMode(false); } try { core_.setROI(r.x, r.y, r.width, r.height); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } updateStaticInfo(); if (liveRunning) { enableLiveMode(true); } } public void snapAndAddToImage5D() { if (core_.getCameraDevice().length() == 0) { ReportingUtils.showError("No camera configured"); return; } try { if (this.isLiveModeOn()) { copyFromLiveModeToAlbum(simpleDisplay_); } else { doSnap(true); } } catch (Exception ex) { ReportingUtils.logError(ex); } } public void setAcquisitionEngine(AcquisitionWrapperEngine eng) { engine_ = eng; } public void suspendLiveMode() { liveModeSuspended_ = isLiveModeOn(); enableLiveMode(false); } public void resumeLiveMode() { if (liveModeSuspended_) { enableLiveMode(true); } } @Override public Autofocus getAutofocus() { return afMgr_.getDevice(); } @Override public void showAutofocusDialog() { if (afMgr_.getDevice() != null) { afMgr_.showOptionsDialog(); } } @Override public AutofocusManager getAutofocusManager() { return afMgr_; } public void selectConfigGroup(String groupName) { configPad_.setGroup(groupName); } public String regenerateDeviceList() { Cursor oldc = Cursor.getDefaultCursor(); Cursor waitc = new Cursor(Cursor.WAIT_CURSOR); setCursor(waitc); StringBuffer resultFile = new StringBuffer(); MicroscopeModel.generateDeviceListFile(resultFile, core_); //MicroscopeModel.generateDeviceListFile(); setCursor(oldc); return resultFile.toString(); } @Override public void setImageSavingFormat(Class imageSavingClass) throws MMScriptException { if (! (imageSavingClass.equals(TaggedImageStorageDiskDefault.class) || imageSavingClass.equals(TaggedImageStorageMultipageTiff.class))) { throw new MMScriptException("Unrecognized saving class"); } ImageUtils.setImageStorageClass(imageSavingClass); if (acqControlWin_ != null) { acqControlWin_.updateSavingTypeButtons(); } } /** * Allows MMListeners to register themselves */ @Override public void addMMListener(MMListenerInterface newL) { if (MMListeners_.contains(newL)) return; MMListeners_.add(newL); } /** * Allows MMListeners to remove themselves */ @Override public void removeMMListener(MMListenerInterface oldL) { if (!MMListeners_.contains(oldL)) return; MMListeners_.remove(oldL); } @Override public void logMessage(String msg) { ReportingUtils.logMessage(msg); } @Override public void showMessage(String msg) { ReportingUtils.showMessage(msg); } @Override public void logError(Exception e, String msg) { ReportingUtils.logError(e, msg); } @Override public void logError(Exception e) { ReportingUtils.logError(e); } @Override public void logError(String msg) { ReportingUtils.logError(msg); } @Override public void showError(Exception e, String msg) { ReportingUtils.showError(e, msg); } @Override public void showError(Exception e) { ReportingUtils.showError(e); } @Override public void showError(String msg) { ReportingUtils.showError(msg); } }
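// Usage sketch for addImageToAcquisition() (hypothetical script fragment; the acquisition
// name, root path, and coordinates are invented for illustration). The call expects an
// already opened acquisition plus explicit frame/channel/slice/position indices:
//
//   gui.openAcquisition("demo", "/tmp/data", 1, 1, 1, 1, true, true);
//   mmc.snapImage();
//   img = mmc.getTaggedImage();
//   gui.addImageToAcquisition("demo", 0, 0, 0, 0, img);
//   gui.closeAcquisitionWindow("demo");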
mmstudio/src/org/micromanager/MMStudioMainFrame.java
/////////////////////////////////////////////////////////////////////////////// //FILE: MMStudioMainFrame.java //PROJECT: Micro-Manager //SUBSYSTEM: mmstudio //----------------------------------------------------------------------------- //AUTHOR: Nenad Amodaj, [email protected], Jul 18, 2005 // Modifications by Arthur Edelstein, Nico Stuurman, Henry Pinkard //COPYRIGHT: University of California, San Francisco, 2006-2013 // 100X Imaging Inc, www.100ximaging.com, 2008 //LICENSE: This file is distributed under the BSD license. // License text is included with the source distribution. // This file is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty // of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. // IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES. //CVS: $Id$ // package org.micromanager; import ij.IJ; import ij.ImageJ; import ij.ImagePlus; import ij.WindowManager; import ij.gui.Line; import ij.gui.Roi; import ij.process.ImageProcessor; import java.awt.Color; import java.awt.Component; import java.awt.Dimension; import java.awt.Font; import java.awt.Rectangle; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.FocusAdapter; import java.awt.event.FocusEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.awt.geom.Point2D; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.prefs.Preferences; import javax.swing.AbstractButton; import javax.swing.BorderFactory; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JCheckBoxMenuItem; import javax.swing.JComboBox; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JSplitPane; import javax.swing.JTextField; import javax.swing.JToggleButton; import javax.swing.SpringLayout; import javax.swing.SwingConstants; import javax.swing.SwingUtilities; import javax.swing.ToolTipManager; import javax.swing.UIManager; import mmcorej.CMMCore; import mmcorej.DeviceType; import mmcorej.MMCoreJ; import mmcorej.MMEventCallback; import mmcorej.StrVector; import org.json.JSONObject; import org.micromanager.acquisition.AcquisitionManager; import org.micromanager.api.Autofocus; import org.micromanager.api.DataProcessor; import org.micromanager.api.MMPlugin; import org.micromanager.api.MMProcessorPlugin; import org.micromanager.api.MMTags; import org.micromanager.api.PositionList; import org.micromanager.api.ScriptInterface; import org.micromanager.api.MMListenerInterface; import org.micromanager.api.SequenceSettings; import org.micromanager.conf2.ConfiguratorDlg2; import org.micromanager.conf2.MMConfigFileException; import org.micromanager.conf2.MicroscopeModel; import org.micromanager.events.EventManager; import org.micromanager.graph.GraphData; import org.micromanager.graph.GraphFrame; import org.micromanager.navigation.CenterAndDragListener; import org.micromanager.navigation.XYZKeyListener; import org.micromanager.navigation.ZWheelListener; import org.micromanager.pipelineUI.PipelinePanel; import org.micromanager.utils.AutofocusManager; import org.micromanager.utils.ContrastSettings; import org.micromanager.utils.GUIColors; import org.micromanager.utils.GUIUtils; 
import org.micromanager.utils.JavaUtils; import org.micromanager.utils.MMException; import org.micromanager.utils.MMScriptException; import org.micromanager.utils.NumberUtils; import org.micromanager.utils.TextUtils; import org.micromanager.utils.WaitDialog; import bsh.EvalError; import bsh.Interpreter; import com.swtdesigner.SwingResourceManager; import ij.gui.ImageCanvas; import ij.gui.ImageWindow; import ij.gui.Toolbar; import java.awt.*; import java.awt.dnd.DropTarget; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import mmcorej.TaggedImage; import org.json.JSONException; import org.micromanager.acquisition.*; import org.micromanager.api.ImageCache; import org.micromanager.api.IAcquisitionEngine2010; import org.micromanager.graph.HistogramSettings; import org.micromanager.internalinterfaces.LiveModeListener; import org.micromanager.utils.DragDropUtil; import org.micromanager.utils.FileDialogs; import org.micromanager.utils.FileDialogs.FileType; import org.micromanager.utils.HotKeysDialog; import org.micromanager.utils.ImageUtils; import org.micromanager.utils.MDUtils; import org.micromanager.utils.MMKeyDispatcher; import org.micromanager.utils.ReportingUtils; import org.micromanager.utils.UIMonitor; /* * Main panel and application class for the MMStudio. */ public class MMStudioMainFrame extends JFrame implements ScriptInterface { private static final String MICRO_MANAGER_TITLE = "Micro-Manager"; private static final long serialVersionUID = 3556500289598574541L; private static final String MAIN_FRAME_X = "x"; private static final String MAIN_FRAME_Y = "y"; private static final String MAIN_FRAME_WIDTH = "width"; private static final String MAIN_FRAME_HEIGHT = "height"; private static final String MAIN_FRAME_DIVIDER_POS = "divider_pos"; private static final String MAIN_EXPOSURE = "exposure"; private static final String MAIN_SAVE_METHOD = "saveMethod"; private static final String SYSTEM_CONFIG_FILE = "sysconfig_file"; private static final String OPEN_ACQ_DIR = "openDataDir"; private static final String SCRIPT_CORE_OBJECT = "mmc"; private static final String SCRIPT_ACQENG_OBJECT = "acq"; private static final String SCRIPT_GUI_OBJECT = "gui"; private static final String AUTOFOCUS_DEVICE = "autofocus_device"; private static final String MOUSE_MOVES_STAGE = "mouse_moves_stage"; private static final String EXPOSURE_SETTINGS_NODE = "MainExposureSettings"; private static final String CONTRAST_SETTINGS_NODE = "MainContrastSettings"; private static final int TOOLTIP_DISPLAY_DURATION_MILLISECONDS = 15000; private static final int TOOLTIP_DISPLAY_INITIAL_DELAY_MILLISECONDS = 2000; // cfg file saving private static final String CFGFILE_ENTRY_BASE = "CFGFileEntry"; // + {0, 1, 2, 3, 4} // GUI components private JComboBox comboBinning_; private JComboBox shutterComboBox_; private JTextField textFieldExp_; private JLabel labelImageDimensions_; private JToggleButton liveButton_; private JCheckBox autoShutterCheckBox_; private MMOptions options_; private boolean runsAsPlugin_; private JCheckBoxMenuItem centerAndDragMenuItem_; private JButton snapButton_; private JButton autofocusNowButton_; private JButton autofocusConfigureButton_; private JToggleButton toggleShutterButton_; private GUIColors guiColors_; private GraphFrame profileWin_; private PropertyEditor propertyBrowser_; private CalibrationListDlg 
calibrationListDlg_; private AcqControlDlg acqControlWin_; private JMenu pluginMenu_; private Map<String, JMenu> pluginSubMenus_; private List<MMListenerInterface> MMListeners_ = Collections.synchronizedList(new ArrayList<MMListenerInterface>()); private List<LiveModeListener> liveModeListeners_ = Collections.synchronizedList(new ArrayList<LiveModeListener>()); private List<Component> MMFrames_ = Collections.synchronizedList(new ArrayList<Component>()); private AutofocusManager afMgr_; private final static String DEFAULT_CONFIG_FILE_NAME = "MMConfig_demo.cfg"; private final static String DEFAULT_CONFIG_FILE_PROPERTY = "org.micromanager.default.config.file"; private ArrayList<String> MRUConfigFiles_; private static final int maxMRUCfgs_ = 5; private String sysConfigFile_; private String startupScriptFile_; private ConfigGroupPad configPad_; private LiveModeTimer liveModeTimer_; private GraphData lineProfileData_; // labels for standard devices private String cameraLabel_; private String zStageLabel_; private String shutterLabel_; private String xyStageLabel_; // applications settings private Preferences mainPrefs_; private Preferences systemPrefs_; private Preferences colorPrefs_; private Preferences exposurePrefs_; private Preferences contrastPrefs_; // MMcore private CMMCore core_; private AcquisitionWrapperEngine engine_; private PositionList posList_; private PositionListDlg posListDlg_; private String openAcqDirectory_ = ""; private boolean running_; private boolean configChanged_ = false; private StrVector shutters_ = null; private JButton saveConfigButton_; private ScriptPanel scriptPanel_; private PipelinePanel pipelinePanel_; private org.micromanager.utils.HotKeys hotKeys_; private CenterAndDragListener centerAndDragListener_; private ZWheelListener zWheelListener_; private XYZKeyListener xyzKeyListener_; private AcquisitionManager acqMgr_; private static VirtualAcquisitionDisplay simpleDisplay_; private Color[] multiCameraColors_ = {Color.RED, Color.GREEN, Color.BLUE, Color.YELLOW, Color.CYAN}; private boolean liveModeSuspended_; public Font defaultScriptFont_ = null; public static final String SIMPLE_ACQ = "Snap/Live Window"; public static FileType MM_CONFIG_FILE = new FileType("MM_CONFIG_FILE", "Micro-Manager Config File", "./MyScope.cfg", true, "cfg"); // Our instance private static MMStudioMainFrame gui_; // Callback private CoreEventCallback cb_; // Lock invoked while shutting down private final Object shutdownLock_ = new Object(); private JMenuBar menuBar_; private ConfigPadButtonPanel configPadButtonPanel_; private final JMenu switchConfigurationMenu_; private final MetadataPanel metadataPanel_; public static FileType MM_DATA_SET = new FileType("MM_DATA_SET", "Micro-Manager Image Location", System.getProperty("user.home") + "/Untitled", false, (String[]) null); private Thread acquisitionEngine2010LoadingThread_ = null; private Class<?> acquisitionEngine2010Class_ = null; private IAcquisitionEngine2010 acquisitionEngine2010_ = null; private final JSplitPane splitPane_; private volatile boolean ignorePropertyChanges_; private PluginLoader pluginLoader_; private AbstractButton setRoiButton_; private AbstractButton clearRoiButton_; /** * Simple class used to cache static info */ private class StaticInfo { public long width_; public long height_; public long bytesPerPixel_; public long imageBitDepth_; public double pixSizeUm_; public double zPos_; public double x_; public double y_; } private StaticInfo staticInfo_ = new StaticInfo(); /** * Main procedure for stand alone 
operation. */ public static void main(String args[]) { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); MMStudioMainFrame frame = new MMStudioMainFrame(false); frame.setVisible(true); frame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE); } catch (Throwable e) { ReportingUtils.showError(e, "A Java error has caused Micro-Manager to exit."); System.exit(1); } } /** * MMStudioMainFrame constructor * @param pluginStatus */ @SuppressWarnings("LeakingThisInConstructor") public MMStudioMainFrame(boolean pluginStatus) { org.micromanager.diagnostics.ThreadExceptionLogger.setUp(); // Set up event handling early, so following code can subscribe/publish // events as needed. EventManager manager = new EventManager(); startLoadingPipelineClass(); options_ = new MMOptions(); try { options_.loadSettings(); } catch (NullPointerException ex) { ReportingUtils.logError(ex); } UIMonitor.enable(options_.debugLogEnabled_); guiColors_ = new GUIColors(); pluginLoader_ = new PluginLoader(); // plugins_ = new ArrayList<PluginItem>(); gui_ = this; runsAsPlugin_ = pluginStatus; setIconImage(SwingResourceManager.getImage(MMStudioMainFrame.class, "icons/microscope.gif")); running_ = true; acqMgr_ = new AcquisitionManager(); sysConfigFile_ = new File(DEFAULT_CONFIG_FILE_NAME).getAbsolutePath(); sysConfigFile_ = System.getProperty(DEFAULT_CONFIG_FILE_PROPERTY, sysConfigFile_); if (options_.startupScript_.length() > 0) { startupScriptFile_ = new File(options_.startupScript_).getAbsolutePath(); } else { startupScriptFile_ = ""; } ReportingUtils.SetContainingFrame(gui_); // set the location for app preferences try { mainPrefs_ = Preferences.userNodeForPackage(this.getClass()); } catch (Exception e) { ReportingUtils.logError(e); } systemPrefs_ = mainPrefs_; colorPrefs_ = mainPrefs_.node(mainPrefs_.absolutePath() + "/" + AcqControlDlg.COLOR_SETTINGS_NODE); exposurePrefs_ = mainPrefs_.node(mainPrefs_.absolutePath() + "/" + EXPOSURE_SETTINGS_NODE); contrastPrefs_ = mainPrefs_.node(mainPrefs_.absolutePath() + "/" + CONTRAST_SETTINGS_NODE); // check system preferences try { Preferences p = Preferences.systemNodeForPackage(this.getClass()); if (null != p) { // if we can not write to the systemPrefs, use AppPrefs instead if (JavaUtils.backingStoreAvailable(p)) { systemPrefs_ = p; } } } catch (Exception e) { ReportingUtils.logError(e); } showRegistrationDialogMaybe(); // load application preferences // NOTE: only window size and position preferences are loaded, // not the settings for the camera and live imaging - // attempting to set those automatically on startup may cause problems // with the hardware int x = mainPrefs_.getInt(MAIN_FRAME_X, 100); int y = mainPrefs_.getInt(MAIN_FRAME_Y, 100); int width = mainPrefs_.getInt(MAIN_FRAME_WIDTH, 644); int height = mainPrefs_.getInt(MAIN_FRAME_HEIGHT, 570); openAcqDirectory_ = mainPrefs_.get(OPEN_ACQ_DIR, ""); try { ImageUtils.setImageStorageClass(Class.forName (mainPrefs_.get(MAIN_SAVE_METHOD, ImageUtils.getImageStorageClass().getName()) ) ); } catch (ClassNotFoundException ex) { ReportingUtils.logError(ex, "Class not found error. 
Should never happen"); } ToolTipManager ttManager = ToolTipManager.sharedInstance(); ttManager.setDismissDelay(TOOLTIP_DISPLAY_DURATION_MILLISECONDS); ttManager.setInitialDelay(TOOLTIP_DISPLAY_INITIAL_DELAY_MILLISECONDS); setBounds(x, y, width, height); setExitStrategy(options_.closeOnExit_); setTitle(MICRO_MANAGER_TITLE + " " + MMVersion.VERSION_STRING); setBackground(guiColors_.background.get((options_.displayBackground_))); setMinimumSize(new Dimension(605,480)); menuBar_ = new JMenuBar(); switchConfigurationMenu_ = new JMenu(); setJMenuBar(menuBar_); initializeFileMenu(); initializeToolsMenu(); splitPane_ = createSplitPane(mainPrefs_.getInt(MAIN_FRAME_DIVIDER_POS, 200)); getContentPane().add(splitPane_); createTopPanelWidgets((JPanel) splitPane_.getComponent(0)); metadataPanel_ = createMetadataPanel((JPanel) splitPane_.getComponent(1)); setupWindowHandlers(); // Add our own keyboard manager that handles Micro-Manager shortcuts MMKeyDispatcher mmKD = new MMKeyDispatcher(gui_); KeyboardFocusManager.getCurrentKeyboardFocusManager().addKeyEventDispatcher(mmKD); DropTarget dropTarget = new DropTarget(this, new DragDropUtil()); } private void setupWindowHandlers() { // add window listeners addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { closeSequence(false); } @Override public void windowOpened(WindowEvent e) { // ------------------- // initialize hardware // ------------------- try { core_ = new CMMCore(); } catch(UnsatisfiedLinkError ex) { ReportingUtils.showError(ex, "Failed to load the MMCoreJ_wrap native library"); return; } ReportingUtils.setCore(core_); core_.enableDebugLog(options_.debugLogEnabled_); logStartupProperties(); cameraLabel_ = ""; shutterLabel_ = ""; zStageLabel_ = ""; xyStageLabel_ = ""; engine_ = new AcquisitionWrapperEngine(acqMgr_); // processorStackManager_ = new ProcessorStackManager(engine_); // register callback for MMCore notifications, this is a global // to avoid garbage collection cb_ = new CoreEventCallback(); core_.registerCallback(cb_); try { core_.setCircularBufferMemoryFootprint(options_.circularBufferSizeMB_); } catch (Exception e2) { ReportingUtils.showError(e2); } MMStudioMainFrame parent = (MMStudioMainFrame) e.getWindow(); if (parent != null) { engine_.setParentGUI(parent); } loadMRUConfigFiles(); afMgr_ = new AutofocusManager(gui_); Thread pluginInitializer = initializePlugins(); toFront(); if (!options_.doNotAskForConfigFile_) { MMIntroDlg introDlg = new MMIntroDlg(MMVersion.VERSION_STRING, MRUConfigFiles_); introDlg.setConfigFile(sysConfigFile_); introDlg.setBackground(guiColors_.background.get((options_.displayBackground_))); introDlg.setVisible(true); if (!introDlg.okChosen()) { closeSequence(false); return; } sysConfigFile_ = introDlg.getConfigFile(); } saveMRUConfigFiles(); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); paint(MMStudioMainFrame.this.getGraphics()); engine_.setCore(core_, afMgr_); posList_ = new PositionList(); engine_.setPositionList(posList_); // load (but do no show) the scriptPanel createScriptPanel(); // Ditto with the image pipeline panel. 
createPipelinePanel(); // Create an instance of HotKeys so that they can be read in from prefs hotKeys_ = new org.micromanager.utils.HotKeys(); hotKeys_.loadSettings(); // before loading the system configuration, we need to wait // until the plugins are loaded try { pluginInitializer.join(2000); } catch (InterruptedException ex) { ReportingUtils.logError(ex, "Plugin loader thread was interupted"); } // if an error occurred during config loading, // do not display more errors than needed if (!loadSystemConfiguration()) ReportingUtils.showErrorOn(false); executeStartupScript(); // Create Multi-D window here but do not show it. // This window needs to be created in order to properly set the "ChannelGroup" // based on the Multi-D parameters acqControlWin_ = new AcqControlDlg(engine_, mainPrefs_, MMStudioMainFrame.this, options_); addMMBackgroundListener(acqControlWin_); configPad_.setCore(core_); if (parent != null) { configPad_.setParentGUI(parent); } configPadButtonPanel_.setCore(core_); // initialize controls initializeHelpMenu(); String afDevice = mainPrefs_.get(AUTOFOCUS_DEVICE, ""); if (afMgr_.hasDevice(afDevice)) { try { afMgr_.selectDevice(afDevice); } catch (MMException e1) { // this error should never happen ReportingUtils.showError(e1); } } centerAndDragListener_ = new CenterAndDragListener(gui_); zWheelListener_ = new ZWheelListener(core_, gui_); gui_.addLiveModeListener(zWheelListener_); xyzKeyListener_ = new XYZKeyListener(core_, gui_); gui_.addLiveModeListener(xyzKeyListener_); // switch error reporting back on ReportingUtils.showErrorOn(true); } private Thread initializePlugins() { pluginMenu_ = GUIUtils.createMenuInMenuBar(menuBar_, "Plugins"); Thread myThread = new ThreadPluginLoading("Plugin loading"); myThread.start(); return myThread; } class ThreadPluginLoading extends Thread { public ThreadPluginLoading(String string) { super(string); } @Override public void run() { // Needed for loading clojure-based jars: Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); pluginLoader_.loadPlugins(); } } }); } /** * Callback to update GUI when a change happens in the MMCore. 
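 * Notifications are forwarded to all registered MMListenerInterface instances and,
 * where appropriate, used to refresh the GUI itself.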
*/ public class CoreEventCallback extends MMEventCallback { public CoreEventCallback() { super(); } @Override public void onPropertiesChanged() { // TODO: remove test once acquisition engine is fully multithreaded if (engine_ != null && engine_.isAcquisitionRunning()) { core_.logMessage("Notification from MMCore ignored because acquistion is running!", true); } else { if (ignorePropertyChanges_) { core_.logMessage("Notification from MMCore ignored since the system is still loading", true); } else { core_.updateSystemStateCache(); updateGUI(true); // update all registered listeners for (MMListenerInterface mmIntf : MMListeners_) { mmIntf.propertiesChangedAlert(); } core_.logMessage("Notification from MMCore!", true); } } } @Override public void onPropertyChanged(String deviceName, String propName, String propValue) { core_.logMessage("Notification for Device: " + deviceName + " Property: " + propName + " changed to value: " + propValue, true); // update all registered listeners for (MMListenerInterface mmIntf:MMListeners_) { mmIntf.propertyChangedAlert(deviceName, propName, propValue); } } @Override public void onConfigGroupChanged(String groupName, String newConfig) { try { configPad_.refreshGroup(groupName, newConfig); for (MMListenerInterface mmIntf:MMListeners_) { mmIntf.configGroupChangedAlert(groupName, newConfig); } } catch (Exception e) { } } @Override public void onSystemConfigurationLoaded() { for (MMListenerInterface mmIntf:MMListeners_) { mmIntf.systemConfigurationLoaded(); } } @Override public void onPixelSizeChanged(double newPixelSizeUm) { updatePixSizeUm (newPixelSizeUm); for (MMListenerInterface mmIntf:MMListeners_) { mmIntf.pixelSizeChangedAlert(newPixelSizeUm); } } @Override public void onStagePositionChanged(String deviceName, double pos) { if (deviceName.equals(zStageLabel_)) { updateZPos(pos); for (MMListenerInterface mmIntf:MMListeners_) { mmIntf.stagePositionChangedAlert(deviceName, pos); } } } @Override public void onStagePositionChangedRelative(String deviceName, double pos) { if (deviceName.equals(zStageLabel_)) updateZPosRelative(pos); } @Override public void onXYStagePositionChanged(String deviceName, double xPos, double yPos) { if (deviceName.equals(xyStageLabel_)) { updateXYPos(xPos, yPos); for (MMListenerInterface mmIntf:MMListeners_) { mmIntf.xyStagePositionChanged(deviceName, xPos, yPos); } } } @Override public void onXYStagePositionChangedRelative(String deviceName, double xPos, double yPos) { if (deviceName.equals(xyStageLabel_)) updateXYPosRelative(xPos, yPos); } @Override public void onExposureChanged(String deviceName, double exposure) { if (deviceName.equals(cameraLabel_)){ // update exposure in gui textFieldExp_.setText(NumberUtils.doubleToDisplayString(exposure)); } for (MMListenerInterface mmIntf:MMListeners_) { mmIntf.exposureChanged(deviceName, exposure); } } } private void handleException(Exception e, String msg) { String errText = "Exception occurred: "; if (msg.length() > 0) { errText += msg + " -- "; } if (options_.debugLogEnabled_) { errText += e.getMessage(); } else { errText += e.toString() + "\n"; ReportingUtils.showError(e); } handleError(errText); } private void handleException(Exception e) { handleException(e, ""); } private void handleError(String message) { if (isLiveModeOn()) { // Should we always stop live mode on any error? 
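// For now, stop live mode before showing the modal dialog (presumably so the
// message is not hidden behind a continuously updating Snap/Live window).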
enableLiveMode(false); } JOptionPane.showMessageDialog(this, message); core_.logMessage(message); } public ImageWindow getImageWin() { return getSnapLiveWin(); } public static VirtualAcquisitionDisplay getSimpleDisplay() { return simpleDisplay_; } public static void createSimpleDisplay(String name, ImageCache cache) throws MMScriptException { simpleDisplay_ = new VirtualAcquisitionDisplay(cache, name); } public void checkSimpleAcquisition() { if (core_.getCameraDevice().length() == 0) { ReportingUtils.showError("No camera configured"); return; } int width = (int) core_.getImageWidth(); int height = (int) core_.getImageHeight(); int depth = (int) core_.getBytesPerPixel(); int bitDepth = (int) core_.getImageBitDepth(); int numCamChannels = (int) core_.getNumberOfCameraChannels(); try { if (acquisitionExists(SIMPLE_ACQ)) { if ((getAcquisitionImageWidth(SIMPLE_ACQ) != width) || (getAcquisitionImageHeight(SIMPLE_ACQ) != height) || (getAcquisitionImageByteDepth(SIMPLE_ACQ) != depth) || (getAcquisitionImageBitDepth(SIMPLE_ACQ) != bitDepth) || (getAcquisitionMultiCamNumChannels(SIMPLE_ACQ) != numCamChannels)) { //Need to close and reopen simple window closeAcquisitionWindow(SIMPLE_ACQ); } } if (!acquisitionExists(SIMPLE_ACQ)) { openAcquisition(SIMPLE_ACQ, "", 1, numCamChannels, 1, true); if (numCamChannels > 1) { for (long i = 0; i < numCamChannels; i++) { String chName = core_.getCameraChannelName(i); int defaultColor = multiCameraColors_[(int) i % multiCameraColors_.length].getRGB(); setChannelColor(SIMPLE_ACQ, (int) i, getChannelColor(chName, defaultColor)); setChannelName(SIMPLE_ACQ, (int) i, chName); } } initializeSimpleAcquisition(SIMPLE_ACQ, width, height, depth, bitDepth, numCamChannels); getAcquisition(SIMPLE_ACQ).promptToSave(false); getAcquisition(SIMPLE_ACQ).getAcquisitionWindow().getHyperImage().getWindow().toFront(); this.updateCenterAndDragListener(); } } catch (Exception ex) { ReportingUtils.showError(ex); } } public void checkSimpleAcquisition(TaggedImage image) { try { JSONObject tags = image.tags; int width = MDUtils.getWidth(tags); int height = MDUtils.getHeight(tags); int depth = MDUtils.getDepth(tags); int bitDepth = MDUtils.getBitDepth(tags); int numCamChannels = (int) core_.getNumberOfCameraChannels(); if (acquisitionExists(SIMPLE_ACQ)) { if ((getAcquisitionImageWidth(SIMPLE_ACQ) != width) || (getAcquisitionImageHeight(SIMPLE_ACQ) != height) || (getAcquisitionImageByteDepth(SIMPLE_ACQ) != depth) || (getAcquisitionImageBitDepth(SIMPLE_ACQ) != bitDepth) || (getAcquisitionMultiCamNumChannels(SIMPLE_ACQ) != numCamChannels)) { //Need to close and reopen simple window closeAcquisitionWindow(SIMPLE_ACQ); // Seems that closeAcquisitionWindow also closes the acquisition... 
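// Note: this close-and-reopen path runs whenever the incoming image no longer matches
// the existing Snap/Live acquisition in width, height, byte depth, bit depth, or
// number of camera channels; the window is then recreated below.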
//closeAcquisition(SIMPLE_ACQ); } } if (!acquisitionExists(SIMPLE_ACQ)) { openAcquisition(SIMPLE_ACQ, "", 1, numCamChannels, 1, true); if (numCamChannels > 1) { for (long i = 0; i < numCamChannels; i++) { String chName = core_.getCameraChannelName(i); int defaultColor = multiCameraColors_[(int) i % multiCameraColors_.length].getRGB(); setChannelColor(SIMPLE_ACQ, (int) i, getChannelColor(chName, defaultColor)); setChannelName(SIMPLE_ACQ, (int) i, chName); } } initializeSimpleAcquisition(SIMPLE_ACQ, width, height, depth, bitDepth, numCamChannels); getAcquisition(SIMPLE_ACQ).promptToSave(false); getAcquisition(SIMPLE_ACQ).getAcquisitionWindow().getHyperImage().getWindow().toFront(); this.updateCenterAndDragListener(); } } catch (Exception ex) { ReportingUtils.showError(ex); } } public void saveChannelColor(String chName, int rgb) { if (colorPrefs_ != null) { colorPrefs_.putInt("Color_" + chName, rgb); } } public Color getChannelColor(String chName, int defaultColor) { if (colorPrefs_ != null) { defaultColor = colorPrefs_.getInt("Color_" + chName, defaultColor); } return new Color(defaultColor); } public void copyFromLiveModeToAlbum(VirtualAcquisitionDisplay display) throws MMScriptException, JSONException { ImageCache ic = display.getImageCache(); int channels = ic.getSummaryMetadata().getInt("Channels"); if (channels == 1) { //RGB or monchrome addToAlbum(ic.getImage(0, 0, 0, 0), ic.getDisplayAndComments()); } else { //multicamera for (int i = 0; i < channels; i++) { addToAlbum(ic.getImage(i, 0, 0, 0), ic.getDisplayAndComments()); } } } private void createActiveShutterChooser(JPanel topPanel) { createLabel("Shutter", false, topPanel, 111, 73, 158, 86); shutterComboBox_ = new JComboBox(); shutterComboBox_.setName("Shutter"); shutterComboBox_.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent arg0) { try { if (shutterComboBox_.getSelectedItem() != null) { core_.setShutterDevice((String) shutterComboBox_.getSelectedItem()); } } catch (Exception e) { ReportingUtils.showError(e); } } }); GUIUtils.addWithEdges(topPanel, shutterComboBox_, 170, 70, 275, 92); } private void createBinningChooser(JPanel topPanel) { createLabel("Binning", false, topPanel, 111, 43, 199, 64); comboBinning_ = new JComboBox(); comboBinning_.setName("Binning"); comboBinning_.setFont(new Font("Arial", Font.PLAIN, 10)); comboBinning_.setMaximumRowCount(4); comboBinning_.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { changeBinning(); } }); GUIUtils.addWithEdges(topPanel, comboBinning_, 200, 43, 275, 66); } private void createExposureField(JPanel topPanel) { createLabel("Exposure [ms]", false, topPanel, 111, 23, 198, 39); textFieldExp_ = new JTextField(); textFieldExp_.addFocusListener(new FocusAdapter() { @Override public void focusLost(FocusEvent fe) { synchronized(shutdownLock_) { if (core_ != null) setExposure(); } } }); textFieldExp_.setFont(new Font("Arial", Font.PLAIN, 10)); textFieldExp_.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setExposure(); } }); GUIUtils.addWithEdges(topPanel, textFieldExp_, 203, 21, 276, 40); } private void toggleAutoShutter() { shutterLabel_ = core_.getShutterDevice(); if (shutterLabel_.length() == 0) { toggleShutterButton_.setEnabled(false); } else { if (autoShutterCheckBox_.isSelected()) { try { core_.setAutoShutter(true); core_.setShutterOpen(false); toggleShutterButton_.setSelected(false); toggleShutterButton_.setText("Open"); 
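// With auto shutter enabled the core drives the shutter, so the manual toggle
// button is disabled below; it is re-enabled when auto shutter is switched off.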
toggleShutterButton_.setEnabled(false); } catch (Exception e2) { ReportingUtils.logError(e2); } } else { try { core_.setAutoShutter(false); core_.setShutterOpen(false); toggleShutterButton_.setEnabled(true); toggleShutterButton_.setText("Open"); } catch (Exception exc) { ReportingUtils.logError(exc); } } } } private void createShutterControls(JPanel topPanel) { autoShutterCheckBox_ = new JCheckBox(); autoShutterCheckBox_.setFont(new Font("Arial", Font.PLAIN, 10)); autoShutterCheckBox_.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { toggleAutoShutter(); } }); autoShutterCheckBox_.setIconTextGap(6); autoShutterCheckBox_.setHorizontalTextPosition(SwingConstants.LEADING); autoShutterCheckBox_.setText("Auto shutter"); GUIUtils.addWithEdges(topPanel, autoShutterCheckBox_, 107, 96, 199, 119); toggleShutterButton_ = (JToggleButton) GUIUtils.createButton(true, "toggleShutterButton", "Open", "Open/close the shutter", new Runnable() { public void run() { toggleShutter(); } }, null, topPanel, 203, 96, 275, 117); // Shutter button } private void createCameraSettingsWidgets(JPanel topPanel) { createLabel("Camera settings", true, topPanel, 109, 2, 211, 22); createExposureField(topPanel); createBinningChooser(topPanel); createActiveShutterChooser(topPanel); createShutterControls(topPanel); } private void createConfigurationControls(JPanel topPanel) { createLabel("Configuration settings", true, topPanel, 280, 2, 430, 22); saveConfigButton_ = (JButton) GUIUtils.createButton(false, "saveConfigureButton", "Save", "Save current presets to the configuration file", new Runnable() { public void run() { saveConfigPresets(); } }, null, topPanel, -80, 2, -5, 20); configPad_ = new ConfigGroupPad(); configPadButtonPanel_ = new ConfigPadButtonPanel(); configPadButtonPanel_.setConfigPad(configPad_); configPadButtonPanel_.setGUI(MMStudioMainFrame.getInstance()); configPad_.setFont(new Font("", Font.PLAIN, 10)); GUIUtils.addWithEdges(topPanel, configPad_, 280, 21, -4, -44); GUIUtils.addWithEdges(topPanel, configPadButtonPanel_, 280, -40, -4, -20); } private void createMainButtons(JPanel topPanel) { snapButton_ = (JButton) GUIUtils.createButton(false, "Snap", "Snap", "Snap single image", new Runnable() { public void run() { doSnap(); } }, "camera.png", topPanel, 7, 4, 95, 25); liveButton_ = (JToggleButton) GUIUtils.createButton(true, "Live", "Live", "Continuous live view", new Runnable() { public void run() { enableLiveMode(!isLiveModeOn()); } }, "camera_go.png", topPanel, 7, 26, 95, 47); /* toAlbumButton_ = (JButton) */ GUIUtils.createButton(false, "Album", "Album", "Acquire single frame and add to an album", new Runnable() { public void run() { snapAndAddToImage5D(); } }, "camera_plus_arrow.png", topPanel, 7, 48, 95, 69); /* MDA Button = */ GUIUtils.createButton(false, "Multi-D Acq.", "Multi-D Acq.", "Open multi-dimensional acquisition window", new Runnable() { public void run() { openAcqControlDialog(); } }, "film.png", topPanel, 7, 70, 95, 91); /* Refresh = */ GUIUtils.createButton(false, "Refresh", "Refresh", "Refresh all GUI controls directly from the hardware", new Runnable() { public void run() { core_.updateSystemStateCache(); updateGUI(true); } }, "arrow_refresh.png", topPanel, 7, 92, 95, 113); } private static MetadataPanel createMetadataPanel(JPanel bottomPanel) { MetadataPanel metadataPanel = new MetadataPanel(); GUIUtils.addWithEdges(bottomPanel, metadataPanel, 0, 0, 0, 0); metadataPanel.setBorder(BorderFactory.createEmptyBorder()); return metadataPanel; } 
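/*
 * Layout note: the top and bottom panels created in createSplitPane() use a SpringLayout,
 * and GUIUtils.addWithEdges() (also used indirectly by createLabel() and
 * GUIUtils.createButton()) pins each component by four edge coordinates given as
 * (west, north, east, south). Negative values, as used for saveConfigButton_ and the
 * config pad, appear to be offsets from the opposite (right/bottom) panel edge, which
 * keeps those widgets anchored when the window is resized. For example, the status
 * label added in createTopPanelWidgets() is pinned 5 px from the left edge and 20 px
 * above the bottom edge:
 *
 *   labelImageDimensions_ = createLabel("", false, topPanel, 5, -20, 0, 0);
 */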
private void createPleaLabel(JPanel topPanel) { JLabel citePleaLabel = new JLabel("<html>Please <a href=\"http://micro-manager.org\">cite Micro-Manager</a> so funding will continue!</html>"); citePleaLabel.setFont(new Font("Arial", Font.PLAIN, 11)); GUIUtils.addWithEdges(topPanel, citePleaLabel, 7, 119, 270, 139); class Pleader extends Thread{ Pleader(){ super("pleader"); } @Override public void run(){ try { ij.plugin.BrowserLauncher.openURL("https://micro-manager.org/wiki/Citing_Micro-Manager"); } catch (IOException e1) { ReportingUtils.showError(e1); } } } citePleaLabel.addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { Pleader p = new Pleader(); p.start(); } }); // add a listener to the main ImageJ window to catch it quitting out on us /* * The current version of ImageJ calls the command "Quit", which we * handle in MMStudioPlugin. Calling the closeSequence from here as well * leads to crashes since the core will be cleaned up by one of the two * threads doing the same thing. I do not know since which version of * ImageJ introduced this behavior - NS, 2014-04-26 if (ij.IJ.getInstance() != null) { ij.IJ.getInstance().addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { //closeSequence(true); }; }); } */ } private JSplitPane createSplitPane(int dividerPos) { JPanel topPanel = new JPanel(); JPanel bottomPanel = new JPanel(); topPanel.setLayout(new SpringLayout()); topPanel.setMinimumSize(new Dimension(580, 195)); bottomPanel.setLayout(new SpringLayout()); JSplitPane splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, true, topPanel, bottomPanel); splitPane.setBorder(BorderFactory.createEmptyBorder()); splitPane.setDividerLocation(dividerPos); splitPane.setResizeWeight(0.0); return splitPane; } private void createTopPanelWidgets(JPanel topPanel) { createMainButtons(topPanel); createCameraSettingsWidgets(topPanel); createPleaLabel(topPanel); createUtilityButtons(topPanel); createConfigurationControls(topPanel); labelImageDimensions_ = createLabel("", false, topPanel, 5, -20, 0, 0); } private void createUtilityButtons(JPanel topPanel) { // ROI createLabel("ROI", true, topPanel, 8, 140, 71, 154); setRoiButton_ = GUIUtils.createButton(false, "setRoiButton", null, "Set Region Of Interest to selected rectangle", new Runnable() { public void run() { setROI(); } }, "shape_handles.png", topPanel, 7, 154, 37, 174); clearRoiButton_ = GUIUtils.createButton(false, "clearRoiButton", null, "Reset Region of Interest to full frame", new Runnable() { public void run() { clearROI(); } }, "arrow_out.png", topPanel, 40, 154, 70, 174); // Zoom createLabel("Zoom", true, topPanel, 81, 140, 139, 154); GUIUtils.createButton(false, "zoomInButton", null, "Zoom in", new Runnable() { public void run() { zoomIn(); } }, "zoom_in.png", topPanel, 80, 154, 110, 174); GUIUtils.createButton(false, "zoomOutButton", null, "Zoom out", new Runnable() { public void run() { zoomOut(); } }, "zoom_out.png", topPanel, 113, 154, 143, 174); // Profile createLabel("Profile", true, topPanel, 154, 140, 217, 154); GUIUtils.createButton(false, "lineProfileButton", null, "Open line profile window (requires line selection)", new Runnable() { public void run() { openLineProfileWindow(); } }, "chart_curve.png", topPanel, 153, 154, 183, 174); // Autofocus createLabel("Autofocus", true, topPanel, 194, 140, 276, 154); autofocusNowButton_ = (JButton) GUIUtils.createButton(false, "autofocusNowButton", null, "Autofocus now", new Runnable() { public void run() { 
autofocusNow(); } }, "find.png", topPanel, 193, 154, 223, 174); autofocusConfigureButton_ = (JButton) GUIUtils.createButton(false, "autofocusConfigureButton", null, "Set autofocus options", new Runnable() { public void run() { showAutofocusDialog(); } }, "wrench_orange.png", topPanel, 226, 154, 256, 174); } private void initializeFileMenu() { JMenu fileMenu = GUIUtils.createMenuInMenuBar(menuBar_, "File"); GUIUtils.addMenuItem(fileMenu, "Open (Virtual)...", null, new Runnable() { public void run() { new Thread() { @Override public void run() { openAcquisitionData(false); } }.start(); } }); GUIUtils.addMenuItem(fileMenu, "Open (RAM)...", null, new Runnable() { public void run() { new Thread() { @Override public void run() { openAcquisitionData(true); } }.start(); } }); fileMenu.addSeparator(); GUIUtils.addMenuItem(fileMenu, "Exit", null, new Runnable() { public void run() { closeSequence(false); } }); } private void initializeHelpMenu() { final JMenu helpMenu = GUIUtils.createMenuInMenuBar(menuBar_, "Help"); GUIUtils.addMenuItem(helpMenu, "User's Guide", null, new Runnable() { public void run() { try { ij.plugin.BrowserLauncher.openURL("http://micro-manager.org/wiki/Micro-Manager_User%27s_Guide"); } catch (IOException e1) { ReportingUtils.showError(e1); } } }); GUIUtils.addMenuItem(helpMenu, "Configuration Guide", null, new Runnable() { public void run() { try { ij.plugin.BrowserLauncher.openURL("http://micro-manager.org/wiki/Micro-Manager_Configuration_Guide"); } catch (IOException e1) { ReportingUtils.showError(e1); } } }); if (!systemPrefs_.getBoolean(RegistrationDlg.REGISTRATION, false)) { GUIUtils.addMenuItem(helpMenu, "Register your copy of Micro-Manager...", null, new Runnable() { public void run() { try { RegistrationDlg regDlg = new RegistrationDlg(systemPrefs_); regDlg.setVisible(true); } catch (Exception e1) { ReportingUtils.showError(e1); } } }); } GUIUtils.addMenuItem(helpMenu, "Report Problem...", null, new Runnable() { @Override public void run() { org.micromanager.diagnostics.gui.ProblemReportController.start(core_, options_); } }); GUIUtils.addMenuItem(helpMenu, "About Micromanager", null, new Runnable() { public void run() { MMAboutDlg dlg = new MMAboutDlg(); String versionInfo = "MM Studio version: " + MMVersion.VERSION_STRING; versionInfo += "\n" + core_.getVersionInfo(); versionInfo += "\n" + core_.getAPIVersionInfo(); versionInfo += "\nUser: " + core_.getUserId(); versionInfo += "\nHost: " + core_.getHostName(); dlg.setVersionInfo(versionInfo); dlg.setVisible(true); } }); menuBar_.validate(); } private void initializeToolsMenu() { // Tools menu final JMenu toolsMenu = GUIUtils.createMenuInMenuBar(menuBar_, "Tools"); GUIUtils.addMenuItem(toolsMenu, "Refresh GUI", "Refresh all GUI controls directly from the hardware", new Runnable() { public void run() { core_.updateSystemStateCache(); updateGUI(true); } }, "arrow_refresh.png"); GUIUtils.addMenuItem(toolsMenu, "Rebuild GUI", "Regenerate Micro-Manager user interface", new Runnable() { public void run() { initializeGUI(); core_.updateSystemStateCache(); } }); toolsMenu.addSeparator(); GUIUtils.addMenuItem(toolsMenu, "Image Pipeline...", "Display the image processing pipeline", new Runnable() { public void run() { pipelinePanel_.setVisible(true); } }); GUIUtils.addMenuItem(toolsMenu, "Script Panel...", "Open Micro-Manager script editor window", new Runnable() { public void run() { scriptPanel_.setVisible(true); } }); GUIUtils.addMenuItem(toolsMenu, "Shortcuts...", "Create keyboard shortcuts to activate image acquisition, 
mark positions, or run custom scripts", new Runnable() { public void run() { HotKeysDialog hk = new HotKeysDialog(guiColors_.background.get((options_.displayBackground_))); //hk.setBackground(guiColors_.background.get((options_.displayBackground_))); } }); GUIUtils.addMenuItem(toolsMenu, "Device/Property Browser...", "Open new window to view and edit property values in current configuration", new Runnable() { public void run() { createPropertyEditor(); } }); toolsMenu.addSeparator(); GUIUtils.addMenuItem(toolsMenu, "XY List...", "Open position list manager window", new Runnable() { public void run() { showXYPositionList(); } }, "application_view_list.png"); GUIUtils.addMenuItem(toolsMenu, "Multi-Dimensional Acquisition...", "Open multi-dimensional acquisition setup window", new Runnable() { public void run() { openAcqControlDialog(); } }, "film.png"); centerAndDragMenuItem_ = GUIUtils.addCheckBoxMenuItem(toolsMenu, "Mouse Moves Stage (use Hand Tool)", "When enabled, double clicking or dragging in the snap/live\n" + "window moves the XY-stage. Requires the hand tool.", new Runnable() { public void run() { updateCenterAndDragListener(); IJ.setTool(Toolbar.HAND); mainPrefs_.putBoolean(MOUSE_MOVES_STAGE, centerAndDragMenuItem_.isSelected()); } }, mainPrefs_.getBoolean(MOUSE_MOVES_STAGE, false)); GUIUtils.addMenuItem(toolsMenu, "Pixel Size Calibration...", "Define size calibrations specific to each objective lens. " + "When the objective in use has a calibration defined, " + "micromanager will automatically use it when " + "calculating metadata", new Runnable() { public void run() { createCalibrationListDlg(); } }); /* GUIUtils.addMenuItem(toolsMenu, "Image Processor Manager", "Control the order in which Image Processor plugins" + "are applied to incoming images.", new Runnable() { public void run() { processorStackManager_.show(); } }); */ toolsMenu.addSeparator(); GUIUtils.addMenuItem(toolsMenu, "Hardware Configuration Wizard...", "Open wizard to create new hardware configuration", new Runnable() { public void run() { runHardwareWizard(); } }); GUIUtils.addMenuItem(toolsMenu, "Load Hardware Configuration...", "Un-initialize current configuration and initialize new one", new Runnable() { public void run() { loadConfiguration(); initializeGUI(); } }); GUIUtils.addMenuItem(toolsMenu, "Reload Hardware Configuration", "Shutdown current configuration and initialize most recently loaded configuration", new Runnable() { public void run() { loadSystemConfiguration(); initializeGUI(); } }); for (int i=0; i<5; i++) { JMenuItem configItem = new JMenuItem(); configItem.setText(Integer.toString(i)); switchConfigurationMenu_.add(configItem); } switchConfigurationMenu_.setText("Switch Hardware Configuration"); toolsMenu.add(switchConfigurationMenu_); switchConfigurationMenu_.setToolTipText("Switch between recently used configurations"); GUIUtils.addMenuItem(toolsMenu, "Save Configuration Settings as...", "Save current configuration settings as new configuration file", new Runnable() { public void run() { saveConfigPresets(); updateChannelCombos(); } }); toolsMenu.addSeparator(); final MMStudioMainFrame thisInstance = this; GUIUtils.addMenuItem(toolsMenu, "Options...", "Set a variety of Micro-Manager configuration options", new Runnable() { public void run() { final int oldBufsize = options_.circularBufferSizeMB_; OptionsDlg dlg = new OptionsDlg(options_, core_, mainPrefs_, thisInstance); dlg.setVisible(true); // adjust memory footprint if necessary if (oldBufsize != options_.circularBufferSizeMB_) { try { 
core_.setCircularBufferMemoryFootprint(options_.circularBufferSizeMB_); } catch (Exception exc) { ReportingUtils.showError(exc); } } } }); } private void showRegistrationDialogMaybe() { // show registration dialog if not already registered // first check user preferences (for legacy compatibility reasons) boolean userReg = mainPrefs_.getBoolean(RegistrationDlg.REGISTRATION, false) || mainPrefs_.getBoolean(RegistrationDlg.REGISTRATION_NEVER, false); if (!userReg) { boolean systemReg = systemPrefs_.getBoolean( RegistrationDlg.REGISTRATION, false) || systemPrefs_.getBoolean(RegistrationDlg.REGISTRATION_NEVER, false); if (!systemReg) { // prompt for registration info RegistrationDlg dlg = new RegistrationDlg(systemPrefs_); dlg.setVisible(true); } } } private void updateSwitchConfigurationMenu() { switchConfigurationMenu_.removeAll(); for (final String configFile : MRUConfigFiles_) { if (!configFile.equals(sysConfigFile_)) { GUIUtils.addMenuItem(switchConfigurationMenu_, configFile, null, new Runnable() { public void run() { sysConfigFile_ = configFile; loadSystemConfiguration(); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); } }); } } } public final void addLiveModeListener (LiveModeListener listener) { if (liveModeListeners_.contains(listener)) { return; } liveModeListeners_.add(listener); } public void removeLiveModeListener(LiveModeListener listener) { liveModeListeners_.remove(listener); } public void callLiveModeListeners(boolean enable) { for (LiveModeListener listener : liveModeListeners_) { listener.liveModeEnabled(enable); } } /** * Part of ScriptInterface * Manipulate acquisition so that it looks like a burst */ public void runBurstAcquisition() throws MMScriptException { double interval = engine_.getFrameIntervalMs(); int nr = engine_.getNumFrames(); boolean doZStack = engine_.isZSliceSettingEnabled(); boolean doChannels = engine_.isChannelsSettingEnabled(); engine_.enableZSliceSetting(false); engine_.setFrames(nr, 0); engine_.enableChannelsSetting(false); try { engine_.acquire(); } catch (MMException e) { throw new MMScriptException(e); } engine_.setFrames(nr, interval); engine_.enableZSliceSetting(doZStack); engine_.enableChannelsSetting(doChannels); } public void runBurstAcquisition(int nr) throws MMScriptException { int originalNr = engine_.getNumFrames(); double interval = engine_.getFrameIntervalMs(); engine_.setFrames(nr, 0); this.runBurstAcquisition(); engine_.setFrames(originalNr, interval); } public void runBurstAcquisition(int nr, String name, String root) throws MMScriptException { String originalRoot = engine_.getRootName(); engine_.setDirName(name); engine_.setRootName(root); this.runBurstAcquisition(nr); engine_.setRootName(originalRoot); } /** * @Deprecated * @throws MMScriptException */ public void startBurstAcquisition() throws MMScriptException { runAcquisition(); } public boolean isBurstAcquisitionRunning() throws MMScriptException { if (engine_ == null) return false; return engine_.isAcquisitionRunning(); } private void startLoadingPipelineClass() { Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); acquisitionEngine2010LoadingThread_ = new Thread("Pipeline Class loading thread") { @Override public void run() { try { acquisitionEngine2010Class_ = Class.forName("org.micromanager.AcquisitionEngine2010"); } catch (Exception ex) { ReportingUtils.logError(ex); acquisitionEngine2010Class_ = null; } } }; acquisitionEngine2010LoadingThread_.start(); } /** * Shows images as they appear in the default display window. 
Uses * the default processor stack to process images as they arrive on * the rawImageQueue. */ public void runDisplayThread(BlockingQueue<TaggedImage> rawImageQueue, final DisplayImageRoutine displayImageRoutine) { final BlockingQueue<TaggedImage> processedImageQueue = ProcessorStack.run(rawImageQueue, getAcquisitionEngine().getImageProcessors()); new Thread("Display thread") { @Override public void run() { try { TaggedImage image; do { image = processedImageQueue.take(); if (image != TaggedImageQueue.POISON) { displayImageRoutine.show(image); } } while (image != TaggedImageQueue.POISON); } catch (InterruptedException ex) { ReportingUtils.logError(ex); } } }.start(); } private static JLabel createLabel(String text, boolean big, JPanel parentPanel, int west, int north, int east, int south) { final JLabel label = new JLabel(); label.setFont(new Font("Arial", big ? Font.BOLD : Font.PLAIN, big ? 11 : 10)); label.setText(text); GUIUtils.addWithEdges(parentPanel, label, west, north, east, south); return label; } public interface DisplayImageRoutine { public void show(TaggedImage image); } /** * used to store contrast settings to be later used for initialization of contrast of new windows. * Shouldn't be called by loaded data sets, only * ones that have been acquired */ public void saveChannelHistogramSettings(String channelGroup, String channel, boolean mda, HistogramSettings settings) { String type = mda ? "MDA_" : "SnapLive_"; if (options_.syncExposureMainAndMDA_) { type = ""; //only one group of contrast settings } contrastPrefs_.putInt("ContrastMin_" + channelGroup + "_" + type + channel, settings.min_); contrastPrefs_.putInt("ContrastMax_" + channelGroup + "_" + type + channel, settings.max_); contrastPrefs_.putDouble("ContrastGamma_" + channelGroup + "_" + type + channel, settings.gamma_); contrastPrefs_.putInt("ContrastHistMax_" + channelGroup + "_" + type + channel, settings.histMax_); contrastPrefs_.putInt("ContrastHistDisplayMode_" + channelGroup + "_" + type + channel, settings.displayMode_); } public HistogramSettings loadStoredChannelHisotgramSettings(String channelGroup, String channel, boolean mda) { String type = mda ? 
"MDA_" : "SnapLive_"; if (options_.syncExposureMainAndMDA_) { type = ""; //only one group of contrast settings } return new HistogramSettings( contrastPrefs_.getInt("ContrastMin_" + channelGroup + "_" + type + channel,0), contrastPrefs_.getInt("ContrastMax_" + channelGroup + "_" + type + channel, 65536), contrastPrefs_.getDouble("ContrastGamma_" + channelGroup + "_" + type + channel, 1.0), contrastPrefs_.getInt("ContrastHistMax_" + channelGroup + "_" + type + channel, -1), contrastPrefs_.getInt("ContrastHistDisplayMode_" + channelGroup + "_" + type + channel, 1) ); } private void setExposure() { try { if (!isLiveModeOn()) { core_.setExposure(NumberUtils.displayStringToDouble( textFieldExp_.getText())); } else { liveModeTimer_.stop(); core_.setExposure(NumberUtils.displayStringToDouble( textFieldExp_.getText())); try { liveModeTimer_.begin(); } catch (Exception e) { ReportingUtils.showError("Couldn't restart live mode"); liveModeTimer_.stop(); } } // Display the new exposure time double exposure = core_.getExposure(); textFieldExp_.setText(NumberUtils.doubleToDisplayString(exposure)); // update current channel in MDA window with this exposure String channelGroup = core_.getChannelGroup(); String channel = core_.getCurrentConfigFromCache(channelGroup); if (!channel.equals("") ) { exposurePrefs_.putDouble("Exposure_" + channelGroup + "_" + channel, exposure); if (options_.syncExposureMainAndMDA_) { getAcqDlg().setChannelExposureTime(channelGroup, channel, exposure); } } } catch (Exception exp) { // Do nothing. } } public double getPreferredWindowMag() { return options_.windowMag_; } public boolean getMetadataFileWithMultipageTiff() { return options_.mpTiffMetadataFile_; } public boolean getSeparateFilesForPositionsMPTiff() { return options_.mpTiffSeparateFilesForPositions_; } @Override public boolean getHideMDADisplayOption() { return options_.hideMDADisplay_; } private void updateTitle() { this.setTitle(MICRO_MANAGER_TITLE + " " + MMVersion.VERSION_STRING + " - " + sysConfigFile_); } public void updateLineProfile() { if (WindowManager.getCurrentWindow() == null || profileWin_ == null || !profileWin_.isShowing()) { return; } calculateLineProfileData(WindowManager.getCurrentImage()); profileWin_.setData(lineProfileData_); } private void openLineProfileWindow() { if (WindowManager.getCurrentWindow() == null || WindowManager.getCurrentWindow().isClosed()) { return; } calculateLineProfileData(WindowManager.getCurrentImage()); if (lineProfileData_ == null) { return; } profileWin_ = new GraphFrame(); profileWin_.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); profileWin_.setData(lineProfileData_); profileWin_.setAutoScale(); profileWin_.setTitle("Live line profile"); profileWin_.setBackground(guiColors_.background.get((options_.displayBackground_))); addMMBackgroundListener(profileWin_); profileWin_.setVisible(true); } @Override public Rectangle getROI() throws MMScriptException { // ROI values are given as x,y,w,h in individual one-member arrays (pointers in C++): int[][] a = new int[4][1]; try { core_.getROI(a[0], a[1], a[2], a[3]); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } // Return as a single array with x,y,w,h: return new Rectangle(a[0][0], a[1][0], a[2][0], a[3][0]); } private void calculateLineProfileData(ImagePlus imp) { // generate line profile Roi roi = imp.getRoi(); if (roi == null || !roi.isLine()) { // if there is no line ROI, create one Rectangle r = imp.getProcessor().getRoi(); int iWidth = r.width; int iHeight = r.height; int iXROI = r.x; int iYROI = 
r.y; if (roi == null) { iXROI += iWidth / 2; iYROI += iHeight / 2; } roi = new Line(iXROI - iWidth / 4, iYROI - iWidth / 4, iXROI + iWidth / 4, iYROI + iHeight / 4); imp.setRoi(roi); roi = imp.getRoi(); } ImageProcessor ip = imp.getProcessor(); ip.setInterpolate(true); Line line = (Line) roi; if (lineProfileData_ == null) { lineProfileData_ = new GraphData(); } lineProfileData_.setData(line.getPixels()); } private void setROI() { ImagePlus curImage = WindowManager.getCurrentImage(); if (curImage == null) { return; } Roi roi = curImage.getRoi(); try { if (roi == null) { // if there is no ROI, create one Rectangle r = curImage.getProcessor().getRoi(); int iWidth = r.width; int iHeight = r.height; int iXROI = r.x; int iYROI = r.y; if (roi == null) { iWidth /= 2; iHeight /= 2; iXROI += iWidth / 2; iYROI += iHeight / 2; } curImage.setRoi(iXROI, iYROI, iWidth, iHeight); roi = curImage.getRoi(); } if (roi.getType() != Roi.RECTANGLE) { handleError("ROI must be a rectangle.\nUse the ImageJ rectangle tool to draw the ROI."); return; } Rectangle r = roi.getBounds(); // if we already had an ROI defined, correct for the offsets Rectangle cameraR = getROI(); r.x += cameraR.x; r.y += cameraR.y; // Stop (and restart) live mode if it is running setROI(r); } catch (Exception e) { ReportingUtils.showError(e); } } private void clearROI() { try { boolean liveRunning = false; if (isLiveModeOn()) { liveRunning = true; enableLiveMode(false); } core_.clearROI(); updateStaticInfo(); if (liveRunning) { enableLiveMode(true); } } catch (Exception e) { ReportingUtils.showError(e); } } /** * Returns instance of the core uManager object; */ @Override public CMMCore getMMCore() { return core_; } /** * Returns singleton instance of MMStudioMainFrame */ public static MMStudioMainFrame getInstance() { return gui_; } public MetadataPanel getMetadataPanel() { return metadataPanel_; } public final void setExitStrategy(boolean closeOnExit) { if (closeOnExit) { setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); } else { setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); } } @Override public void saveConfigPresets() { MicroscopeModel model = new MicroscopeModel(); try { model.loadFromFile(sysConfigFile_); model.createSetupConfigsFromHardware(core_); model.createResolutionsFromHardware(core_); File f = FileDialogs.save(this, "Save the configuration file", MM_CONFIG_FILE); if (f != null) { model.saveToFile(f.getAbsolutePath()); sysConfigFile_ = f.getAbsolutePath(); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); configChanged_ = false; setConfigSaveButtonStatus(configChanged_); updateTitle(); } } catch (MMConfigFileException e) { ReportingUtils.showError(e); } } protected void setConfigSaveButtonStatus(boolean changed) { saveConfigButton_.setEnabled(changed); } public String getAcqDirectory() { return openAcqDirectory_; } /** * Get currently used configuration file * @return - Path to currently used configuration file */ public String getSysConfigFile() { return sysConfigFile_; } public void setAcqDirectory(String dir) { openAcqDirectory_ = dir; } /** * Open an existing acquisition directory and build viewer window. 
* */ public void openAcquisitionData(boolean inRAM) { // choose the directory // -------------------- File f = FileDialogs.openDir(this, "Please select an image data set", MM_DATA_SET); if (f != null) { if (f.isDirectory()) { openAcqDirectory_ = f.getAbsolutePath(); } else { openAcqDirectory_ = f.getParent(); } String acq = null; try { acq = openAcquisitionData(openAcqDirectory_, inRAM); } catch (MMScriptException ex) { ReportingUtils.showError(ex); } finally { try { acqMgr_.closeAcquisition(acq); } catch (MMScriptException ex) { ReportingUtils.logError(ex); } } } } @Override public String openAcquisitionData(String dir, boolean inRAM, boolean show) throws MMScriptException { String rootDir = new File(dir).getAbsolutePath(); String name = new File(dir).getName(); rootDir = rootDir.substring(0, rootDir.length() - (name.length() + 1)); name = acqMgr_.getUniqueAcquisitionName(name); acqMgr_.openAcquisition(name, rootDir, show, !inRAM, true); try { getAcquisition(name).initialize(); } catch (MMScriptException mex) { acqMgr_.closeAcquisition(name); throw (mex); } return name; } /** * Opens an existing data set. Shows the acquisition in a window. * @return The acquisition object. */ @Override public String openAcquisitionData(String dir, boolean inRam) throws MMScriptException { return openAcquisitionData(dir, inRam, true); } protected void zoomOut() { ImageWindow curWin = WindowManager.getCurrentWindow(); if (curWin != null) { ImageCanvas canvas = curWin.getCanvas(); Rectangle r = canvas.getBounds(); canvas.zoomOut(r.width / 2, r.height / 2); VirtualAcquisitionDisplay vad = VirtualAcquisitionDisplay.getDisplay(curWin.getImagePlus()); if (vad != null) { vad.storeWindowSizeAfterZoom(curWin); vad.updateWindowTitleAndStatus(); } } } protected void zoomIn() { ImageWindow curWin = WindowManager.getCurrentWindow(); if (curWin != null) { ImageCanvas canvas = curWin.getCanvas(); Rectangle r = canvas.getBounds(); canvas.zoomIn(r.width / 2, r.height / 2); VirtualAcquisitionDisplay vad = VirtualAcquisitionDisplay.getDisplay(curWin.getImagePlus()); if (vad != null) { vad.storeWindowSizeAfterZoom(curWin); vad.updateWindowTitleAndStatus(); } } } protected void changeBinning() { try { boolean liveRunning = false; if (isLiveModeOn() ) { liveRunning = true; enableLiveMode(false); } if (isCameraAvailable()) { Object item = comboBinning_.getSelectedItem(); if (item != null) { core_.setProperty(cameraLabel_, MMCoreJ.getG_Keyword_Binning(), item.toString()); } } updateStaticInfo(); if (liveRunning) { enableLiveMode(true); } } catch (Exception e) { ReportingUtils.showError(e); } } private void createPropertyEditor() { if (propertyBrowser_ != null) { propertyBrowser_.dispose(); } propertyBrowser_ = new PropertyEditor(); propertyBrowser_.setGui(this); propertyBrowser_.setVisible(true); propertyBrowser_.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); propertyBrowser_.setCore(core_); } private void createCalibrationListDlg() { if (calibrationListDlg_ != null) { calibrationListDlg_.dispose(); } calibrationListDlg_ = new CalibrationListDlg(core_); calibrationListDlg_.setVisible(true); calibrationListDlg_.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); calibrationListDlg_.setParentGUI(this); } public CalibrationListDlg getCalibrationListDlg() { if (calibrationListDlg_ == null) { createCalibrationListDlg(); } return calibrationListDlg_; } private void createScriptPanel() { if (scriptPanel_ == null) { scriptPanel_ = new ScriptPanel(core_, options_, this); scriptPanel_.insertScriptingObject(SCRIPT_CORE_OBJECT, core_); 
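// Like executeStartupScript(), the script panel exposes the core and the
// acquisition engine to Beanshell scripts under these predefined object names.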
scriptPanel_.insertScriptingObject(SCRIPT_ACQENG_OBJECT, engine_); scriptPanel_.setParentGUI(this); scriptPanel_.setBackground(guiColors_.background.get((options_.displayBackground_))); addMMBackgroundListener(scriptPanel_); } } private void createPipelinePanel() { if (pipelinePanel_ == null) { pipelinePanel_ = new PipelinePanel(this, engine_); pipelinePanel_.setBackground(guiColors_.background.get((options_.displayBackground_))); addMMBackgroundListener(pipelinePanel_); } } /** * Updates Status line in main window from cached values */ private void updateStaticInfoFromCache() { String dimText = "Image info (from camera): " + staticInfo_.width_ + " X " + staticInfo_.height_ + " X " + staticInfo_.bytesPerPixel_ + ", Intensity range: " + staticInfo_.imageBitDepth_ + " bits"; dimText += ", " + TextUtils.FMT0.format(staticInfo_.pixSizeUm_ * 1000) + "nm/pix"; if (zStageLabel_.length() > 0) { dimText += ", Z=" + TextUtils.FMT2.format(staticInfo_.zPos_) + "um"; } if (xyStageLabel_.length() > 0) { dimText += ", XY=(" + TextUtils.FMT2.format(staticInfo_.x_) + "," + TextUtils.FMT2.format(staticInfo_.y_) + ")um"; } labelImageDimensions_.setText(dimText); } public void updateXYPos(double x, double y) { staticInfo_.x_ = x; staticInfo_.y_ = y; updateStaticInfoFromCache(); } public void updateZPos(double z) { staticInfo_.zPos_ = z; updateStaticInfoFromCache(); } public void updateXYPosRelative(double x, double y) { staticInfo_.x_ += x; staticInfo_.y_ += y; updateStaticInfoFromCache(); } public void updateZPosRelative(double z) { staticInfo_.zPos_ += z; updateStaticInfoFromCache(); } public void updateXYStagePosition(){ double x[] = new double[1]; double y[] = new double[1]; try { if (xyStageLabel_.length() > 0) core_.getXYPosition(xyStageLabel_, x, y); } catch (Exception e) { ReportingUtils.showError(e); } staticInfo_.x_ = x[0]; staticInfo_.y_ = y[0]; updateStaticInfoFromCache(); } private void updatePixSizeUm (double pixSizeUm) { staticInfo_.pixSizeUm_ = pixSizeUm; updateStaticInfoFromCache(); } private void updateStaticInfo() { double zPos = 0.0; double x[] = new double[1]; double y[] = new double[1]; try { if (zStageLabel_.length() > 0) { zPos = core_.getPosition(zStageLabel_); } if (xyStageLabel_.length() > 0) { core_.getXYPosition(xyStageLabel_, x, y); } } catch (Exception e) { handleException(e); } staticInfo_.width_ = core_.getImageWidth(); staticInfo_.height_ = core_.getImageHeight(); staticInfo_.bytesPerPixel_ = core_.getBytesPerPixel(); staticInfo_.imageBitDepth_ = core_.getImageBitDepth(); staticInfo_.pixSizeUm_ = core_.getPixelSizeUm(); staticInfo_.zPos_ = zPos; staticInfo_.x_ = x[0]; staticInfo_.y_ = y[0]; updateStaticInfoFromCache(); } public void toggleShutter() { try { if (!toggleShutterButton_.isEnabled()) return; toggleShutterButton_.requestFocusInWindow(); if (toggleShutterButton_.getText().equals("Open")) { setShutterButton(true); core_.setShutterOpen(true); } else { core_.setShutterOpen(false); setShutterButton(false); } } catch (Exception e1) { ReportingUtils.showError(e1); } } private void updateCenterAndDragListener() { if (centerAndDragMenuItem_.isSelected()) { centerAndDragListener_.start(); } else { centerAndDragListener_.stop(); } } private void setShutterButton(boolean state) { if (state) { toggleShutterButton_.setText("Close"); } else { toggleShutterButton_.setText("Open"); } } private void checkPosListDlg() { if (posListDlg_ == null) { posListDlg_ = new PositionListDlg(core_, this, posList_, acqControlWin_,options_); GUIUtils.recallPosition(posListDlg_); 
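// Apply the current background color scheme and keep the dialog in sync with
// future background changes.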
posListDlg_.setBackground(gui_.getBackgroundColor()); gui_.addMMBackgroundListener(posListDlg_); posListDlg_.addListeners(); } } // ////////////////////////////////////////////////////////////////////////// // public interface available for scripting access // ////////////////////////////////////////////////////////////////////////// @Override public void snapSingleImage() { doSnap(); } public Object getPixels() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null) { return ip.getProcessor().getPixels(); } return null; } public void setPixels(Object obj) { ImagePlus ip = WindowManager.getCurrentImage(); if (ip == null) { return; } ip.getProcessor().setPixels(obj); } public int getImageHeight() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null) return ip.getHeight(); return 0; } public int getImageWidth() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null) return ip.getWidth(); return 0; } public int getImageDepth() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null) return ip.getBitDepth(); return 0; } public ImageProcessor getImageProcessor() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip == null) return null; return ip.getProcessor(); } private boolean isCameraAvailable() { return cameraLabel_.length() > 0; } /** * Part of ScriptInterface API * Opens the XYPositionList when it is not opened * Adds the current position to the list (same as pressing the "Mark" button) */ @Override public void markCurrentPosition() { if (posListDlg_ == null) { showXYPositionList(); } if (posListDlg_ != null) { posListDlg_.markPosition(); } } /** * Implements ScriptInterface */ @Override @Deprecated public AcqControlDlg getAcqDlg() { return acqControlWin_; } /** * Implements ScriptInterface */ @Override @Deprecated public PositionListDlg getXYPosListDlg() { checkPosListDlg(); return posListDlg_; } /** * Implements ScriptInterface */ @Override public boolean isAcquisitionRunning() { if (engine_ == null) return false; return engine_.isAcquisitionRunning(); } /** * Implements ScriptInterface */ @Override public boolean versionLessThan(String version) throws MMScriptException { try { String[] v = MMVersion.VERSION_STRING.split(" ", 2); String[] m = v[0].split("\\.", 3); String[] v2 = version.split(" ", 2); String[] m2 = v2[0].split("\\.", 3); for (int i=0; i < 3; i++) { if (Integer.parseInt(m[i]) < Integer.parseInt(m2[i])) { ReportingUtils.showError("This code needs Micro-Manager version " + version + " or greater"); return true; } if (Integer.parseInt(m[i]) > Integer.parseInt(m2[i])) { return false; } } if (v2.length < 2 || v2[1].equals("") ) return false; if (v.length < 2 ) { ReportingUtils.showError("This code needs Micro-Manager version " + version + " or greater"); return true; } if (Integer.parseInt(v[1]) < Integer.parseInt(v2[1])) { ReportingUtils.showError("This code needs Micro-Manager version " + version + " or greater"); return false; } return true; } catch (Exception ex) { throw new MMScriptException ("Format of version String should be \"a.b.c\""); } } @Override public boolean isLiveModeOn() { return liveModeTimer_ != null && liveModeTimer_.isRunning(); } public LiveModeTimer getLiveModeTimer() { if (liveModeTimer_ == null) { liveModeTimer_ = new LiveModeTimer(); } return liveModeTimer_; } public void updateButtonsForLiveMode(boolean enable) { autoShutterCheckBox_.setEnabled(!enable); if (core_.getAutoShutter()) { toggleShutterButton_.setText(enable ? 
"Close" : "Open" ); } snapButton_.setEnabled(!enable); //toAlbumButton_.setEnabled(!enable); liveButton_.setIcon(enable ? SwingResourceManager.getIcon(MMStudioMainFrame.class, "/org/micromanager/icons/cancel.png") : SwingResourceManager.getIcon(MMStudioMainFrame.class, "/org/micromanager/icons/camera_go.png")); liveButton_.setSelected(false); liveButton_.setText(enable ? "Stop Live" : "Live"); } public boolean getLiveMode() { return isLiveModeOn(); } public boolean updateImage() { try { if (isLiveModeOn()) { enableLiveMode(false); return true; // nothing to do, just show the last image } if (WindowManager.getCurrentWindow() == null) { return false; } ImagePlus ip = WindowManager.getCurrentImage(); core_.snapImage(); Object img = core_.getImage(); ip.getProcessor().setPixels(img); ip.updateAndRepaintWindow(); if (!isCurrentImageFormatSupported()) { return false; } updateLineProfile(); } catch (Exception e) { ReportingUtils.showError(e); return false; } return true; } public boolean displayImage(final Object pixels) { if (pixels instanceof TaggedImage) { return displayTaggedImage((TaggedImage) pixels, true); } else { return displayImage(pixels, true); } } public boolean displayImage(final Object pixels, boolean wait) { checkSimpleAcquisition(); try { int width = getAcquisition(SIMPLE_ACQ).getWidth(); int height = getAcquisition(SIMPLE_ACQ).getHeight(); int byteDepth = getAcquisition(SIMPLE_ACQ).getByteDepth(); TaggedImage ti = ImageUtils.makeTaggedImage(pixels, 0, 0, 0,0, width, height, byteDepth); simpleDisplay_.getImageCache().putImage(ti); simpleDisplay_.showImage(ti, wait); return true; } catch (Exception ex) { ReportingUtils.showError(ex); return false; } } public boolean displayImageWithStatusLine(Object pixels, String statusLine) { boolean ret = displayImage(pixels); simpleDisplay_.displayStatusLine(statusLine); return ret; } public void displayStatusLine(String statusLine) { ImagePlus ip = WindowManager.getCurrentImage(); if (!(ip.getWindow() instanceof VirtualAcquisitionDisplay.DisplayWindow)) { return; } VirtualAcquisitionDisplay.getDisplay(ip).displayStatusLine(statusLine); } private boolean isCurrentImageFormatSupported() { boolean ret = false; long channels = core_.getNumberOfComponents(); long bpp = core_.getBytesPerPixel(); if (channels > 1 && channels != 4 && bpp != 1) { handleError("Unsupported image format."); } else { ret = true; } return ret; } public void doSnap() { doSnap(false); } public void doSnap(final boolean album) { if (core_.getCameraDevice().length() == 0) { ReportingUtils.showError("No camera configured"); return; } BlockingQueue<TaggedImage> snapImageQueue = new LinkedBlockingQueue<TaggedImage>(); try { core_.snapImage(); long c = core_.getNumberOfCameraChannels(); runDisplayThread(snapImageQueue, new DisplayImageRoutine() { @Override public void show(final TaggedImage image) { if (album) { try { addToAlbum(image); } catch (MMScriptException ex) { ReportingUtils.showError(ex); } } else { displayImage(image); } } }); for (int i = 0; i < c; ++i) { TaggedImage img = core_.getTaggedImage(i); img.tags.put("Channels", c); snapImageQueue.put(img); } snapImageQueue.put(TaggedImageQueue.POISON); if (simpleDisplay_ != null) { ImagePlus imgp = simpleDisplay_.getImagePlus(); if (imgp != null) { ImageWindow win = imgp.getWindow(); if (win != null) { win.toFront(); } } } } catch (Exception ex) { ReportingUtils.showError(ex); } } /** * Is this function still needed? It does some magic with tags. 
I found * it to do harmful thing with tags when a Multi-Camera device is * present (that issue is now fixed). */ public void normalizeTags(TaggedImage ti) { if (ti != TaggedImageQueue.POISON) { int channel = 0; try { if (ti.tags.has("ChannelIndex")) { channel = MDUtils.getChannelIndex(ti.tags); } MDUtils.setChannelIndex(ti.tags, channel); MDUtils.setPositionIndex(ti.tags, 0); MDUtils.setSliceIndex(ti.tags, 0); MDUtils.setFrameIndex(ti.tags, 0); } catch (JSONException ex) { ReportingUtils.logError(ex); } } } private boolean displayTaggedImage(TaggedImage ti, boolean update) { try { checkSimpleAcquisition(ti); setCursor(new Cursor(Cursor.WAIT_CURSOR)); ti.tags.put("Summary", getAcquisition(SIMPLE_ACQ).getSummaryMetadata()); addStagePositionToTags(ti); addImage(SIMPLE_ACQ, ti, update, true); } catch (Exception ex) { ReportingUtils.logError(ex); return false; } if (update) { setCursor(new Cursor(Cursor.DEFAULT_CURSOR)); updateLineProfile(); } return true; } public void addStagePositionToTags(TaggedImage ti) throws JSONException { if (gui_.xyStageLabel_.length() > 0) { ti.tags.put("XPositionUm", gui_.staticInfo_.x_); ti.tags.put("YPositionUm", gui_.staticInfo_.y_); } if (gui_.zStageLabel_.length() > 0) { ti.tags.put("ZPositionUm", gui_.staticInfo_.zPos_); } } private void configureBinningCombo() throws Exception { if (cameraLabel_.length() > 0) { ActionListener[] listeners; // binning combo if (comboBinning_.getItemCount() > 0) { comboBinning_.removeAllItems(); } StrVector binSizes = core_.getAllowedPropertyValues( cameraLabel_, MMCoreJ.getG_Keyword_Binning()); listeners = comboBinning_.getActionListeners(); for (int i = 0; i < listeners.length; i++) { comboBinning_.removeActionListener(listeners[i]); } for (int i = 0; i < binSizes.size(); i++) { comboBinning_.addItem(binSizes.get(i)); } comboBinning_.setMaximumRowCount((int) binSizes.size()); if (binSizes.isEmpty()) { comboBinning_.setEditable(true); } else { comboBinning_.setEditable(false); } for (int i = 0; i < listeners.length; i++) { comboBinning_.addActionListener(listeners[i]); } } } public void initializeGUI() { try { // establish device roles cameraLabel_ = core_.getCameraDevice(); shutterLabel_ = core_.getShutterDevice(); zStageLabel_ = core_.getFocusDevice(); xyStageLabel_ = core_.getXYStageDevice(); engine_.setZStageDevice(zStageLabel_); configureBinningCombo(); // active shutter combo try { shutters_ = core_.getLoadedDevicesOfType(DeviceType.ShutterDevice); } catch (Exception e) { ReportingUtils.logError(e); } if (shutters_ != null) { String items[] = new String[(int) shutters_.size()]; for (int i = 0; i < shutters_.size(); i++) { items[i] = shutters_.get(i); } GUIUtils.replaceComboContents(shutterComboBox_, items); String activeShutter = core_.getShutterDevice(); if (activeShutter != null) { shutterComboBox_.setSelectedItem(activeShutter); } else { shutterComboBox_.setSelectedItem(""); } } // Autofocus autofocusConfigureButton_.setEnabled(afMgr_.getDevice() != null); autofocusNowButton_.setEnabled(afMgr_.getDevice() != null); // Rebuild stage list in XY PositinList if (posListDlg_ != null) { posListDlg_.rebuildAxisList(); } updateGUI(true); } catch (Exception e) { ReportingUtils.showError(e); } } /** * Adds plugin_ items to the plugins menu * Adds submenus (currently only 1 level deep) * @param plugin_ - plugin_ to be added to the menu */ public void addPluginToMenu(final PluginLoader.PluginItem plugin) { List<String> path = plugin.getMenuPath(); if (path.size() == 1) { GUIUtils.addMenuItem(pluginMenu_, plugin.getMenuItem(), 
plugin.getTooltip(), new Runnable() { public void run() { ReportingUtils.logMessage("Plugin command: " + plugin.getMenuItem()); MMStudioMainFrame localFrame = MMStudioMainFrame.this; plugin.instantiate(); switch (plugin.getPluginType()) { case PLUGIN_STANDARD: // Standard plugin; create its UI. ((MMPlugin) plugin.getPlugin()).show(); break; case PLUGIN_PROCESSOR: // Processor plugin; check for existing processor of // this type and show its UI if applicable; otherwise // create a new one. MMProcessorPlugin procPlugin = (MMProcessorPlugin) plugin.getPlugin(); String procName = PluginLoader.getNameForPluginClass(procPlugin.getClass()); DataProcessor<TaggedImage> pipelineProcessor = localFrame.engine_.getProcessorRegisteredAs(procName); if (pipelineProcessor == null) { // No extant processor of this type; make a new one, // which automatically adds it to the pipeline. pipelineProcessor = localFrame.engine_.makeProcessor(procName, localFrame); } if (pipelineProcessor != null) { // Show the GUI for this processor. It could be null // if making the processor, above, failed. pipelineProcessor.makeConfigurationGUI(); } break; default: // Unrecognized plugin type; just skip it. ReportingUtils.logError("Unrecognized plugin type " + plugin.getPluginType()); } } }); } if (path.size() == 2) { if (pluginSubMenus_ == null) { pluginSubMenus_ = new HashMap<String, JMenu>(); } String groupName = path.get(0); JMenu submenu = pluginSubMenus_.get(groupName); if (submenu == null) { submenu = new JMenu(groupName); pluginSubMenus_.put(groupName, submenu); submenu.validate(); pluginMenu_.add(submenu); } GUIUtils.addMenuItem(submenu, plugin.getMenuItem(), plugin.getTooltip(), new Runnable() { public void run() { ReportingUtils.logMessage("Plugin command: " + plugin.getMenuItem()); plugin.instantiate(); ((MMPlugin) plugin.getPlugin()).show(); } }); } pluginMenu_.validate(); menuBar_.validate(); } public void updateGUI(boolean updateConfigPadStructure) { updateGUI(updateConfigPadStructure, false); } public void updateGUI(boolean updateConfigPadStructure, boolean fromCache) { try { // establish device roles cameraLabel_ = core_.getCameraDevice(); shutterLabel_ = core_.getShutterDevice(); zStageLabel_ = core_.getFocusDevice(); xyStageLabel_ = core_.getXYStageDevice(); afMgr_.refresh(); // camera settings if (isCameraAvailable()) { double exp = core_.getExposure(); textFieldExp_.setText(NumberUtils.doubleToDisplayString(exp)); configureBinningCombo(); String binSize; if (fromCache) { binSize = core_.getPropertyFromCache(cameraLabel_, MMCoreJ.getG_Keyword_Binning()); } else { binSize = core_.getProperty(cameraLabel_, MMCoreJ.getG_Keyword_Binning()); } GUIUtils.setComboSelection(comboBinning_, binSize); } if (liveModeTimer_ == null || !liveModeTimer_.isRunning()) { autoShutterCheckBox_.setSelected(core_.getAutoShutter()); boolean shutterOpen = core_.getShutterOpen(); setShutterButton(shutterOpen); if (autoShutterCheckBox_.isSelected()) { toggleShutterButton_.setEnabled(false); } else { toggleShutterButton_.setEnabled(true); } } // active shutter combo if (shutters_ != null) { String activeShutter = core_.getShutterDevice(); if (activeShutter != null) { shutterComboBox_.setSelectedItem(activeShutter); } else { shutterComboBox_.setSelectedItem(""); } } // state devices if (updateConfigPadStructure && (configPad_ != null)) { configPad_.refreshStructure(fromCache); // Needed to update read-only properties. May slow things down... 
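// When updateGUI() was asked to read directly from the hardware (fromCache == false),
// also refresh the core's system state cache so read-only properties shown in the
// config pad stay current.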
if (!fromCache) core_.updateSystemStateCache(); } // update Channel menus in Multi-dimensional acquisition dialog updateChannelCombos(); // update list of pixel sizes in pixel size configuration window if (calibrationListDlg_ != null) { calibrationListDlg_.refreshCalibrations(); } if (propertyBrowser_ != null) { propertyBrowser_.refresh(); } } catch (Exception e) { ReportingUtils.logError(e); } updateStaticInfo(); updateTitle(); } //TODO: Deprecated @Override public boolean okToAcquire() { return !isLiveModeOn(); } //TODO: Deprecated @Override public void stopAllActivity() { if (this.acquisitionEngine2010_ != null) { this.acquisitionEngine2010_.stop(); } enableLiveMode(false); } /** * Cleans up resources while shutting down * * @param calledByImageJ * @return flag indicating success. Shut down should abort when flag is false */ private boolean cleanupOnClose(boolean calledByImageJ) { // Save config presets if they were changed. if (configChanged_) { Object[] options = {"Yes", "No"}; int n = JOptionPane.showOptionDialog(null, "Save Changed Configuration?", "Micro-Manager", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0]); if (n == JOptionPane.YES_OPTION) { saveConfigPresets(); // if the configChanged_ flag did not become false, the user // must have cancelled the configuration saving and we should cancel // quitting as well if (configChanged_) { return false; } } } if (liveModeTimer_ != null) liveModeTimer_.stop(); // check needed to avoid deadlock if (!calledByImageJ) { if (!WindowManager.closeAllWindows()) { core_.logMessage("Failed to close some windows"); } } if (profileWin_ != null) { removeMMBackgroundListener(profileWin_); profileWin_.dispose(); } if (scriptPanel_ != null) { removeMMBackgroundListener(scriptPanel_); scriptPanel_.closePanel(); } if (pipelinePanel_ != null) { removeMMBackgroundListener(pipelinePanel_); pipelinePanel_.dispose(); } if (propertyBrowser_ != null) { removeMMBackgroundListener(propertyBrowser_); propertyBrowser_.dispose(); } if (acqControlWin_ != null) { removeMMBackgroundListener(acqControlWin_); acqControlWin_.close(); } if (engine_ != null) { engine_.shutdown(); } if (afMgr_ != null) { afMgr_.closeOptionsDialog(); } engine_.disposeProcessors(); pluginLoader_.disposePlugins(); synchronized (shutdownLock_) { try { if (core_ != null) { ReportingUtils.setCore(null); core_.delete(); core_ = null; } } catch (Exception err) { ReportingUtils.showError(err); } } return true; } private void saveSettings() { Rectangle r = this.getBounds(); mainPrefs_.putInt(MAIN_FRAME_X, r.x); mainPrefs_.putInt(MAIN_FRAME_Y, r.y); mainPrefs_.putInt(MAIN_FRAME_WIDTH, r.width); mainPrefs_.putInt(MAIN_FRAME_HEIGHT, r.height); mainPrefs_.putInt(MAIN_FRAME_DIVIDER_POS, this.splitPane_.getDividerLocation()); mainPrefs_.put(OPEN_ACQ_DIR, openAcqDirectory_); mainPrefs_.put(MAIN_SAVE_METHOD, ImageUtils.getImageStorageClass().getName()); // save field values from the main window // NOTE: automatically restoring these values on startup may cause // problems mainPrefs_.put(MAIN_EXPOSURE, textFieldExp_.getText()); // NOTE: do not save auto shutter state if (afMgr_ != null && afMgr_.getDevice() != null) { mainPrefs_.put(AUTOFOCUS_DEVICE, afMgr_.getDevice().getDeviceName()); } } private void loadConfiguration() { File f = FileDialogs.openFile(this, "Load a config file",MM_CONFIG_FILE); if (f != null) { sysConfigFile_ = f.getAbsolutePath(); configChanged_ = false; setConfigSaveButtonStatus(configChanged_); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); 
loadSystemConfiguration(); } } public synchronized boolean closeSequence(boolean calledByImageJ) { if (!this.isRunning()) { if (core_ != null) { core_.logMessage("MMStudioMainFrame::closeSequence called while running_ is false"); } return true; } if (engine_ != null && engine_.isAcquisitionRunning()) { int result = JOptionPane.showConfirmDialog( this, "Acquisition in progress. Are you sure you want to exit and discard all data?", "Micro-Manager", JOptionPane.YES_NO_OPTION, JOptionPane.INFORMATION_MESSAGE); if (result == JOptionPane.NO_OPTION) { return false; } } stopAllActivity(); try { // Close all image windows associated with MM. Canceling saving of // any of these should abort shutdown if (!acqMgr_.closeAllImageWindows()) { return false; } } catch (MMScriptException ex) { // Not sure what to do here... } if (!cleanupOnClose(calledByImageJ)) { return false; } running_ = false; saveSettings(); try { configPad_.saveSettings(); options_.saveSettings(); hotKeys_.saveSettings(); } catch (NullPointerException e) { if (core_ != null) this.logError(e); } // disposing sometimes hangs ImageJ! // this.dispose(); if (options_.closeOnExit_) { if (!runsAsPlugin_) { System.exit(0); } else { ImageJ ij = IJ.getInstance(); if (ij != null) { ij.quit(); } } } else { this.dispose(); } return true; } /* public void applyContrastSettings(ContrastSettings contrast8, ContrastSettings contrast16) { ImagePlus img = WindowManager.getCurrentImage(); if (img == null|| VirtualAcquisitionDisplay.getDisplay(img) == null ) return; if (img.getBytesPerPixel() == 1) VirtualAcquisitionDisplay.getDisplay(img).setChannelContrast(0, contrast8.min, contrast8.max, contrast8.gamma); else VirtualAcquisitionDisplay.getDisplay(img).setChannelContrast(0, contrast16.min, contrast16.max, contrast16.gamma); } */ //TODO: Deprecated @Override public ContrastSettings getContrastSettings() { ImagePlus img = WindowManager.getCurrentImage(); if (img == null || VirtualAcquisitionDisplay.getDisplay(img) == null ) return null; return VirtualAcquisitionDisplay.getDisplay(img).getChannelContrastSettings(0); } /* public boolean is16bit() { ImagePlus ip = WindowManager.getCurrentImage(); if (ip != null && ip.getProcessor() instanceof ShortProcessor) { return true; } return false; } * */ public boolean isRunning() { return running_; } /** * Executes the beanShell script. This script instance only supports * commands directed to the core object. */ private void executeStartupScript() { // execute startup script File f = new File(startupScriptFile_); if (startupScriptFile_.length() > 0 && f.exists()) { WaitDialog waitDlg = new WaitDialog( "Executing startup script, please wait..."); waitDlg.showDialog(); Interpreter interp = new Interpreter(); try { // insert core object only interp.set(SCRIPT_CORE_OBJECT, core_); interp.set(SCRIPT_ACQENG_OBJECT, engine_); interp.set(SCRIPT_GUI_OBJECT, this); // read text file and evaluate interp.eval(TextUtils.readTextFile(startupScriptFile_)); } catch (IOException exc) { ReportingUtils.logError(exc, "Unable to read the startup script (" + startupScriptFile_ + ")."); } catch (EvalError exc) { ReportingUtils.logError(exc); } finally { waitDlg.closeDialog(); } } else { if (startupScriptFile_.length() > 0) ReportingUtils.logMessage("Startup script file ("+startupScriptFile_+") not present."); } } /** * Loads system configuration from the cfg file. 
*/ private boolean loadSystemConfiguration() { boolean result = true; saveMRUConfigFiles(); final WaitDialog waitDlg = new WaitDialog( "Loading system configuration, please wait..."); waitDlg.setAlwaysOnTop(true); waitDlg.showDialog(); this.setEnabled(false); try { if (sysConfigFile_.length() > 0) { GUIUtils.preventDisplayAdapterChangeExceptions(); core_.waitForSystem(); ignorePropertyChanges_ = true; core_.loadSystemConfiguration(sysConfigFile_); ignorePropertyChanges_ = false; GUIUtils.preventDisplayAdapterChangeExceptions(); } } catch (final Exception err) { GUIUtils.preventDisplayAdapterChangeExceptions(); ReportingUtils.showError(err); result = false; } finally { waitDlg.closeDialog(); } setEnabled(true); initializeGUI(); updateSwitchConfigurationMenu(); FileDialogs.storePath(MM_CONFIG_FILE, new File(sysConfigFile_)); return result; } private void saveMRUConfigFiles() { if (0 < sysConfigFile_.length()) { if (MRUConfigFiles_.contains(sysConfigFile_)) { MRUConfigFiles_.remove(sysConfigFile_); } if (maxMRUCfgs_ <= MRUConfigFiles_.size()) { MRUConfigFiles_.remove(maxMRUCfgs_ - 1); } MRUConfigFiles_.add(0, sysConfigFile_); // save the MRU list to the preferences for (Integer icfg = 0; icfg < MRUConfigFiles_.size(); ++icfg) { String value = ""; if (null != MRUConfigFiles_.get(icfg)) { value = MRUConfigFiles_.get(icfg).toString(); } mainPrefs_.put(CFGFILE_ENTRY_BASE + icfg.toString(), value); } } } private void loadMRUConfigFiles() { sysConfigFile_ = mainPrefs_.get(SYSTEM_CONFIG_FILE, sysConfigFile_); // startupScriptFile_ = mainPrefs_.get(STARTUP_SCRIPT_FILE, // startupScriptFile_); MRUConfigFiles_ = new ArrayList<String>(); for (Integer icfg = 0; icfg < maxMRUCfgs_; ++icfg) { String value = ""; value = mainPrefs_.get(CFGFILE_ENTRY_BASE + icfg.toString(), value); if (0 < value.length()) { File ruFile = new File(value); if (ruFile.exists()) { if (!MRUConfigFiles_.contains(value)) { MRUConfigFiles_.add(value); } } } } // initialize MRU list from old persistant data containing only SYSTEM_CONFIG_FILE if (0 < sysConfigFile_.length()) { if (!MRUConfigFiles_.contains(sysConfigFile_)) { // in case persistant data is inconsistent if (maxMRUCfgs_ <= MRUConfigFiles_.size()) { MRUConfigFiles_.remove(maxMRUCfgs_ - 1); } MRUConfigFiles_.add(0, sysConfigFile_); } } } /** * Opens Acquisition dialog. 
*/ private void openAcqControlDialog() { try { if (acqControlWin_ == null) { acqControlWin_ = new AcqControlDlg(engine_, mainPrefs_, this, options_); } if (acqControlWin_.isActive()) { acqControlWin_.setTopPosition(); } acqControlWin_.setVisible(true); acqControlWin_.repaint(); } catch (Exception exc) { ReportingUtils.showError(exc, "\nAcquistion window failed to open due to invalid or corrupted settings.\n" + "Try resetting registry settings to factory defaults (Menu Tools|Options)."); } } private void updateChannelCombos() { if (this.acqControlWin_ != null) { this.acqControlWin_.updateChannelAndGroupCombo(); } } private void runHardwareWizard() { try { if (configChanged_) { Object[] options = {"Yes", "No"}; int n = JOptionPane.showOptionDialog(null, "Save Changed Configuration?", "Micro-Manager", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0]); if (n == JOptionPane.YES_OPTION) { saveConfigPresets(); } configChanged_ = false; } boolean liveRunning = false; if (isLiveModeOn()) { liveRunning = true; enableLiveMode(false); } // unload all devices before starting configurator core_.reset(); GUIUtils.preventDisplayAdapterChangeExceptions(); // run Configurator ConfiguratorDlg2 cfg2 = null; try { setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); cfg2 = new ConfiguratorDlg2(core_, sysConfigFile_); } finally { setCursor(Cursor.getDefaultCursor()); } if (cfg2 == null) { ReportingUtils.showError("Failed to launch Hardware Configuration Wizard"); return; } cfg2.setVisible(true); GUIUtils.preventDisplayAdapterChangeExceptions(); // re-initialize the system with the new configuration file sysConfigFile_ = cfg2.getFileName(); mainPrefs_.put(SYSTEM_CONFIG_FILE, sysConfigFile_); loadSystemConfiguration(); GUIUtils.preventDisplayAdapterChangeExceptions(); if (liveRunning) { enableLiveMode(liveRunning); } } catch (Exception e) { ReportingUtils.showError(e); } } private void autofocusNow() { if (afMgr_.getDevice() != null) { new Thread() { @Override public void run() { try { boolean lmo = isLiveModeOn(); if (lmo) { enableLiveMode(false); } afMgr_.getDevice().fullFocus(); if (lmo) { enableLiveMode(true); } } catch (MMException ex) { ReportingUtils.logError(ex); } } }.start(); // or any other method from Autofocus.java API } } private class ExecuteAcq implements Runnable { public ExecuteAcq() { } @Override public void run() { if (acqControlWin_ != null) { acqControlWin_.runAcquisition(); } } } private void testForAbortRequests() throws MMScriptException { if (scriptPanel_ != null) { if (scriptPanel_.stopRequestPending()) { throw new MMScriptException("Script interrupted by the user!"); } } } // ////////////////////////////////////////////////////////////////////////// // Script interface // ////////////////////////////////////////////////////////////////////////// @Override public String getVersion() { return MMVersion.VERSION_STRING; } /** * Inserts version info for various components in the Corelog */ @Override public void logStartupProperties() { core_.logMessage("MM Studio version: " + getVersion()); core_.logMessage(core_.getVersionInfo()); core_.logMessage(core_.getAPIVersionInfo()); core_.logMessage("Operating System: " + System.getProperty("os.name") + " (" + System.getProperty("os.arch") + ") " + System.getProperty("os.version")); core_.logMessage("JVM: " + System.getProperty("java.vm.name") + ", version " + System.getProperty("java.version") + ", " + System.getProperty("sun.arch.data.model") + "-bit"); } @Override public void makeActive() { toFront(); } 
@Override public boolean displayImage(TaggedImage ti) { normalizeTags(ti); return displayTaggedImage(ti, true); } /** * Opens a dialog to record stage positions */ @Override public void showXYPositionList() { checkPosListDlg(); posListDlg_.setVisible(true); } @Override public void setConfigChanged(boolean status) { configChanged_ = status; setConfigSaveButtonStatus(configChanged_); } /** * Lets JComponents register themselves so that their background can be * manipulated */ @Override public void addMMBackgroundListener(Component comp) { if (MMFrames_.contains(comp)) return; MMFrames_.add(comp); } /** * Lets JComponents remove themselves from the list whose background gets * changes */ @Override public void removeMMBackgroundListener(Component comp) { if (!MMFrames_.contains(comp)) return; MMFrames_.remove(comp); } /** * Returns exposure time for the desired preset in the given channelgroup * Acquires its info from the preferences * Same thing is used in MDA window, but this class keeps its own copy * * @param channelGroup * @param channel - * @param defaultExp - default value * @return exposure time */ @Override public double getChannelExposureTime(String channelGroup, String channel, double defaultExp) { return exposurePrefs_.getDouble("Exposure_" + channelGroup + "_" + channel, defaultExp); } /** * Updates the exposure time in the given preset * Will also update current exposure if it the given channel and channelgroup * are the current one * * @param channelGroup - * * @param channel - preset for which to change exposure time * @param exposure - desired exposure time */ @Override public void setChannelExposureTime(String channelGroup, String channel, double exposure) { try { exposurePrefs_.putDouble("Exposure_" + channelGroup + "_" + channel, exposure); if (channelGroup != null && channelGroup.equals(core_.getChannelGroup())) { if (channel != null && !channel.equals("") && channel.equals(core_.getCurrentConfigFromCache(channelGroup))) { textFieldExp_.setText(NumberUtils.doubleToDisplayString(exposure)); setExposure(); } } } catch (Exception ex) { ReportingUtils.logError("Failed to set Exposure prefs using Channelgroup: " + channelGroup + ", channel: " + channel + ", exposure: " + exposure); } } @Override public void enableRoiButtons(final boolean enabled) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { setRoiButton_.setEnabled(enabled); clearRoiButton_.setEnabled(enabled); } }); } @Override public boolean getAutoreloadOption() { return options_.autoreloadDevices_; } /** * Returns the current background color * @return current background color */ @Override public Color getBackgroundColor() { return guiColors_.background.get((options_.displayBackground_)); } /* * Changes background color of this window and all other MM windows */ @Override public void setBackgroundStyle(String backgroundType) { setBackground(guiColors_.background.get((backgroundType))); paint(MMStudioMainFrame.this.getGraphics()); // sets background of all registered Components for (Component comp:MMFrames_) { if (comp != null) comp.setBackground(guiColors_.background.get(backgroundType)); } } @Override public String getBackgroundStyle() { return options_.displayBackground_; } @Override public ImageWindow getSnapLiveWin() { if (simpleDisplay_ == null) { return null; } return simpleDisplay_.getHyperImage().getWindow(); } /** * @Deprecated - used to be in api/AcquisitionEngine */ public void startAcquisition() throws MMScriptException { testForAbortRequests(); SwingUtilities.invokeLater(new 
ExecuteAcq()); } @Override public String runAcquisition() throws MMScriptException { if (SwingUtilities.isEventDispatchThread()) { throw new MMScriptException("Acquisition can not be run from this (EDT) thread"); } testForAbortRequests(); if (acqControlWin_ != null) { String name = acqControlWin_.runAcquisition(); try { while (acqControlWin_.isAcquisitionRunning()) { Thread.sleep(50); } } catch (InterruptedException e) { ReportingUtils.showError(e); } return name; } else { throw new MMScriptException( "Acquisition setup window must be open for this command to work."); } } @Override public String runAcquisition(String name, String root) throws MMScriptException { testForAbortRequests(); if (acqControlWin_ != null) { String acqName = acqControlWin_.runAcquisition(name, root); try { while (acqControlWin_.isAcquisitionRunning()) { Thread.sleep(100); } // ensure that the acquisition has finished. // This does not seem to work, needs something better MMAcquisition acq = acqMgr_.getAcquisition(acqName); boolean finished = false; while (!finished) { ImageCache imCache = acq.getImageCache(); if (imCache != null) { if (imCache.isFinished()) { finished = true; } else { Thread.sleep(100); } } } } catch (InterruptedException e) { ReportingUtils.showError(e); } return acqName; } else { throw new MMScriptException( "Acquisition setup window must be open for this command to work."); } } /** * @Deprecated used to be part of api */ public String runAcqusition(String name, String root) throws MMScriptException { return runAcquisition(name, root); } /** * Loads acquisition settings from file * @param path file containing previously saved acquisition settings * @throws MMScriptException */ @Override public void loadAcquisition(String path) throws MMScriptException { testForAbortRequests(); try { engine_.shutdown(); // load protocol if (acqControlWin_ != null) { acqControlWin_.loadAcqSettingsFromFile(path); } } catch (Exception ex) { throw new MMScriptException(ex.getMessage()); } } @Override public void setPositionList(PositionList pl) throws MMScriptException { testForAbortRequests(); // use serialization to clone the PositionList object posList_ = pl; // PositionList.newInstance(pl); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { if (posListDlg_ != null) posListDlg_.setPositionList(posList_); if (engine_ != null) engine_.setPositionList(posList_); if (acqControlWin_ != null) acqControlWin_.updateGUIContents(); } }); } @Override public PositionList getPositionList() throws MMScriptException { testForAbortRequests(); // use serialization to clone the PositionList object return posList_; //PositionList.newInstance(posList_); } @Override public void sleep(long ms) throws MMScriptException { if (scriptPanel_ != null) { if (scriptPanel_.stopRequestPending()) { throw new MMScriptException("Script interrupted by the user!"); } scriptPanel_.sleep(ms); } } @Override public String getUniqueAcquisitionName(String stub) { return acqMgr_.getUniqueAcquisitionName(stub); } //@Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, int nrPositions) throws MMScriptException { this.openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, nrPositions, true, false); } //@Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices) throws MMScriptException { openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, 0); } //@Override public void openAcquisition(String name, String rootDir, 
int nrFrames, int nrChannels, int nrSlices, int nrPositions, boolean show) throws MMScriptException { this.openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, nrPositions, show, false); } //@Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, boolean show) throws MMScriptException { this.openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, 0, show, false); } @Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, int nrPositions, boolean show, boolean save) throws MMScriptException { acqMgr_.openAcquisition(name, rootDir, show, save); MMAcquisition acq = acqMgr_.getAcquisition(name); acq.setDimensions(nrFrames, nrChannels, nrSlices, nrPositions); } //@Override public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, boolean show, boolean virtual) throws MMScriptException { this.openAcquisition(name, rootDir, nrFrames, nrChannels, nrSlices, 0, show, virtual); } //@Override public String createAcquisition(JSONObject summaryMetadata, boolean diskCached) { return createAcquisition(summaryMetadata, diskCached, false); } @Override @Deprecated public String createAcquisition(JSONObject summaryMetadata, boolean diskCached, boolean displayOff) { return acqMgr_.createAcquisition(summaryMetadata, diskCached, engine_, displayOff); } //@Override public void initializeSimpleAcquisition(String name, int width, int height, int byteDepth, int bitDepth, int multiCamNumCh) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); acq.setImagePhysicalDimensions(width, height, byteDepth, bitDepth, multiCamNumCh); acq.initializeSimpleAcq(); } @Override public void initializeAcquisition(String name, int width, int height, int byteDepth, int bitDepth) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); //number of multi-cam cameras is set to 1 here for backwards compatibility //might want to change this later acq.setImagePhysicalDimensions(width, height, byteDepth, bitDepth, 1); acq.initialize(); } @Override public int getAcquisitionImageWidth(String acqName) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getWidth(); } @Override public int getAcquisitionImageHeight(String acqName) throws MMScriptException{ MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getHeight(); } @Override public int getAcquisitionImageBitDepth(String acqName) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getBitDepth(); } @Override public int getAcquisitionImageByteDepth(String acqName) throws MMScriptException{ MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getByteDepth(); } @Override public int getAcquisitionMultiCamNumChannels(String acqName) throws MMScriptException{ MMAcquisition acq = acqMgr_.getAcquisition(acqName); return acq.getMultiCameraNumChannels(); } @Override public Boolean acquisitionExists(String name) { return acqMgr_.acquisitionExists(name); } @Override public void closeAcquisition(String name) throws MMScriptException { acqMgr_.closeAcquisition(name); } /** * @Deprecated use closeAcquisitionWindow instead * @Deprecated - used to be in api/AcquisitionEngine */ public void closeAcquisitionImage5D(String acquisitionName) throws MMScriptException { acqMgr_.closeImageWindow(acquisitionName); } @Override public void closeAcquisitionWindow(String acquisitionName) throws MMScriptException { 
acqMgr_.closeImageWindow(acquisitionName); } /** * @Deprecated - used to be in api/AcquisitionEngine * Since Burst and normal acquisition are now carried out by the same engine, * loadBurstAcquistion simply calls loadAcquisition * t * @param path - path to file specifying acquisition settings */ public void loadBurstAcquisition(String path) throws MMScriptException { this.loadAcquisition(path); } @Override public void refreshGUI() { updateGUI(true); } @Override public void refreshGUIFromCache() { updateGUI(true, true); } @Override public void setAcquisitionProperty(String acqName, String propertyName, String value) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(acqName); acq.setProperty(propertyName, value); } public void setAcquisitionSystemState(String acqName, JSONObject md) throws MMScriptException { // acqMgr_.getAcquisition(acqName).setSystemState(md); setAcquisitionSummary(acqName, md); } //@Override public void setAcquisitionSummary(String acqName, JSONObject md) throws MMScriptException { acqMgr_.getAcquisition(acqName).setSummaryProperties(md); } @Override public void setImageProperty(String acqName, int frame, int channel, int slice, String propName, String value) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(acqName); acq.setProperty(frame, channel, slice, propName, value); } @Override public String getCurrentAlbum() { return acqMgr_.getCurrentAlbum(); } @Override public void enableLiveMode(boolean enable) { if (core_ == null) { return; } if (enable == isLiveModeOn()) { return; } if (enable) { try { if (core_.getCameraDevice().length() == 0) { ReportingUtils.showError("No camera configured"); updateButtonsForLiveMode(false); return; } if (liveModeTimer_ == null) { liveModeTimer_ = new LiveModeTimer(); } liveModeTimer_.begin(); callLiveModeListeners(enable); } catch (Exception e) { ReportingUtils.showError(e); liveModeTimer_.stop(); callLiveModeListeners(false); updateButtonsForLiveMode(false); return; } } else { liveModeTimer_.stop(); callLiveModeListeners(enable); } updateButtonsForLiveMode(enable); } public String createNewAlbum() { return acqMgr_.createNewAlbum(); } public void appendImage(String name, TaggedImage taggedImg) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); int f = 1 + acq.getLastAcquiredFrame(); try { MDUtils.setFrameIndex(taggedImg.tags, f); } catch (JSONException e) { throw new MMScriptException("Unable to set the frame index."); } acq.insertTaggedImage(taggedImg, f, 0, 0); } @Override public void addToAlbum(TaggedImage taggedImg) throws MMScriptException { addToAlbum(taggedImg, null); } public void addToAlbum(TaggedImage taggedImg, JSONObject displaySettings) throws MMScriptException { normalizeTags(taggedImg); acqMgr_.addToAlbum(taggedImg,displaySettings); } public void addImage(String name, Object img, int frame, int channel, int slice) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); acq.insertImage(img, frame, channel, slice); } //@Override public void addImage(String name, TaggedImage taggedImg) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(name); if (!acq.isInitialized()) { JSONObject tags = taggedImg.tags; // initialize physical dimensions of the image try { int width = tags.getInt(MMTags.Image.WIDTH); int height = tags.getInt(MMTags.Image.HEIGHT); int byteDepth = MDUtils.getDepth(tags); int bitDepth = tags.getInt(MMTags.Image.BIT_DEPTH); initializeAcquisition(name, width, height, byteDepth, bitDepth); } catch 
(JSONException e) { throw new MMScriptException(e); } } acq.insertImage(taggedImg); } @Override /** * The basic method for adding images to an existing data set. * If the acquisition was not previously initialized, it will attempt to initialize it from the available image data */ public void addImageToAcquisition(String name, int frame, int channel, int slice, int position, TaggedImage taggedImg) throws MMScriptException { // TODO: complete the tag set and initialize the acquisition MMAcquisition acq = acqMgr_.getAcquisition(name); int positions = acq.getPositions(); // check position, for multi-position data set the number of declared positions should be at least 2 if (acq.getPositions() <= 1 && position > 0) { throw new MMScriptException("The acquisition was open as a single position data set.\n" + "Open acqusition with two or more positions in order to crate a multi-position data set."); } // check position, for multi-position data set the number of declared positions should be at least 2 if (acq.getChannels() <= channel) { throw new MMScriptException("This acquisition was opened with " + acq.getChannels() + " channels.\n" + "The channel number must not exceed declared number of positions."); } JSONObject tags = taggedImg.tags; // if the acquisition was not previously initialized, set physical dimensions of the image if (!acq.isInitialized()) { // automatically initialize physical dimensions of the image try { int width = tags.getInt(MMTags.Image.WIDTH); int height = tags.getInt(MMTags.Image.HEIGHT); int byteDepth = MDUtils.getDepth(tags); int bitDepth = byteDepth * 8; if (tags.has(MMTags.Image.BIT_DEPTH)) { bitDepth = tags.getInt(MMTags.Image.BIT_DEPTH); } initializeAcquisition(name, width, height, byteDepth, bitDepth); } catch (JSONException e) { throw new MMScriptException(e); } } // create required coordinate tags try { tags.put(MMTags.Image.FRAME_INDEX, frame); tags.put(MMTags.Image.FRAME, frame); tags.put(MMTags.Image.CHANNEL_INDEX, channel); tags.put(MMTags.Image.SLICE_INDEX, slice); tags.put(MMTags.Image.POS_INDEX, position); if (!tags.has(MMTags.Summary.SLICES_FIRST) && !tags.has(MMTags.Summary.TIME_FIRST)) { // add default setting tags.put(MMTags.Summary.SLICES_FIRST, true); tags.put(MMTags.Summary.TIME_FIRST, false); } if (acq.getPositions() > 1) { // if no position name is defined we need to insert a default one if (tags.has(MMTags.Image.POS_NAME)) { tags.put(MMTags.Image.POS_NAME, "Pos" + position); } } // update frames if necessary if (acq.getFrames() <= frame) { acq.setProperty(MMTags.Summary.FRAMES, Integer.toString(frame + 1)); } } catch (JSONException e) { throw new MMScriptException(e); } // System.out.println("Inserting frame: " + frame + ", channel: " + channel + ", slice: " + slice + ", pos: " + position); acq.insertImage(taggedImg); } @Override /** * A quick way to implicitly snap an image and add it to the data set. Works * in the same way as above. 
*/ public void snapAndAddImage(String name, int frame, int channel, int slice, int position) throws MMScriptException { TaggedImage ti; try { if (core_.isSequenceRunning()) { ti = core_.getLastTaggedImage(); } else { core_.snapImage(); ti = core_.getTaggedImage(); } MDUtils.setChannelIndex(ti.tags, channel); MDUtils.setFrameIndex(ti.tags, frame); MDUtils.setSliceIndex(ti.tags, slice); MDUtils.setPositionIndex(ti.tags, position); MMAcquisition acq = acqMgr_.getAcquisition(name); if (!acq.isInitialized()) { long width = core_.getImageWidth(); long height = core_.getImageHeight(); long depth = core_.getBytesPerPixel(); long bitDepth = core_.getImageBitDepth(); int multiCamNumCh = (int) core_.getNumberOfCameraChannels(); acq.setImagePhysicalDimensions((int) width, (int) height, (int) depth, (int) bitDepth, multiCamNumCh); acq.initialize(); } if (acq.getPositions() > 1) { MDUtils.setPositionName(ti.tags, "Pos" + position); } addImageToAcquisition(name, frame, channel, slice, position, ti); } catch (Exception e) { throw new MMScriptException(e); } } //@Override public void addImage(String name, TaggedImage img, boolean updateDisplay) throws MMScriptException { acqMgr_.getAcquisition(name).insertImage(img, updateDisplay); } //@Override public void addImage(String name, TaggedImage taggedImg, boolean updateDisplay, boolean waitForDisplay) throws MMScriptException { acqMgr_.getAcquisition(name).insertImage(taggedImg, updateDisplay, waitForDisplay); } //@Override public void addImage(String name, TaggedImage taggedImg, int frame, int channel, int slice, int position) throws MMScriptException { try { acqMgr_.getAcquisition(name).insertImage(taggedImg, frame, channel, slice, position); } catch (JSONException ex) { ReportingUtils.showError(ex); } } //@Override public void addImage(String name, TaggedImage taggedImg, int frame, int channel, int slice, int position, boolean updateDisplay) throws MMScriptException { try { acqMgr_.getAcquisition(name).insertImage(taggedImg, frame, channel, slice, position, updateDisplay); } catch (JSONException ex) { ReportingUtils.showError(ex); } } //@Override public void addImage(String name, TaggedImage taggedImg, int frame, int channel, int slice, int position, boolean updateDisplay, boolean waitForDisplay) throws MMScriptException { try { acqMgr_.getAcquisition(name).insertImage(taggedImg, frame, channel, slice, position, updateDisplay, waitForDisplay); } catch (JSONException ex) { ReportingUtils.showError(ex); } } /** * Closes all acquisitions */ @Override public void closeAllAcquisitions() { acqMgr_.closeAll(); } @Override public String[] getAcquisitionNames() { return acqMgr_.getAcqusitionNames(); } @Override @Deprecated public MMAcquisition getAcquisition(String name) throws MMScriptException { return acqMgr_.getAcquisition(name); } @Override public ImageCache getAcquisitionImageCache(String acquisitionName) throws MMScriptException { return getAcquisition(acquisitionName).getImageCache(); } private class ScriptConsoleMessage implements Runnable { String msg_; public ScriptConsoleMessage(String text) { msg_ = text; } @Override public void run() { if (scriptPanel_ != null) scriptPanel_.message(msg_); } } @Override public void message(String text) throws MMScriptException { if (scriptPanel_ != null) { if (scriptPanel_.stopRequestPending()) { throw new MMScriptException("Script interrupted by the user!"); } SwingUtilities.invokeLater(new ScriptConsoleMessage(text)); } } @Override public void clearMessageWindow() throws MMScriptException { if (scriptPanel_ != null) { 
if (scriptPanel_.stopRequestPending()) { throw new MMScriptException("Script interrupted by the user!"); } scriptPanel_.clearOutput(); } } public void clearOutput() throws MMScriptException { clearMessageWindow(); } public void clear() throws MMScriptException { clearMessageWindow(); } @Override public void setChannelContrast(String title, int channel, int min, int max) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(title); acq.setChannelContrast(channel, min, max); } @Override public void setChannelName(String title, int channel, String name) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(title); acq.setChannelName(channel, name); } @Override public void setChannelColor(String title, int channel, Color color) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(title); acq.setChannelColor(channel, color.getRGB()); } @Override public void setContrastBasedOnFrame(String title, int frame, int slice) throws MMScriptException { MMAcquisition acq = acqMgr_.getAcquisition(title); acq.setContrastBasedOnFrame(frame, slice); } @Override public void setStagePosition(double z) throws MMScriptException { try { core_.setPosition(core_.getFocusDevice(),z); core_.waitForDevice(core_.getFocusDevice()); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public void setRelativeStagePosition(double z) throws MMScriptException { try { core_.setRelativePosition(core_.getFocusDevice(), z); core_.waitForDevice(core_.getFocusDevice()); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public void setXYStagePosition(double x, double y) throws MMScriptException { try { core_.setXYPosition(core_.getXYStageDevice(), x, y); core_.waitForDevice(core_.getXYStageDevice()); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public void setRelativeXYStagePosition(double x, double y) throws MMScriptException { try { core_.setRelativeXYPosition(core_.getXYStageDevice(), x, y); core_.waitForDevice(core_.getXYStageDevice()); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public Point2D.Double getXYStagePosition() throws MMScriptException { String stage = core_.getXYStageDevice(); if (stage.length() == 0) { throw new MMScriptException("XY Stage device is not available"); } double x[] = new double[1]; double y[] = new double[1]; try { core_.getXYPosition(stage, x, y); Point2D.Double pt = new Point2D.Double(x[0], y[0]); return pt; } catch (Exception e) { throw new MMScriptException(e.getMessage()); } } @Override public String getXYStageName() { return core_.getXYStageDevice(); } @Override public void setXYOrigin(double x, double y) throws MMScriptException { String xyStage = core_.getXYStageDevice(); try { core_.setAdapterOriginXY(xyStage, x, y); } catch (Exception e) { throw new MMScriptException(e); } } public AcquisitionWrapperEngine getAcquisitionEngine() { return engine_; } @Override public String installAutofocusPlugin(String className) { try { return installAutofocusPlugin(Class.forName(className)); } catch (ClassNotFoundException e) { String msg = "Internal error: AF manager not instantiated."; ReportingUtils.logError(e, msg); return msg; } } public String installAutofocusPlugin(Class<?> autofocus) { String msg = autofocus.getSimpleName() + " module loaded."; if (afMgr_ != null) { afMgr_.setAFPluginClassName(autofocus.getSimpleName()); try { afMgr_.refresh(); } catch (MMException e) { msg = e.getMessage(); 
ReportingUtils.logError(e); } } else { msg = "Internal error: AF manager not instantiated."; } return msg; } public CMMCore getCore() { return core_; } @Override public IAcquisitionEngine2010 getAcquisitionEngine2010() { try { acquisitionEngine2010LoadingThread_.join(); if (acquisitionEngine2010_ == null) { acquisitionEngine2010_ = (IAcquisitionEngine2010) acquisitionEngine2010Class_.getConstructor(ScriptInterface.class).newInstance(this); } return acquisitionEngine2010_; } catch (Exception e) { ReportingUtils.logError(e); return null; } } @Override public void addImageProcessor(DataProcessor<TaggedImage> processor) { getAcquisitionEngine().addImageProcessor(processor); } @Override public void removeImageProcessor(DataProcessor<TaggedImage> processor) { getAcquisitionEngine().removeImageProcessor(processor); } @Override public ArrayList<DataProcessor<TaggedImage>> getImageProcessorPipeline() { return getAcquisitionEngine().getImageProcessorPipeline(); } public void registerProcessorClass(Class<?> processorClass, String name) { getAcquisitionEngine().registerProcessorClass(processorClass, name); } // NB will need @Override tags once these functions are exposed in the // ScriptInterface. @Override public void setImageProcessorPipeline(List<DataProcessor<TaggedImage>> pipeline) { getAcquisitionEngine().setImageProcessorPipeline(pipeline); } @Override public void setPause(boolean state) { getAcquisitionEngine().setPause(state); } @Override public boolean isPaused() { return getAcquisitionEngine().isPaused(); } @Override public void attachRunnable(int frame, int position, int channel, int slice, Runnable runnable) { getAcquisitionEngine().attachRunnable(frame, position, channel, slice, runnable); } @Override public void clearRunnables() { getAcquisitionEngine().clearRunnables(); } @Override public SequenceSettings getAcquisitionSettings() { if (engine_ == null) return new SequenceSettings(); return engine_.getSequenceSettings(); } // Deprecated; use correctly spelled version. (Used to be part of API.) public SequenceSettings getAcqusitionSettings() { return getAcquisitionSettings(); } @Override public void setAcquisitionSettings(SequenceSettings ss) { if (engine_ == null) return; engine_.setSequenceSettings(ss); acqControlWin_.updateGUIContents(); } // Deprecated; use correctly spelled version. (Used to be part of API.) public void setAcqusitionSettings(SequenceSettings ss) { setAcquisitionSettings(ss); } @Override public String getAcquisitionPath() { if (engine_ == null) return null; return engine_.getImageCache().getDiskLocation(); } @Override public void promptToSaveAcquisition(String name, boolean prompt) throws MMScriptException { getAcquisition(name).promptToSave(prompt); } // Deprecated; use correctly spelled version. (Used to be part of API.) 
public void promptToSaveAcqusition(String name, boolean prompt) throws MMScriptException { promptToSaveAcquisition(name, prompt); } @Override public void setROI(Rectangle r) throws MMScriptException { boolean liveRunning = false; if (isLiveModeOn()) { liveRunning = true; enableLiveMode(false); } try { core_.setROI(r.x, r.y, r.width, r.height); } catch (Exception e) { throw new MMScriptException(e.getMessage()); } updateStaticInfo(); if (liveRunning) { enableLiveMode(true); } } public void snapAndAddToImage5D() { if (core_.getCameraDevice().length() == 0) { ReportingUtils.showError("No camera configured"); return; } try { if (this.isLiveModeOn()) { copyFromLiveModeToAlbum(simpleDisplay_); } else { doSnap(true); } } catch (Exception ex) { ReportingUtils.logError(ex); } } public void setAcquisitionEngine(AcquisitionWrapperEngine eng) { engine_ = eng; } public void suspendLiveMode() { liveModeSuspended_ = isLiveModeOn(); enableLiveMode(false); } public void resumeLiveMode() { if (liveModeSuspended_) { enableLiveMode(true); } } @Override public Autofocus getAutofocus() { return afMgr_.getDevice(); } @Override public void showAutofocusDialog() { if (afMgr_.getDevice() != null) { afMgr_.showOptionsDialog(); } } @Override public AutofocusManager getAutofocusManager() { return afMgr_; } public void selectConfigGroup(String groupName) { configPad_.setGroup(groupName); } public String regenerateDeviceList() { Cursor oldc = Cursor.getDefaultCursor(); Cursor waitc = new Cursor(Cursor.WAIT_CURSOR); setCursor(waitc); StringBuffer resultFile = new StringBuffer(); MicroscopeModel.generateDeviceListFile(resultFile, core_); //MicroscopeModel.generateDeviceListFile(); setCursor(oldc); return resultFile.toString(); } @Override public void setImageSavingFormat(Class imageSavingClass) throws MMScriptException { if (! (imageSavingClass.equals(TaggedImageStorageDiskDefault.class) || imageSavingClass.equals(TaggedImageStorageMultipageTiff.class))) { throw new MMScriptException("Unrecognized saving class"); } ImageUtils.setImageStorageClass(imageSavingClass); if (acqControlWin_ != null) { acqControlWin_.updateSavingTypeButtons(); } } /** * Allows MMListeners to register themselves */ @Override public void addMMListener(MMListenerInterface newL) { if (MMListeners_.contains(newL)) return; MMListeners_.add(newL); } /** * Allows MMListeners to remove themselves */ @Override public void removeMMListener(MMListenerInterface oldL) { if (!MMListeners_.contains(oldL)) return; MMListeners_.remove(oldL); } @Override public void logMessage(String msg) { ReportingUtils.logMessage(msg); } @Override public void showMessage(String msg) { ReportingUtils.showMessage(msg); } @Override public void logError(Exception e, String msg) { ReportingUtils.logError(e, msg); } @Override public void logError(Exception e) { ReportingUtils.logError(e); } @Override public void logError(String msg) { ReportingUtils.logError(msg); } @Override public void showError(Exception e, String msg) { ReportingUtils.showError(e, msg); } @Override public void showError(Exception e) { ReportingUtils.showError(e); } @Override public void showError(String msg) { ReportingUtils.showError(msg); } }
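The acquisition methods visible above (getUniqueAcquisitionName, openAcquisition, addImageToAcquisition, closeAcquisitionWindow) are easiest to follow in a short usage sketch. The following is a hypothetical Beanshell-style fragment for the script panel, assuming the conventional bindings gui (this ScriptInterface) and mmc (the CMMCore); the output directory and acquisition sizes are arbitrary examples, not values from this commit.

// Hypothetical usage sketch of the acquisition API shown above.
// Assumes script-panel bindings: gui = ScriptInterface, mmc = CMMCore; "/tmp/mmdata" is an example path.
String acqName = gui.getUniqueAcquisitionName("demo");
int nrFrames = 5;
// open a 5-frame, single-channel, single-slice, single-position data set, shown and saved to disk
gui.openAcquisition(acqName, "/tmp/mmdata", nrFrames, 1, 1, 1, true, true);
for (int frame = 0; frame < nrFrames; frame++) {
   mmc.snapImage();
   TaggedImage img = mmc.getTaggedImage();
   // frame, channel, slice and position indices; remaining tags are filled in by addImageToAcquisition
   gui.addImageToAcquisition(acqName, frame, 0, 0, 0, img);
}
gui.closeAcquisitionWindow(acqName);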
Fixed a bug that prevented ProcessorPlugins from working when selected from a non-root menu item. git-svn-id: 03a8048b5ee8463be5048a3801110fb50f378627@13304 d0ab736e-dc22-4aeb-8dc9-08def0aa14fd
mmstudio/src/org/micromanager/MMStudioMainFrame.java
Fixed a bug that prevented ProcessorPlugins from working when selected from a non-root menu item.
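For context on the fix described by this commit message: in the old addPluginToMenu shown earlier, the submenu branch (path.size() == 2) always casts to MMPlugin and calls show(), which fails for processor plugins. The fragment below is a sketch of what the submenu Runnable plausibly needs, mirroring the dispatch already present in the top-level (path.size() == 1) branch; it reuses the types and engine calls visible there and is an illustration, not the literal patch.

// Hypothetical sketch: dispatch on plugin type inside the submenu Runnable as well,
// mirroring the existing path.size() == 1 branch of addPluginToMenu.
plugin.instantiate();
switch (plugin.getPluginType()) {
   case PLUGIN_STANDARD:
      ((MMPlugin) plugin.getPlugin()).show();
      break;
   case PLUGIN_PROCESSOR:
      MMProcessorPlugin procPlugin = (MMProcessorPlugin) plugin.getPlugin();
      String procName = PluginLoader.getNameForPluginClass(procPlugin.getClass());
      DataProcessor<TaggedImage> proc = engine_.getProcessorRegisteredAs(procName);
      if (proc == null) {
         // makeProcessor also registers the new processor with the acquisition pipeline
         proc = engine_.makeProcessor(procName, MMStudioMainFrame.this);
      }
      if (proc != null) {
         proc.makeConfigurationGUI();
      }
      break;
   default:
      ReportingUtils.logError("Unrecognized plugin type " + plugin.getPluginType());
}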
Java
mit
598f659accdbe6029d2facf2459aed6154696dad
0
DemigodsRPG/Demigods3
package com.legit2.Demigods; import java.sql.ResultSet; import java.sql.SQLException; import java.util.HashMap; import java.util.Map.Entry; import org.bukkit.OfflinePlayer; import org.bukkit.entity.Player; import com.legit2.Demigods.Utilities.DCharUtil; import com.legit2.Demigods.Utilities.DDataUtil; import com.legit2.Demigods.Utilities.DObjUtil; import com.legit2.Demigods.Utilities.DPlayerUtil; import com.legit2.Demigods.Utilities.DUtil; public class DDatabase { /* * initializeDatabase() : Loads the MySQL or SQLite database. */ public static void initializeDatabase() { // Check if MySQL is enabled in the configuration and if so, attempts to connect. if(DConfig.getSettingBoolean("mysql")) { DMySQL.createConnection(); DMySQL.initializeMySQL(); loadAllData(); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * uninitializeDatabase() : Unloads the MySQL or SQLite database. */ public static void uninitializeDatabase() { saveAllData(); if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { DMySQL.uninitializeMySQL(); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * addPlayerToDB() : Adds the player to the database. */ public static void addPlayerToDB(OfflinePlayer player) throws SQLException { // Define variables Long firstLoginTime = System.currentTimeMillis(); // Next we add them to the Database if needed if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { int playerID = DPlayerUtil.getPlayerID(player); String playerName = player.getName(); String addQuery = "INSERT INTO " + DMySQL.player_table + " (player_id, player_name, player_characters, player_kills, player_deaths, player_firstlogin, player_lastlogin) VALUES (" + playerID + ",'" + playerName + "', NULL, 0, 0," + firstLoginTime + "," + firstLoginTime +");"; DMySQL.runQuery(addQuery); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * removePlayerFromDB() : Removes the player from the database. */ public static void removePlayerFromDB(OfflinePlayer player) throws SQLException { // Next we add them to the Database if needed if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { // TODO: Remove player from MySQL } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * addPlayerToDB() : Adds the player to the database. 
*/ public static void addCharToDB(OfflinePlayer player, int charID) throws SQLException { // Next we add them to the Database if needed if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { int playerID = DPlayerUtil.getPlayerID(player); boolean charActive = DCharUtil.isActive(charID); String charName = DCharUtil.getName(charID); String charDeity = DCharUtil.getDeity(charID); String charAlliance = DCharUtil.getAlliance(charID); boolean charImmortal = DCharUtil.getImmortal(charID); int charHP = DCharUtil.getHP(charID); float charExp = DCharUtil.getExp(charID); int charFavor = DCharUtil.getFavor(charID); int charDevotion = DCharUtil.getDevotion(charID); int charAscensions = DCharUtil.getAscensions(charID); double charLastX = 0.0; double charLastY = 0.0; double charLastZ = 0.0; String charLastW = ""; String addQuery = "INSERT INTO " + DMySQL.character_table + "(char_id,player_id,char_active,char_name,char_deity,char_alliance,char_immortal,char_hp,char_exp,char_favor,char_devotion,char_ascensions,char_lastX,char_lastY,char_lastZ,char_lastW)" + "VALUES (" + charID + "," + playerID + "," + charActive + "," + "'" + charName + "'," + "'" + charDeity + "'," + "'" + charAlliance + "'," + charImmortal + "," + charHP + "," + charExp + "," + charFavor + "," + charDevotion + "," + charAscensions + "," + charLastX + "," + charLastY + "," + charLastZ + "," + "'" + charLastW + "'" + ");"; DMySQL.runQuery(addQuery); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * getPlayerInfo() : Grabs the player info from MySQL/FlatFile and returns (ResultSet)result. */ public static ResultSet getPlayerInfo(String username) throws SQLException { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { // TODO: Return player info from MySQL } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return null; } /* * loadAllData() : Loads all data from database into HashMaps. 
*/ public static void loadAllData() { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { DUtil.info("Loading Demigods data..."); // Define variables int playerCount = 0; int characterCount = 0; long startStopwatch = System.currentTimeMillis(); // Define SELECT queries String selectPlayer = "SELECT * FROM " + DMySQL.player_table + " LEFT JOIN " + DMySQL.playerdata_table + " ON " + DMySQL.player_table + ".player_id = " + DMySQL.playerdata_table + ".player_id;"; ResultSet playerResult = DMySQL.runQuery(selectPlayer); try { while(playerResult.next()) { playerCount++; OfflinePlayer player = DPlayerUtil.definePlayer(playerResult.getString("player_name")); int playerID = playerResult.getInt("player_id"); // Load the main player data DDataUtil.addPlayer(player, playerID); DDataUtil.savePlayerData(player, "player_id", playerResult.getInt("player_id")); DDataUtil.savePlayerData(player, "player_characters", playerResult.getString("player_characters")); DDataUtil.savePlayerData(player, "player_kills", playerResult.getInt("player_kills")); DDataUtil.savePlayerData(player, "player_deaths", playerResult.getInt("player_deaths")); DDataUtil.savePlayerData(player, "player_firstlogin", playerResult.getLong("player_firstlogin")); // Load other player data if(playerResult.getString("datakey") != null) { if(playerResult.getString("datakey").contains("boolean_")) { DDataUtil.savePlayerData(player, playerResult.getString("datakey"), playerResult.getBoolean("datavalue")); } else { DDataUtil.savePlayerData(player, playerResult.getString("datakey"), playerResult.getString("datavalue")); } } String selectCharacter = "SELECT * FROM " + DMySQL.character_table + " LEFT JOIN " + DMySQL.chardata_table + " ON " + DMySQL.character_table + ".char_id = " + DMySQL.chardata_table + ".char_id AND " + DMySQL.character_table + ".player_id=" + playerID + ";"; ResultSet charResult = DMySQL.runQuery(selectCharacter); while(charResult.next()) { characterCount++; int charID = charResult.getInt("char_id"); // Load the main character data DDataUtil.addChar(charID); DDataUtil.saveCharData(charID, "char_owner", charResult.getString("player_id")); DDataUtil.saveCharData(charID, "char_name", charResult.getString("char_name")); DDataUtil.saveCharData(charID, "char_active", charResult.getString("char_active")); DDataUtil.saveCharData(charID, "char_deity", charResult.getString("char_deity")); DDataUtil.saveCharData(charID, "char_alliance", charResult.getString("char_alliance")); DDataUtil.saveCharData(charID, "char_immortal", charResult.getBoolean("char_immortal")); DDataUtil.saveCharData(charID, "char_hp", charResult.getInt("char_hp")); DDataUtil.saveCharData(charID, "char_exp", charResult.getInt("char_exp")); DDataUtil.saveCharData(charID, "char_lastX", charResult.getDouble("char_lastX")); DDataUtil.saveCharData(charID, "char_lastY", charResult.getDouble("char_lastY")); DDataUtil.saveCharData(charID, "char_lastZ", charResult.getDouble("char_lastZ")); DDataUtil.saveCharData(charID, "char_lastW", charResult.getString("char_lastW")); DDataUtil.saveCharData(charID, "char_favor", charResult.getInt("char_favor")); DDataUtil.saveCharData(charID, "char_devotion", charResult.getInt("char_devotion")); DDataUtil.saveCharData(charID, "char_ascensions", charResult.getInt("char_ascensions")); // Load other character data if(charResult.getString("datakey") != null) { if(charResult.getString("datakey").contains("boolean_")) { DDataUtil.saveCharData(charID, charResult.getString("datakey"), charResult.getBoolean("datavalue")); } else { 
DDataUtil.saveCharData(charID, charResult.getString("datakey"), charResult.getString("datavalue")); } } } } } catch(SQLException e) { // There was an error with the SQL. DUtil.severe("Error while loading Demigods data. (ERR: 1001)"); e.printStackTrace(); } // Stop the timer long stopStopwatch = System.currentTimeMillis(); double totalTime = (double) (stopStopwatch - startStopwatch); // Send data load success message if(DConfig.getSettingBoolean("data_debug")) DUtil.info("Loaded data for " + playerCount + " players and " + characterCount + " characters in " + totalTime/1000 + " seconds."); else DUtil.info("Loaded data for " + playerCount + " players and " + characterCount + " characters."); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * saveAllData() : Saves all HashMap data to database. */ public static boolean saveAllData() { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { // Define variables int playerCount = 0; long startTimer = System.currentTimeMillis(); // Save plugin-specific data savePlugin(); long stopTimer = System.currentTimeMillis(); double totalTime = (double) (stopTimer - startTimer); if(DConfig.getSettingBoolean("data_debug")) DUtil.info("Demigods plugin data saved in " + totalTime/1000 + " seconds."); else DUtil.info("Demigods plugin data saved."); for(Player player : DUtil.getOnlinePlayers()) { if(savePlayer(player)) playerCount++; } // Stop the timer stopTimer = System.currentTimeMillis(); totalTime = (double) (stopTimer - startTimer); // Send save success message if(DConfig.getSettingBoolean("data_debug")) DUtil.info("Success! Saved " + playerCount + " of " + DMySQL.getRows(DMySQL.runQuery("SELECT * FROM " + DMySQL.player_table + ";")) + " players in " + totalTime/1000 + " seconds."); else DUtil.info("Success! Saved " + playerCount + " of " + DMySQL.getRows(DMySQL.runQuery("SELECT * FROM " + DMySQL.player_table + ";")) + " players."); return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } /* * savePlayerData() : Saves all HashMap data for (OfflinePlayer)player to database. 
*/ public static boolean savePlayer(OfflinePlayer player) { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { int playerID = DPlayerUtil.getPlayerID(player); // Clear tables first DMySQL.runQuery("DELETE FROM " + DMySQL.playerdata_table + " WHERE player_id=" + playerID); // Save their player-specific data HashMap<String, Object> allPlayerData = DDataUtil.getAllPlayerData(player); // Define player-specific variables String playerChars = (String) allPlayerData.get("player_characters"); int playerKills = DObjUtil.toInteger(allPlayerData.get("player_kills")); int playerDeaths = DObjUtil.toInteger(allPlayerData.get("player_deaths")); Long playerLastLogin = (Long) allPlayerData.get("player_lastlogin"); // Update main player table DMySQL.runQuery("UPDATE " + DMySQL.player_table + " SET player_characters='" + playerChars + "',player_kills=" + playerKills + ",player_deaths=" + playerDeaths + ",player_lastlogin=" + playerLastLogin + " WHERE player_id=" + playerID + ";"); // Save miscellaneous player data DMySQL.runQuery("DELETE FROM " + DMySQL.playerdata_table + " WHERE player_id=" + playerID + ";"); for(Entry<String, Object> playerData : allPlayerData.entrySet()) if(!playerData.getKey().contains("player_")) DMySQL.runQuery("INSERT INTO " + DMySQL.playerdata_table + " (player_id, datakey, datavalue) VALUES(" + playerID + ",'" + playerData.getKey() + "','" + playerData.getValue() + "');"); // Save their character-specific data now HashMap<Integer, HashMap<String, Object>> playerCharData = DDataUtil.getAllPlayerChars(player); for(Entry<Integer, HashMap<String, Object>> playerChar : playerCharData.entrySet()) { // Define character-specific variables int charID = playerChar.getKey(); boolean charImmortal = DObjUtil.toBoolean(playerCharData.get(charID).get("char_immortal")); int charHP = DObjUtil.toInteger(playerCharData.get(charID).get("char_hp")); float charExp = DObjUtil.toFloat(playerCharData.get(charID).get("char_exp")); int charFavor = DObjUtil.toInteger(playerCharData.get(charID).get("char_favor")); int charDevotion = DObjUtil.toInteger(playerCharData.get(charID).get("char_devotion")); int charAscensions = DObjUtil.toInteger(playerCharData.get(charID).get("char_ascensions")); Double charLastX = (Double) playerCharData.get(charID).get("char_lastx"); Double charLastY = (Double) playerCharData.get(charID).get("char_lasty"); Double charLastZ = (Double) playerCharData.get(charID).get("char_lastz"); String charLastW = (String) playerCharData.get(charID).get("char_lastw"); // Update main character table DMySQL.runQuery("UPDATE " + DMySQL.character_table + " SET char_immortal=" + charImmortal + ",char_hp=" + charHP + ",char_exp=" + charExp + ",char_favor=" + charFavor + ",char_devotion=" + charDevotion + ",char_ascensions=" + charAscensions + ",char_lastX=" + charLastX + ",char_lastY=" + charLastY + ",char_lastZ=" + charLastZ + ",char_lastW='" + charLastW + "' WHERE char_id=" + charID + ";"); // Save miscellaneous character data HashMap<String, Object> charData = playerChar.getValue(); DMySQL.runQuery("DELETE FROM " + DMySQL.chardata_table + " WHERE char_id=" + charID + ";"); for(Entry<String, Object> character : charData.entrySet()) if(!character.getKey().contains("char_")) DMySQL.runQuery("INSERT INTO " + DMySQL.chardata_table + " (char_id, datakey, datavalue) VALUES(" + charID + ",'" + character.getKey() + "','" + character.getValue() + "');"); } return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } /* * savePluginData() : Saves all HashMap 
data for the plugin to the database. */ public static boolean savePlugin() { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { // Clear tables first DMySQL.runQuery("TRUNCATE TABLE " + DMySQL.plugindata_table + ";"); // Save their player-specific data HashMap<String, HashMap<String, Object>> allPluginData = DDataUtil.getAllPluginData(); // Save data for(Entry<String, HashMap<String, Object>> pluginData : allPluginData.entrySet()) { String dataID = pluginData.getKey(); for(Entry<String, Object> data : pluginData.getValue().entrySet()) if(!pluginData.getKey().contains("temp_")) { String dataKey = data.getKey(); Object dataValue = data.getValue(); DMySQL.runQuery("INSERT INTO " + DMySQL.plugindata_table + " (data_id, datakey, datavalue) VALUES('" + dataID + "','" + dataKey + "','" + dataValue + "');"); } } return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } /* * removePlayer() : Removes the player completely from the database. */ public static boolean removePlayer(OfflinePlayer player) { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { int playerID = DPlayerUtil.getPlayerID(player); DMySQL.runQuery("DELETE FROM " + DMySQL.player_table + " WHERE player_id=" + playerID + ";"); DMySQL.runQuery("DELETE FROM " + DMySQL.playerdata_table + " WHERE player_id=" + playerID + ";"); DMySQL.runQuery("DELETE FROM " + DMySQL.character_table + " WHERE player_id=" + playerID + ";"); return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } /* * removeChar() : Removes the character completely from the database. */ public static boolean removeChar(int charID) { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { DMySQL.runQuery("DELETE FROM " + DMySQL.character_table + " WHERE char_id=" + charID + ";"); DMySQL.runQuery("DELETE FROM " + DMySQL.chardata_table + " WHERE char_id=" + charID + ";"); return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } }
src/com/legit2/Demigods/DDatabase.java
package com.legit2.Demigods; import java.sql.ResultSet; import java.sql.SQLException; import java.util.HashMap; import java.util.Map.Entry; import org.bukkit.OfflinePlayer; import org.bukkit.entity.Player; import com.legit2.Demigods.Utilities.DCharUtil; import com.legit2.Demigods.Utilities.DDataUtil; import com.legit2.Demigods.Utilities.DObjUtil; import com.legit2.Demigods.Utilities.DPlayerUtil; import com.legit2.Demigods.Utilities.DUtil; public class DDatabase { /* * initializeDatabase() : Loads the MySQL or SQLite database. */ public static void initializeDatabase() { // Check if MySQL is enabled in the configuration and if so, attempts to connect. if(DConfig.getSettingBoolean("mysql")) { DMySQL.createConnection(); DMySQL.initializeMySQL(); loadAllData(); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * uninitializeDatabase() : Unloads the MySQL or SQLite database. */ public static void uninitializeDatabase() { saveAllData(); if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { DMySQL.uninitializeMySQL(); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * addPlayerToDB() : Adds the player to the database. */ public static void addPlayerToDB(OfflinePlayer player) throws SQLException { // Define variables Long firstLoginTime = System.currentTimeMillis(); // Next we add them to the Database if needed if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { int playerID = DPlayerUtil.getPlayerID(player); String playerName = player.getName(); String addQuery = "INSERT INTO " + DMySQL.player_table + " (player_id, player_name, player_characters, player_kills, player_deaths, player_firstlogin, player_lastlogin) VALUES (" + playerID + ",'" + playerName + "', NULL, 0, 0," + firstLoginTime + "," + firstLoginTime +");"; DMySQL.runQuery(addQuery); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * removePlayerFromDB() : Removes the player from the database. */ public static void removePlayerFromDB(OfflinePlayer player) throws SQLException { // Next we add them to the Database if needed if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { // TODO: Remove player from MySQL } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * addPlayerToDB() : Adds the player to the database. 
*/ public static void addCharToDB(OfflinePlayer player, int charID) throws SQLException { // Next we add them to the Database if needed if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { int playerID = DPlayerUtil.getPlayerID(player); boolean charActive = DCharUtil.isActive(charID); String charName = DCharUtil.getName(charID); String charDeity = DCharUtil.getDeity(charID); String charAlliance = DCharUtil.getAlliance(charID); boolean charImmortal = DCharUtil.getImmortal(charID); int charHP = DCharUtil.getHP(charID); float charExp = DCharUtil.getExp(charID); int charFavor = DCharUtil.getFavor(charID); int charDevotion = DCharUtil.getDevotion(charID); int charAscensions = DCharUtil.getAscensions(charID); double charLastX = 0.0; double charLastY = 0.0; double charLastZ = 0.0; String charLastW = ""; String addQuery = "INSERT INTO " + DMySQL.character_table + "(char_id,player_id,char_active,char_name,char_deity,char_alliance,char_immortal,char_hp,char_exp,char_favor,char_devotion,char_ascensions,char_lastX,char_lastY,char_lastZ,char_lastW)" + "VALUES (" + charID + "," + playerID + "," + charActive + "," + "'" + charName + "'," + "'" + charDeity + "'," + "'" + charAlliance + "'," + charImmortal + "," + charHP + "," + charExp + "," + charFavor + "," + charDevotion + "," + charAscensions + "," + charLastX + "," + charLastY + "," + charLastZ + "," + "'" + charLastW + "'" + ");"; DMySQL.runQuery(addQuery); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * getPlayerInfo() : Grabs the player info from MySQL/FlatFile and returns (ResultSet)result. */ public static ResultSet getPlayerInfo(String username) throws SQLException { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { // TODO: Return player info from MySQL } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return null; } /* * loadAllData() : Loads all data from database into HashMaps. 
*/ public static void loadAllData() { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { DUtil.info("Loading Demigods data..."); // Define variables int playerCount = 0; int characterCount = 0; long startStopwatch = System.currentTimeMillis(); // Define SELECT queries String selectPlayer = "SELECT * FROM " + DMySQL.player_table + " LEFT JOIN " + DMySQL.playerdata_table + " ON " + DMySQL.player_table + ".player_id = " + DMySQL.playerdata_table + ".player_id;"; ResultSet playerResult = DMySQL.runQuery(selectPlayer); try { while(playerResult.next()) { playerCount++; OfflinePlayer player = DPlayerUtil.definePlayer(playerResult.getString("player_name")); int playerID = playerResult.getInt("player_id"); // Load the main player data DDataUtil.addPlayer(player, playerID); DDataUtil.savePlayerData(player, "player_id", playerResult.getString("player_id")); DDataUtil.savePlayerData(player, "player_characters", playerResult.getString("player_characters")); DDataUtil.savePlayerData(player, "player_kills", playerResult.getInt("player_kills")); DDataUtil.savePlayerData(player, "player_deaths", playerResult.getInt("player_deaths")); DDataUtil.savePlayerData(player, "player_firstlogin", playerResult.getLong("player_firstlogin")); // Load other player data if(playerResult.getString("datakey") != null) { if(playerResult.getString("datakey").contains("boolean_")) { DDataUtil.savePlayerData(player, playerResult.getString("datakey"), playerResult.getBoolean("datavalue")); } else { DDataUtil.savePlayerData(player, playerResult.getString("datakey"), playerResult.getString("datavalue")); } } String selectCharacter = "SELECT * FROM " + DMySQL.character_table + " LEFT JOIN " + DMySQL.chardata_table + " ON " + DMySQL.character_table + ".char_id = " + DMySQL.chardata_table + ".char_id AND " + DMySQL.character_table + ".player_id=" + playerID + ";"; ResultSet charResult = DMySQL.runQuery(selectCharacter); while(charResult.next()) { characterCount++; int charID = charResult.getInt("char_id"); // Load the main character data DDataUtil.addChar(charID); DDataUtil.saveCharData(charID, "char_owner", charResult.getString("player_id")); DDataUtil.saveCharData(charID, "char_name", charResult.getString("char_name")); DDataUtil.saveCharData(charID, "char_active", charResult.getString("char_active")); DDataUtil.saveCharData(charID, "char_deity", charResult.getString("char_deity")); DDataUtil.saveCharData(charID, "char_alliance", charResult.getString("char_alliance")); DDataUtil.saveCharData(charID, "char_immortal", charResult.getBoolean("char_immortal")); DDataUtil.saveCharData(charID, "char_hp", charResult.getInt("char_hp")); DDataUtil.saveCharData(charID, "char_exp", charResult.getInt("char_exp")); DDataUtil.saveCharData(charID, "char_lastX", charResult.getDouble("char_lastX")); DDataUtil.saveCharData(charID, "char_lastY", charResult.getDouble("char_lastY")); DDataUtil.saveCharData(charID, "char_lastZ", charResult.getDouble("char_lastZ")); DDataUtil.saveCharData(charID, "char_lastW", charResult.getString("char_lastW")); DDataUtil.saveCharData(charID, "char_favor", charResult.getInt("char_favor")); DDataUtil.saveCharData(charID, "char_devotion", charResult.getInt("char_devotion")); DDataUtil.saveCharData(charID, "char_ascensions", charResult.getInt("char_ascensions")); // Load other character data if(charResult.getString("datakey") != null) { if(charResult.getString("datakey").contains("boolean_")) { DDataUtil.saveCharData(charID, charResult.getString("datakey"), charResult.getBoolean("datavalue")); } else { 
DDataUtil.saveCharData(charID, charResult.getString("datakey"), charResult.getString("datavalue")); } } } } } catch(SQLException e) { // There was an error with the SQL. DUtil.severe("Error while loading Demigods data. (ERR: 1001)"); e.printStackTrace(); } // Stop the timer long stopStopwatch = System.currentTimeMillis(); double totalTime = (double) (stopStopwatch - startStopwatch); // Send data load success message if(DConfig.getSettingBoolean("data_debug")) DUtil.info("Loaded data for " + playerCount + " players and " + characterCount + " characters in " + totalTime/1000 + " seconds."); else DUtil.info("Loaded data for " + playerCount + " players and " + characterCount + " characters."); } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } } /* * saveAllData() : Saves all HashMap data to database. */ public static boolean saveAllData() { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { // Define variables int playerCount = 0; long startTimer = System.currentTimeMillis(); // Save plugin-specific data savePlugin(); long stopTimer = System.currentTimeMillis(); double totalTime = (double) (stopTimer - startTimer); if(DConfig.getSettingBoolean("data_debug")) DUtil.info("Demigods plugin data saved in " + totalTime/1000 + " seconds."); else DUtil.info("Demigods plugin data saved."); for(Player player : DUtil.getOnlinePlayers()) { if(savePlayer(player)) playerCount++; } // Stop the timer stopTimer = System.currentTimeMillis(); totalTime = (double) (stopTimer - startTimer); // Send save success message if(DConfig.getSettingBoolean("data_debug")) DUtil.info("Success! Saved " + playerCount + " of " + DMySQL.getRows(DMySQL.runQuery("SELECT * FROM " + DMySQL.player_table + ";")) + " players in " + totalTime/1000 + " seconds."); else DUtil.info("Success! Saved " + playerCount + " of " + DMySQL.getRows(DMySQL.runQuery("SELECT * FROM " + DMySQL.player_table + ";")) + " players."); return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } /* * savePlayerData() : Saves all HashMap data for (OfflinePlayer)player to database. 
*/ public static boolean savePlayer(OfflinePlayer player) { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { int playerID = DPlayerUtil.getPlayerID(player); // Clear tables first DMySQL.runQuery("DELETE FROM " + DMySQL.playerdata_table + " WHERE player_id=" + playerID); // Save their player-specific data HashMap<String, Object> allPlayerData = DDataUtil.getAllPlayerData(player); // Define player-specific variables String playerChars = (String) allPlayerData.get("player_characters"); int playerKills = DObjUtil.toInteger(allPlayerData.get("player_kills")); int playerDeaths = DObjUtil.toInteger(allPlayerData.get("player_deaths")); Long playerLastLogin = (Long) allPlayerData.get("player_lastlogin"); // Update main player table DMySQL.runQuery("UPDATE " + DMySQL.player_table + " SET player_characters='" + playerChars + "',player_kills=" + playerKills + ",player_deaths=" + playerDeaths + ",player_lastlogin=" + playerLastLogin + " WHERE player_id=" + playerID + ";"); // Save miscellaneous player data DMySQL.runQuery("DELETE FROM " + DMySQL.playerdata_table + " WHERE player_id=" + playerID + ";"); for(Entry<String, Object> playerData : allPlayerData.entrySet()) if(!playerData.getKey().contains("player_")) DMySQL.runQuery("INSERT INTO " + DMySQL.playerdata_table + " (player_id, datakey, datavalue) VALUES(" + playerID + ",'" + playerData.getKey() + "','" + playerData.getValue() + "');"); // Save their character-specific data now HashMap<Integer, HashMap<String, Object>> playerCharData = DDataUtil.getAllPlayerChars(player); for(Entry<Integer, HashMap<String, Object>> playerChar : playerCharData.entrySet()) { // Define character-specific variables int charID = playerChar.getKey(); boolean charImmortal = DObjUtil.toBoolean(playerCharData.get(charID).get("char_immortal")); int charHP = DObjUtil.toInteger(playerCharData.get(charID).get("char_hp")); float charExp = DObjUtil.toFloat(playerCharData.get(charID).get("char_exp")); int charFavor = DObjUtil.toInteger(playerCharData.get(charID).get("char_favor")); int charDevotion = DObjUtil.toInteger(playerCharData.get(charID).get("char_devotion")); int charAscensions = DObjUtil.toInteger(playerCharData.get(charID).get("char_ascensions")); Double charLastX = (Double) playerCharData.get(charID).get("char_lastx"); Double charLastY = (Double) playerCharData.get(charID).get("char_lasty"); Double charLastZ = (Double) playerCharData.get(charID).get("char_lastz"); String charLastW = (String) playerCharData.get(charID).get("char_lastw"); // Update main character table DMySQL.runQuery("UPDATE " + DMySQL.character_table + " SET char_immortal=" + charImmortal + ",char_hp=" + charHP + ",char_exp=" + charExp + ",char_favor=" + charFavor + ",char_devotion=" + charDevotion + ",char_ascensions=" + charAscensions + ",char_lastX=" + charLastX + ",char_lastY=" + charLastY + ",char_lastZ=" + charLastZ + ",char_lastW='" + charLastW + "' WHERE char_id=" + charID + ";"); // Save miscellaneous character data HashMap<String, Object> charData = playerChar.getValue(); DMySQL.runQuery("DELETE FROM " + DMySQL.chardata_table + " WHERE char_id=" + charID + ";"); for(Entry<String, Object> character : charData.entrySet()) if(!character.getKey().contains("char_")) DMySQL.runQuery("INSERT INTO " + DMySQL.chardata_table + " (char_id, datakey, datavalue) VALUES(" + charID + ",'" + character.getKey() + "','" + character.getValue() + "');"); } return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } /* * savePluginData() : Saves all HashMap 
data for the plugin to the database. */ public static boolean savePlugin() { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { // Clear tables first DMySQL.runQuery("TRUNCATE TABLE " + DMySQL.plugindata_table + ";"); // Save their player-specific data HashMap<String, HashMap<String, Object>> allPluginData = DDataUtil.getAllPluginData(); // Save data for(Entry<String, HashMap<String, Object>> pluginData : allPluginData.entrySet()) { String dataID = pluginData.getKey(); for(Entry<String, Object> data : pluginData.getValue().entrySet()) if(!pluginData.getKey().contains("temp_")) { String dataKey = data.getKey(); Object dataValue = data.getValue(); DMySQL.runQuery("INSERT INTO " + DMySQL.plugindata_table + " (data_id, datakey, datavalue) VALUES('" + dataID + "','" + dataKey + "','" + dataValue + "');"); } } return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } /* * removePlayer() : Removes the player completely from the database. */ public static boolean removePlayer(OfflinePlayer player) { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { int playerID = DPlayerUtil.getPlayerID(player); DMySQL.runQuery("DELETE FROM " + DMySQL.player_table + " WHERE player_id=" + playerID + ";"); DMySQL.runQuery("DELETE FROM " + DMySQL.playerdata_table + " WHERE player_id=" + playerID + ";"); DMySQL.runQuery("DELETE FROM " + DMySQL.character_table + " WHERE player_id=" + playerID + ";"); return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } /* * removeChar() : Removes the character completely from the database. */ public static boolean removeChar(int charID) { if(DConfig.getSettingBoolean("mysql") && DMySQL.checkConnection()) { DMySQL.runQuery("DELETE FROM " + DMySQL.character_table + " WHERE char_id=" + charID + ";"); DMySQL.runQuery("DELETE FROM " + DMySQL.chardata_table + " WHERE char_id=" + charID + ";"); return true; } else if(DConfig.getSettingBoolean("sqlite")) { // TODO: SQLite } return false; } }
ANOTHER BUG FIX.
src/com/legit2/Demigods/DDatabase.java
ANOTHER BUG FIX.
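A side note on the DDatabase code in the record above: every INSERT and UPDATE is assembled by concatenating values directly into the SQL string, so any data value containing a quote character breaks the statement. As an illustrative sketch only (not part of the committed code), here is the same key/value insert written with a JDBC PreparedStatement; the table and column names come from the record, while the Connection handle and the CharDataWriter class name are assumptions made for the example.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public final class CharDataWriter {
    // Inserts one miscellaneous character key/value row; the ? placeholders
    // let the JDBC driver handle quoting, so values containing ' survive.
    public static void insertCharData(Connection conn, String chardataTable,
            int charId, String key, Object value) throws SQLException {
        String sql = "INSERT INTO " + chardataTable
                + " (char_id, datakey, datavalue) VALUES (?, ?, ?)";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setInt(1, charId);
            ps.setString(2, key);
            ps.setString(3, String.valueOf(value));
            ps.executeUpdate();
        }
    }
}

The same pattern would apply to the player-data and plugin-data inserts in the record; only identifiers (table names) still need to be concatenated, since JDBC placeholders cannot stand in for them.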
Java
mit
38e51e409dbf7498d1f92c531d10c6a99061ba0e
0
tripu/validator,takenspc/validator,tripu/validator,takenspc/validator,validator/validator,YOTOV-LIMITED/validator,validator/validator,YOTOV-LIMITED/validator,validator/validator,sammuelyee/validator,tripu/validator,sammuelyee/validator,YOTOV-LIMITED/validator,sammuelyee/validator,takenspc/validator,takenspc/validator,sammuelyee/validator,sammuelyee/validator,takenspc/validator,YOTOV-LIMITED/validator,validator/validator,validator/validator,tripu/validator,YOTOV-LIMITED/validator,tripu/validator
/* * Copyright (c) 2005 Henri Sivonen * Copyright (c) 2007-2012 Mozilla Foundation * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. */ package nu.validator.servlet; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import nu.validator.messages.MessageEmitterAdapter; import nu.validator.xml.PrudentHttpEntityResolver; import org.apache.log4j.Logger; /** * @version $Id$ * @author hsivonen */ public class VerifierServlet extends HttpServlet { /** * */ private static final long serialVersionUID = 7811043632732680935L; private static final Logger log4j = Logger.getLogger(VerifierServlet.class); static final String GENERIC_HOST = System.getProperty("nu.validator.servlet.host.generic", ""); static final String HTML5_HOST = System.getProperty("nu.validator.servlet.host.html5", ""); static final String PARSETREE_HOST = System.getProperty("nu.validator.servlet.host.parsetree", ""); static final String GENERIC_PATH = System.getProperty("nu.validator.servlet.path.generic", "/"); static final String HTML5_PATH = System.getProperty("nu.validator.servlet.path.html5", "/html5/"); static final String PARSETREE_PATH = System.getProperty("nu.validator.servlet.path.parsetree", "/parsetree/"); static final boolean W3C_BRANDING = "1".equals(System.getProperty("nu.validator.servlet.w3cbranding")); private static final byte[] GENERIC_ROBOTS_TXT; private static final byte[] HTML5_ROBOTS_TXT; private static final byte[] PARSETREE_ROBOTS_TXT; private static final byte[] STYLE_CSS; private static final byte[] SCRIPT_JS; private static final byte[] ICON_PNG; private static final byte[] W3C_PNG; private static final byte[] VNU_PNG; private static final byte[] HTML_PNG; private static final byte[] ABOUT_HTML; static { String aboutPath = System.getProperty( "nu.validator.servlet.path.about", "./validator/site/"); try { GENERIC_ROBOTS_TXT = buildRobotsTxt(GENERIC_HOST, GENERIC_PATH, HTML5_HOST, HTML5_PATH, PARSETREE_HOST, PARSETREE_PATH); HTML5_ROBOTS_TXT = buildRobotsTxt(HTML5_HOST, HTML5_PATH, GENERIC_HOST, GENERIC_PATH, PARSETREE_HOST, PARSETREE_PATH); PARSETREE_ROBOTS_TXT = buildRobotsTxt(PARSETREE_HOST, PARSETREE_PATH, HTML5_HOST, HTML5_PATH, GENERIC_HOST, GENERIC_PATH); } catch (UnsupportedEncodingException e) { throw new 
RuntimeException(e); } try { STYLE_CSS = readFileFromPathIntoByteArray(aboutPath + "style.css"); SCRIPT_JS = readFileFromPathIntoByteArray(aboutPath + "script.js"); ICON_PNG = readFileFromPathIntoByteArray(aboutPath + "icon.png"); if (W3C_BRANDING) { W3C_PNG = readFileFromPathIntoByteArray(aboutPath + "w3c.png"); VNU_PNG = readFileFromPathIntoByteArray(aboutPath + "vnu.png"); HTML_PNG = readFileFromPathIntoByteArray(aboutPath + "html.png"); ABOUT_HTML = readFileFromPathIntoByteArray(aboutPath + "about.html"); } else { W3C_PNG = VNU_PNG = HTML_PNG = ABOUT_HTML = null; } } catch (IOException e) { throw new RuntimeException(e); } PrudentHttpEntityResolver.setParams( Integer.parseInt(System.getProperty("nu.validator.servlet.connection-timeout","5000")), Integer.parseInt(System.getProperty("nu.validator.servlet.socket-timeout","5000")), 100); PrudentHttpEntityResolver.setUserAgent("Validator.nu/LV"); // force some class loading new VerifierServletTransaction(null, null); new MessageEmitterAdapter(null, false, null, 0, null); } /** * @return * @throws UnsupportedEncodingException */ private static byte[] buildRobotsTxt(String primaryHost, String primaryPath, String secondaryHost, String secondaryPath, String tertiaryHost, String tertiaryPath) throws UnsupportedEncodingException { StringBuilder builder = new StringBuilder(); builder.append("User-agent: *\nDisallow: "); builder.append(primaryPath); builder.append("?\n"); if (primaryHost.equals(secondaryHost)) { builder.append("Disallow: "); builder.append(secondaryPath); builder.append("?\n"); } if (primaryHost.equals(tertiaryHost)) { builder.append("Disallow: "); builder.append(tertiaryPath); builder.append("?\n"); } return builder.toString().getBytes("UTF-8"); } private static byte[] readFileFromPathIntoByteArray(String path) throws IOException { File file = new File(path); byte[] buffer = new byte[(int) file.length()]; InputStream ios = null; try { ios = new FileInputStream(file); if (ios.read(buffer) != buffer.length) { throw new IOException( "Unexpected end of file reached while reading " + path); } } finally { try { if (ios != null) { ios.close(); } } catch (IOException e) { throw new RuntimeException(e); } } return buffer; } private void writeResponse(byte[] buffer, String type, HttpServletResponse response) throws IOException { try { response.setContentType(type); response.setContentLength(buffer.length); response.setDateHeader("Expires", System.currentTimeMillis() + 43200000); // 12 hours OutputStream out = response.getOutputStream(); out.write(buffer); out.flush(); out.close(); } catch (IOException e) { throw new RuntimeException(e); } return; } /** * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { if ("/robots.txt".equals(request.getPathInfo())) { String serverName = request.getServerName(); byte[] robotsTxt = null; if (hostMatch(GENERIC_HOST, serverName)) { robotsTxt = GENERIC_ROBOTS_TXT; } else if (hostMatch(HTML5_HOST, serverName)) { robotsTxt = HTML5_ROBOTS_TXT; } else if (hostMatch(PARSETREE_HOST, serverName)) { robotsTxt = PARSETREE_ROBOTS_TXT; } else { response.sendError(HttpServletResponse.SC_NOT_FOUND); return; } writeResponse(robotsTxt, "text/plain; charset=utf-8", response); return; } else if ("/style.css".equals(request.getPathInfo())) { writeResponse(STYLE_CSS, "text/css; charset=utf-8", response); return; } else if 
("/script.js".equals(request.getPathInfo())) { writeResponse(SCRIPT_JS, "text/javascript; charset=utf-8", response); return; } else if ("/icon.png".equals(request.getPathInfo())) { writeResponse(ICON_PNG, "image/png", response); return; } else if (W3C_BRANDING && "/w3c.png".equals(request.getPathInfo())) { writeResponse(W3C_PNG, "image/png", response); return; } else if (W3C_BRANDING && "/vnu.png".equals(request.getPathInfo())) { writeResponse(VNU_PNG, "image/png", response); return; } else if (W3C_BRANDING && "/html.png".equals(request.getPathInfo())) { writeResponse(HTML_PNG, "image/png", response); return; } else if (W3C_BRANDING && "/about.html".equals(request.getPathInfo())) { writeResponse(ABOUT_HTML, "text/html; charset=utf-8", response); return; } doPost(request, response); } private boolean hostMatch(String reference, String host) { if ("".equals(reference)) { return true; } else { // XXX case-sensitivity return reference.equalsIgnoreCase(host); } } /** * @see javax.servlet.http.HttpServlet#doOptions(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ @Override protected void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String pathInfo = request.getPathInfo(); if ("*".equals(pathInfo)) { // useless RFC 2616 complication return; } else if ("/robots.txt".equals(pathInfo)) { String serverName = request.getServerName(); if (hostMatch(GENERIC_HOST, serverName) || hostMatch(HTML5_HOST, serverName) || hostMatch(PARSETREE_HOST, serverName)) { sendGetOnlyOptions(request, response); return; } else { response.sendError(HttpServletResponse.SC_NOT_FOUND); return; } } doPost(request, response); } /** * @see javax.servlet.http.HttpServlet#doTrace(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ @Override protected void doTrace(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); } /** * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, * javax.servlet.http.HttpServletResponse) */ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String pathInfo = request.getPathInfo(); if (pathInfo == null) { pathInfo = "/"; // Fix for Jigsaw } String serverName = request.getServerName(); if ("/robots.txt".equals(pathInfo)) { // if we get here, we've got a POST response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); return; } log4j.debug("pathInfo: " + pathInfo); log4j.debug("serverName: " + serverName); boolean isOptions = "OPTIONS".equals(request.getMethod()); if ("validator.nu".equals(serverName) && "/html5/".equals(pathInfo)) { response.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY); String queryString = request.getQueryString(); response.setHeader("Location", "http://html5.validator.nu/" + (queryString == null ? "" : "?" 
+ queryString)); } else if (hostMatch(GENERIC_HOST, serverName) && GENERIC_PATH.equals(pathInfo)) { response.setHeader("Access-Control-Allow-Origin", "*"); if (isOptions) { response.setHeader("Access-Control-Policy-Path", GENERIC_PATH); sendOptions(request, response); } else { new VerifierServletTransaction(request, response).service(); } } else if (hostMatch(HTML5_HOST, serverName) && HTML5_PATH.equals(pathInfo)) { response.setHeader("Access-Control-Allow-Origin", "*"); if (isOptions) { sendOptions(request, response); } else { new Html5ConformanceCheckerTransaction(request, response).service(); } } else if (hostMatch(PARSETREE_HOST, serverName) && PARSETREE_PATH.equals(pathInfo)) { if (isOptions) { sendGetOnlyOptions(request, response); } else { new ParseTreePrinter(request, response).service(); } } else { response.sendError(HttpServletResponse.SC_NOT_FOUND); } } private void sendGetOnlyOptions(HttpServletRequest request, HttpServletResponse response) { response.setHeader("Allow", "GET, HEAD, OPTIONS"); response.setHeader("Access-Control-Allow-Methods", "GET, HEAD, POST, OPTIONS"); response.setContentType("application/octet-stream"); response.setContentLength(0); } private void sendOptions(HttpServletRequest request, HttpServletResponse response) { response.setHeader("Access-Control-Max-Age", "43200"); // 12 hours response.setHeader("Allow", "GET, HEAD, POST, OPTIONS"); response.setHeader("Access-Control-Allow-Methods", "GET, HEAD, POST, OPTIONS"); response.setContentType("application/octet-stream"); response.setContentLength(0); } }
src/nu/validator/servlet/VerifierServlet.java
/* * Copyright (c) 2005 Henri Sivonen * Copyright (c) 2007-2008 Mozilla Foundation * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. */ package nu.validator.servlet; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import nu.validator.messages.MessageEmitterAdapter; import nu.validator.xml.PrudentHttpEntityResolver; import org.apache.log4j.Logger; /** * @version $Id$ * @author hsivonen */ public class VerifierServlet extends HttpServlet { /** * */ private static final long serialVersionUID = 7811043632732680935L; private static final Logger log4j = Logger.getLogger(VerifierServlet.class); static final String GENERIC_HOST = System.getProperty("nu.validator.servlet.host.generic", ""); static final String HTML5_HOST = System.getProperty("nu.validator.servlet.host.html5", ""); static final String PARSETREE_HOST = System.getProperty("nu.validator.servlet.host.parsetree", ""); static final String GENERIC_PATH = System.getProperty("nu.validator.servlet.path.generic", "/"); static final String HTML5_PATH = System.getProperty("nu.validator.servlet.path.html5", "/html5/"); static final String PARSETREE_PATH = System.getProperty("nu.validator.servlet.path.parsetree", "/parsetree/"); static final boolean W3C_BRANDING = "1".equals(System.getProperty("nu.validator.servlet.w3cbranding")); private static final byte[] GENERIC_ROBOTS_TXT; private static final byte[] HTML5_ROBOTS_TXT; private static final byte[] PARSETREE_ROBOTS_TXT; private static final byte[] STYLE_CSS; private static final byte[] SCRIPT_JS; private static final byte[] ICON_PNG; private static final byte[] W3C_PNG; private static final byte[] VNU_PNG; private static final byte[] HTML_PNG; private static final byte[] ABOUT_HTML; static { String aboutPath = System.getProperty( "nu.validator.servlet.path.about", "./validator/site/"); try { GENERIC_ROBOTS_TXT = buildRobotsTxt(GENERIC_HOST, GENERIC_PATH, HTML5_HOST, HTML5_PATH, PARSETREE_HOST, PARSETREE_PATH); HTML5_ROBOTS_TXT = buildRobotsTxt(HTML5_HOST, HTML5_PATH, GENERIC_HOST, GENERIC_PATH, PARSETREE_HOST, PARSETREE_PATH); PARSETREE_ROBOTS_TXT = buildRobotsTxt(PARSETREE_HOST, PARSETREE_PATH, HTML5_HOST, HTML5_PATH, GENERIC_HOST, GENERIC_PATH); } catch (UnsupportedEncodingException e) { throw new 
RuntimeException(e); } try { STYLE_CSS = readFileFromPathIntoByteArray(aboutPath + "style.css"); SCRIPT_JS = readFileFromPathIntoByteArray(aboutPath + "script.js"); ICON_PNG = readFileFromPathIntoByteArray(aboutPath + "icon.png"); if (W3C_BRANDING) { W3C_PNG = readFileFromPathIntoByteArray(aboutPath + "w3c.png"); VNU_PNG = readFileFromPathIntoByteArray(aboutPath + "vnu.png"); HTML_PNG = readFileFromPathIntoByteArray(aboutPath + "html.png"); ABOUT_HTML = readFileFromPathIntoByteArray(aboutPath + "about.html"); } else { W3C_PNG = VNU_PNG = HTML_PNG = ABOUT_HTML = null; } } catch (IOException e) { throw new RuntimeException(e); } PrudentHttpEntityResolver.setParams( Integer.parseInt(System.getProperty("nu.validator.servlet.connection-timeout","5000")), Integer.parseInt(System.getProperty("nu.validator.servlet.socket-timeout","5000")), 100); PrudentHttpEntityResolver.setUserAgent("Validator.nu/LV"); // force some class loading new VerifierServletTransaction(null, null); new MessageEmitterAdapter(null, false, null, 0, null); } /** * @return * @throws UnsupportedEncodingException */ private static byte[] buildRobotsTxt(String primaryHost, String primaryPath, String secondaryHost, String secondaryPath, String tertiaryHost, String tertiaryPath) throws UnsupportedEncodingException { StringBuilder builder = new StringBuilder(); builder.append("User-agent: *\nDisallow: "); builder.append(primaryPath); builder.append("?\n"); if (primaryHost.equals(secondaryHost)) { builder.append("Disallow: "); builder.append(secondaryPath); builder.append("?\n"); } if (primaryHost.equals(tertiaryHost)) { builder.append("Disallow: "); builder.append(tertiaryPath); builder.append("?\n"); } return builder.toString().getBytes("UTF-8"); } private static byte[] readFileFromPathIntoByteArray(String path) throws IOException { File file = new File(path); byte[] buffer = new byte[(int) file.length()]; InputStream ios = null; try { ios = new FileInputStream(file); if (ios.read(buffer) != buffer.length) { throw new IOException( "Unexpected end of file reached while reading " + path); } } finally { try { if (ios != null) { ios.close(); } } catch (IOException e) { throw new RuntimeException(e); } } return buffer; } private void writeResponse(byte[] buffer, String type, HttpServletResponse response) throws IOException { try { response.setContentType(type); response.setContentLength(buffer.length); response.setDateHeader("Expires", System.currentTimeMillis() + 43200000); // 12 hours OutputStream out = response.getOutputStream(); out.write(buffer); out.flush(); out.close(); } catch (IOException e) { throw new RuntimeException(e); } return; } /** * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { if ("/robots.txt".equals(request.getPathInfo())) { String serverName = request.getServerName(); byte[] robotsTxt = null; if (hostMatch(GENERIC_HOST, serverName)) { robotsTxt = GENERIC_ROBOTS_TXT; } else if (hostMatch(HTML5_HOST, serverName)) { robotsTxt = HTML5_ROBOTS_TXT; } else if (hostMatch(PARSETREE_HOST, serverName)) { robotsTxt = PARSETREE_ROBOTS_TXT; } else { response.sendError(HttpServletResponse.SC_NOT_FOUND); return; } writeResponse(robotsTxt, "text/plain; charset=utf-8", response); return; } else if ("/style.css".equals(request.getPathInfo())) { writeResponse(STYLE_CSS, "text/css; charset=utf-8", response); return; } else if 
("/script.js".equals(request.getPathInfo())) { writeResponse(SCRIPT_JS, "text/javascript charset=utf-8", response); return; } else if ("/icon.png".equals(request.getPathInfo())) { writeResponse(ICON_PNG, "image/png", response); return; } else if (W3C_BRANDING && "/w3c.png".equals(request.getPathInfo())) { writeResponse(W3C_PNG, "image/png", response); return; } else if (W3C_BRANDING && "/vnu.png".equals(request.getPathInfo())) { writeResponse(VNU_PNG, "image/png", response); return; } else if (W3C_BRANDING && "/html.png".equals(request.getPathInfo())) { writeResponse(HTML_PNG, "image/png", response); return; } else if (W3C_BRANDING && "/about.html".equals(request.getPathInfo())) { writeResponse(ABOUT_HTML, "text/html; charset=utf-8", response); return; } doPost(request, response); } private boolean hostMatch(String reference, String host) { if ("".equals(reference)) { return true; } else { // XXX case-sensitivity return reference.equalsIgnoreCase(host); } } /** * @see javax.servlet.http.HttpServlet#doOptions(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ @Override protected void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String pathInfo = request.getPathInfo(); if ("*".equals(pathInfo)) { // useless RFC 2616 complication return; } else if ("/robots.txt".equals(pathInfo)) { String serverName = request.getServerName(); if (hostMatch(GENERIC_HOST, serverName) || hostMatch(HTML5_HOST, serverName) || hostMatch(PARSETREE_HOST, serverName)) { sendGetOnlyOptions(request, response); return; } else { response.sendError(HttpServletResponse.SC_NOT_FOUND); return; } } doPost(request, response); } /** * @see javax.servlet.http.HttpServlet#doTrace(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) */ @Override protected void doTrace(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); } /** * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, * javax.servlet.http.HttpServletResponse) */ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String pathInfo = request.getPathInfo(); if (pathInfo == null) { pathInfo = "/"; // Fix for Jigsaw } String serverName = request.getServerName(); if ("/robots.txt".equals(pathInfo)) { // if we get here, we've got a POST response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); return; } log4j.debug("pathInfo: " + pathInfo); log4j.debug("serverName: " + serverName); boolean isOptions = "OPTIONS".equals(request.getMethod()); if ("validator.nu".equals(serverName) && "/html5/".equals(pathInfo)) { response.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY); String queryString = request.getQueryString(); response.setHeader("Location", "http://html5.validator.nu/" + (queryString == null ? "" : "?" 
+ queryString)); } else if (hostMatch(GENERIC_HOST, serverName) && GENERIC_PATH.equals(pathInfo)) { response.setHeader("Access-Control-Allow-Origin", "*"); if (isOptions) { response.setHeader("Access-Control-Policy-Path", GENERIC_PATH); sendOptions(request, response); } else { new VerifierServletTransaction(request, response).service(); } } else if (hostMatch(HTML5_HOST, serverName) && HTML5_PATH.equals(pathInfo)) { response.setHeader("Access-Control-Allow-Origin", "*"); if (isOptions) { sendOptions(request, response); } else { new Html5ConformanceCheckerTransaction(request, response).service(); } } else if (hostMatch(PARSETREE_HOST, serverName) && PARSETREE_PATH.equals(pathInfo)) { if (isOptions) { sendGetOnlyOptions(request, response); } else { new ParseTreePrinter(request, response).service(); } } else { response.sendError(HttpServletResponse.SC_NOT_FOUND); } } private void sendGetOnlyOptions(HttpServletRequest request, HttpServletResponse response) { response.setHeader("Allow", "GET, HEAD, OPTIONS"); response.setHeader("Access-Control-Allow-Methods", "GET, HEAD, POST, OPTIONS"); response.setContentType("application/octet-stream"); response.setContentLength(0); } private void sendOptions(HttpServletRequest request, HttpServletResponse response) { response.setHeader("Access-Control-Max-Age", "43200"); // 12 hours response.setHeader("Allow", "GET, HEAD, POST, OPTIONS"); response.setHeader("Access-Control-Allow-Methods", "GET, HEAD, POST, OPTIONS"); response.setContentType("application/octet-stream"); response.setContentLength(0); } }
Fixed typo and updated copyright date.
src/nu/validator/servlet/VerifierServlet.java
Fixed typo and updated copyright date.
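One detail worth noting in the VerifierServlet record above: readFileFromPathIntoByteArray issues a single InputStream.read(buffer) call and treats a short read as truncation, but read may legitimately return fewer bytes than requested before end of file. A minimal sketch of a fully-reading variant using DataInputStream.readFully, illustrative only; the FileBytes class name is an assumption, not code from the record.

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

final class FileBytes {
    // Reads the whole file into memory; readFully keeps reading until the
    // buffer is full and throws EOFException if the file is shorter.
    static byte[] readAll(String path) throws IOException {
        File file = new File(path);
        byte[] buffer = new byte[(int) file.length()];
        try (DataInputStream in = new DataInputStream(new FileInputStream(file))) {
            in.readFully(buffer);
        }
        return buffer;
    }
}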
Java
mit
7898f22db3b41754ee7ad97c29d79d32cb766fc0
0
frc2399/2015-code
package org.usfirst.frc.team2399.robot; import org.usfirst.frc.team2399.robot.commands.DriveAutoZone; //import org.usfirst.frc.team2399.robot.OI; import org.usfirst.frc.team2399.robot.subsystems.DriveTrain; import org.usfirst.frc.team2399.robot.subsystems.Elevator; import edu.wpi.first.wpilibj.DigitalInput; import edu.wpi.first.wpilibj.Gyro; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.buttons.Button; import edu.wpi.first.wpilibj.command.Command; import edu.wpi.first.wpilibj.command.Scheduler; import edu.wpi.first.wpilibj.command.Subsystem; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ // THIS CLASS HAS REPLACED COMMANDBASE/COMMANDS public class Robot extends IterativeRobot { // established static variables public static OI oi; public static DriveTrain driveTrain; // public static Button reduceSpeedButt; public static Elevator elevatorFront; public static Elevator elevatorRear; public static Joystick joystick; // established contact switches public static DigitalInput contactSwitchOne = new DigitalInput( RobotMap.CONTACT_SWITCH1ID); public static DigitalInput contactSwitchTwo = new DigitalInput( RobotMap.CONTACT_SWITCH2ID); public static DigitalInput contactSwitchThree = new DigitalInput( RobotMap.CONTACT_SWITCH3ID); public static DigitalInput contactSwitchFour = new DigitalInput( RobotMap.CONTACT_SWITCH4ID); private Command autoncommand; /** * This function is run when the robot is first started up and should be * used for any initialization code. */ // if you want to take out an auton mode, comment out autoncommand = new public void robotInit() { // established new instances of drivetrain, elevator, OI and an // autonomus command driveTrain = new DriveTrain(); elevatorFront = new Elevator(RobotMap.ELEVATORFRONT_JAGUARID); elevatorRear = new Elevator(RobotMap.ELEVATORREAR_JAGUARID); oi = new OI(); autoncommand = new DriveAutoZone(); // smartdashboard values for drivetrain and elevator SmartDashboard.putData("Drive Train", driveTrain); SmartDashboard.putData("Front Elevator", elevatorFront); SmartDashboard.putData("Rear Elevator", elevatorRear); // instantiate the command used for the autonomous period } // established wait command for later use private void WaitCommmand(double d) { } // When Contact switches are pushed for at least 0.005 seconds, they will // show up as Pressed on SmartDashboard. 
// public void reduceSpeedButt() { // if (reduceSpeedButt.get() == true){ // WaitCommmand(0.005); // x = .5 * x; // y = .5 * y; // twist = .5 * twist; // driveTrain.driveFieldOriented(x, y, twist); // } // } public void contactSwitchOne() { if (contactSwitchOne.get() == true) { WaitCommmand(0.005); SmartDashboard.putBoolean("Contact Limit Switch One Pressed", contactSwitchOne.get()); } } public void contactSwitchTwo() { if (contactSwitchTwo.get() == true) { WaitCommmand(0.005); SmartDashboard.putBoolean("Contact Limit Switch Two Pressed", contactSwitchTwo.get()); } } public void contactSwitchThree() { if (contactSwitchThree.get() == true) { WaitCommmand(0.005); SmartDashboard.putBoolean("Contact Limit Switch Three Pressed", contactSwitchThree.get()); } } public void contactSwitchFour() { if (contactSwitchFour.get() == true) { WaitCommmand(0.005); SmartDashboard.putBoolean("Contact Switch Four Pressed", contactSwitchFour.get()); } } // made it so Contact Switches would return true when pressed protected boolean contactSwitchOneReturnTrue() { if (contactSwitchOne.get() == true) { return true; } else { return false; } } protected boolean contactSwitchTwoReturnTrue() { if (contactSwitchTwo.get() == true) { return true; } else { return false; } } protected boolean contactSwitchThreeReturnTrue() { if (contactSwitchThree.get() == true) { return true; } else { return false; } } protected boolean contactSwitchFourReturnTrue() { if (contactSwitchFour.get() == true) { return true; } else { return false; } } // TODO figure out what this is so we can write a better comment public void disabledPeriodic() { Scheduler.getInstance().run(); } // schedule the autonomous command (example) public void autonomousInit() { if (autoncommand != null) { autoncommand.start(); } } /** * This function is called periodically during autonomous */ public void autonomousPeriodic() { Scheduler.getInstance().run(); } public void teleopInit() { // This makes sure that the autonomous stops running when // teleop starts running. If you want the autonomous to // continue until interrupted by another command, remove // this line or comment it out. } /** * This function is called when the disabled button is hit. You can use it * to reset subsystems before shutting down. */ public void disabledInit() { } /** * This function is called periodically during operator control */ public void teleopPeriodic() { Scheduler.getInstance().run(); } /** * This function is called periodically during test mode */ public void testPeriodic() { LiveWindow.run(); } }
src/org/usfirst/frc/team2399/robot/Robot.java
package org.usfirst.frc.team2399.robot; import org.usfirst.frc.team2399.robot.commands.DriveAutoZone; //import org.usfirst.frc.team2399.robot.OI; import org.usfirst.frc.team2399.robot.subsystems.DriveTrain; import org.usfirst.frc.team2399.robot.subsystems.Elevator; import edu.wpi.first.wpilibj.DigitalInput; import edu.wpi.first.wpilibj.Gyro; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.buttons.Button; import edu.wpi.first.wpilibj.command.Command; import edu.wpi.first.wpilibj.command.Scheduler; import edu.wpi.first.wpilibj.command.Subsystem; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ // THIS CLASS HAS REPLACED COMMANDBASE/COMMANDS public class Robot extends IterativeRobot { // established static variables public static OI oi; public static DriveTrain driveTrain; // public static Button reduceSpeedButt; public static Elevator elevatorFront; public static Elevator elevatorRear; public static Joystick joystick; // established contact switches public static DigitalInput contactSwitchOne = new DigitalInput( RobotMap.CONTACT_SWITCH1ID); public static DigitalInput contactSwitchTwo = new DigitalInput( RobotMap.CONTACT_SWITCH2ID); public static DigitalInput contactSwitchThree = new DigitalInput( RobotMap.CONTACT_SWITCH3ID); public static DigitalInput contactSwitchFour = new DigitalInput( RobotMap.CONTACT_SWITCH4ID); private Command autoncommand; /** * This function is run when the robot is first started up and should be * used for any initialization code. */ // if you want to take out an auton mode, comment out autoncommand = new public void robotInit() { // established new instances of drivetrain, elevator, OI and an // autonomus command driveTrain = new DriveTrain(); elevatorFront = new Elevator(RobotMap.ELEVATORFRONT_JAGUARID); elevatorRear = new Elevator(RobotMap.ELEVATORREAR_JAGUARID); oi = new OI(); autoncommand = new DriveAutoZone(); // smartdashboard values for drivetrain and elevator SmartDashboard.putData("Drive Train", driveTrain); SmartDashboard.putData("Elevator", elevatorFront); SmartDashboard.putData("Elevator", elevatorRear); // instantiate the command used for the autonomous period } // established wait command for later use private void WaitCommmand(double d) { } // When Contact switches are pushed for at least 0.005 seconds, they will // show up as Pressed on SmartDashboard. 
// public void reduceSpeedButt() { // if (reduceSpeedButt.get() == true){ // WaitCommmand(0.005); // x = .5 * x; // y = .5 * y; // twist = .5 * twist; // driveTrain.driveFieldOriented(x, y, twist); // } // } public void contactSwitchOne() { if (contactSwitchOne.get() == true) { WaitCommmand(0.005); SmartDashboard.putBoolean("Contact Limit Switch One Pressed", contactSwitchOne.get()); } } public void contactSwitchTwo() { if (contactSwitchTwo.get() == true) { WaitCommmand(0.005); SmartDashboard.putBoolean("Contact Limit Switch Two Pressed", contactSwitchTwo.get()); } } public void contactSwitchThree() { if (contactSwitchThree.get() == true) { WaitCommmand(0.005); SmartDashboard.putBoolean("Contact Limit Switch Three Pressed", contactSwitchThree.get()); } } public void contactSwitchFour() { if (contactSwitchFour.get() == true) { WaitCommmand(0.005); SmartDashboard.putBoolean("Contact Switch Four Pressed", contactSwitchFour.get()); } } // made it so Contact Switches would return true when pressed protected boolean contactSwitchOneReturnTrue() { if (contactSwitchOne.get() == true) { return true; } else { return false; } } protected boolean contactSwitchTwoReturnTrue() { if (contactSwitchTwo.get() == true) { return true; } else { return false; } } protected boolean contactSwitchThreeReturnTrue() { if (contactSwitchThree.get() == true) { return true; } else { return false; } } protected boolean contactSwitchFourReturnTrue() { if (contactSwitchFour.get() == true) { return true; } else { return false; } } // TODO figure out what this is so we can write a better comment public void disabledPeriodic() { Scheduler.getInstance().run(); } // schedule the autonomous command (example) public void autonomousInit() { if (autoncommand != null) { autoncommand.start(); } } /** * This function is called periodically during autonomous */ public void autonomousPeriodic() { Scheduler.getInstance().run(); } public void teleopInit() { // This makes sure that the autonomous stops running when // teleop starts running. If you want the autonomous to // continue until interrupted by another command, remove // this line or comment it out. } /** * This function is called when the disabled button is hit. You can use it * to reset subsystems before shutting down. */ public void disabledInit() { } /** * This function is called periodically during operator control */ public void teleopPeriodic() { Scheduler.getInstance().run(); } /** * This function is called periodically during test mode */ public void testPeriodic() { LiveWindow.run(); } }
Changed Elevator Names for SmartDashboard
src/org/usfirst/frc/team2399/robot/Robot.java
Changed Elevator Names for SmartDashboard
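In the Robot record above, each contactSwitch...ReturnTrue() helper wraps DigitalInput.get() in an if/else that returns true or false explicitly. A minimal sketch of the direct form follows; the SwitchHelper class and method name are made up for illustration, while DigitalInput is the WPILib type the record already imports.

import edu.wpi.first.wpilibj.DigitalInput;

final class SwitchHelper {
    // DigitalInput.get() already yields the boolean being tested, so the
    // if/else return-true/return-false wrappers collapse to a direct return.
    static boolean isPressed(DigitalInput contactSwitch) {
        return contactSwitch.get();
    }
}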
Java
mit
a0047cc1f82e099effbbb04bd6049e7e460b2381
0
tariq1890/tacoco,tariq1890/tacoco,tariq1890/tacoco,spideruci/tacoco,tariq1890/tacoco,spideruci/tacoco
package org.spideruci.tacoco; import java.io.File; import java.io.IOException; import java.lang.reflect.Method; import java.net.URI; import java.net.URL; import java.net.URLClassLoader; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import org.junit.runner.JUnitCore; public final class TacocoRunner { public static void main(String[] args) { // int sleepLength = 2000; // System.out.println("Testing Thread.sleep()"); // System.out.println("Going to sleep for "+sleepLength+" ms."); // try // { // Thread.sleep(sleepLength); // } // catch(InterruptedException e) // { // System.out.println(e.getMessage()); // } // System.out.println("Done sleeping."); try { addPath(args[0]); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } JUnitCore core = new JUnitCore(); core.addListener(new TacocoListener()); for(String testClass : getClasses(args[0])){ try { core.run(Class.forName(testClass)); } catch (ClassNotFoundException e) { e.printStackTrace(); } } System.out.println(System.getProperty("java.class.path")); } public static void addPath(String s) throws Exception { File f = new File(s); URI u = f.toURI(); URLClassLoader urlClassLoader = (URLClassLoader) ClassLoader.getSystemClassLoader(); Class<URLClassLoader> urlClass = URLClassLoader.class; Method method = urlClass.getDeclaredMethod("addURL", new Class[]{URL.class}); method.setAccessible(true); method.invoke(urlClassLoader, new Object[]{u.toURL()}); } public static ArrayList<String> getClasses(final String p){ final ArrayList<String> ret = new ArrayList<String>(); try { Files.walkFileTree(Paths.get(p), new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String str = file.toString(); if(str.endsWith(".class") && !str.matches("(.*)\\$(.*)")) { System.out.println(str.replaceAll(p.endsWith("/")?p:p+"/","").replace('/','.').replaceAll("\\.class","")); ret.add(str.replaceAll(p.endsWith("/")?p:p+"/","").replace('/','.').replaceAll("\\.class","")); } return FileVisitResult.CONTINUE; } }); } catch (IOException e) { e.printStackTrace(); } return ret; } }
src/main/java/org/spideruci/tacoco/TacocoRunner.java
package org.spideruci.tacoco; import java.io.File; import java.io.IOException; import java.lang.reflect.Method; import java.net.URI; import java.net.URL; import java.net.URLClassLoader; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import org.junit.runner.JUnitCore; public final class TacocoRunner { public static void main(String[] args) { // int sleepLength = 2000; // System.out.println("Testing Thread.sleep()"); // System.out.println("Going to sleep for "+sleepLength+" ms."); // try // { // Thread.sleep(sleepLength); // } // catch(InterruptedException e) // { // System.out.println(e.getMessage()); // } // System.out.println("Done sleeping."); try { addPath(args[0]); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } JUnitCore core = new JUnitCore(); core.addListener(new TacocoListener()); for(String testClass : getClasses(args[0])){ try { core.run(Class.forName(testClass)); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } } System.out.println(System.getProperty("java.class.path")); } public static void addPath(String s) throws Exception { File f = new File(s); URI u = f.toURI(); URLClassLoader urlClassLoader = (URLClassLoader) ClassLoader.getSystemClassLoader(); Class<URLClassLoader> urlClass = URLClassLoader.class; Method method = urlClass.getDeclaredMethod("addURL", new Class[]{URL.class}); method.setAccessible(true); method.invoke(urlClassLoader, new Object[]{u.toURL()}); } public static ArrayList<String> getClasses(final String p){ final ArrayList<String> ret = new ArrayList<String>(); try { Files.walkFileTree(Paths.get(p), new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String str = file.toString(); if(str.endsWith(".class") && !str.matches("(.*)\\$(.*)")) { System.out.println(str.replaceAll(p.endsWith("/")?p:p+"/","").replace('/','.').replaceAll("\\.class","")); ret.add(str.replaceAll(p.endsWith("/")?p:p+"/","").replace('/','.').replaceAll("\\.class","")); } return FileVisitResult.CONTINUE; } }); } catch (IOException e) { e.printStackTrace(); } return ret; } }
bug fix: remove the auto-generated TODO comment from the ClassNotFoundException handler in TacocoRunner
src/main/java/org/spideruci/tacoco/TacocoRunner.java
bug fix: remove the auto-generated TODO comment from the ClassNotFoundException handler in TacocoRunner
Java
mit
368c8a894a4b7031a21f1c6a7541fa2627be88f8
0
university-information-system/uis,university-information-system/uis,university-information-system/uis,university-information-system/uis
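TacocoRunner extends the system class loader reflectively and then drives JUnit programmatically. A minimal, self-contained sketch of that same pattern follows; the directory and test-class names are placeholders, JUnit's own TextListener stands in for the project-specific TacocoListener, and the URLClassLoader cast only works on Java 8 and earlier, where the system class loader is a URLClassLoader.

import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;

import org.junit.internal.TextListener;
import org.junit.runner.JUnitCore;

public final class RunTestsFromDirectory {

    public static void main(String[] args) throws Exception {
        // Placeholder path and class name; adjust to your build output.
        String classesDir = "build/test-classes";
        String testClassName = "com.example.SomeTest";

        appendToSystemClassPath(new File(classesDir));

        JUnitCore core = new JUnitCore();
        core.addListener(new TextListener(System.out)); // stand-in for TacocoListener
        core.run(Class.forName(testClassName));
    }

    // Reflectively calls the protected URLClassLoader.addURL, as TacocoRunner does.
    static void appendToSystemClassPath(File dir) throws Exception {
        URLClassLoader loader = (URLClassLoader) ClassLoader.getSystemClassLoader();
        Method addURL = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
        addURL.setAccessible(true);
        addURL.invoke(loader, dir.toURI().toURL());
    }
}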
package at.ac.tuwien.inso.service;

import at.ac.tuwien.inso.entity.*;
import org.springframework.security.access.prepost.PreAuthorize;

import java.util.*;

public interface StudyPlanService {

    /**
     * Creates a new study plan.
     * May throw a ValidationException if the study plan's name or its optional,
     * mandatory or free-choice ECTS values are null, empty or <= 0.
     *
     * @param studyPlan
     * @return
     */
    @PreAuthorize("hasRole('ADMIN')")
    StudyPlan create(StudyPlan studyPlan);

    /**
     * Returns a list of all StudyPlans.
     * The user must be authenticated.
     *
     * @return
     */
    @PreAuthorize("isAuthenticated()")
    List<StudyPlan> findAll();

    /**
     * Returns the StudyPlan with the corresponding id.
     * May throw a BusinessObjectNotFoundException if there is no StudyPlan with this id.
     *
     * @param id should not be null and not < 1
     * @return
     */
    @PreAuthorize("isAuthenticated()")
    StudyPlan findOne(Long id);

    /**
     * Finds all SubjectsForStudyPlan for a study plan id,
     * ordered by semester recommendation.
     * The user must be authenticated.
     *
     * @param id should not be null and not < 1
     * @return
     */
    @PreAuthorize("isAuthenticated()")
    List<SubjectForStudyPlan> getSubjectsForStudyPlan(Long id);

    /**
     * Returns a list of grades for the subjects for the CURRENTLY LOGGED IN STUDENT.
     * The user needs to be authenticated.
     *
     * @param id should not be null and not < 1
     * @return
     */
    @PreAuthorize("isAuthenticated()")
    List<SubjectWithGrade> getSubjectsWithGradesForStudyPlan(Long id);

    /**
     * Adds a subject to a study plan.
     * The user needs the role ADMIN.
     *
     * @param subjectForStudyPlan should contain a subject that is not null and has an id
     *                            that is not < 1; it should also contain a study plan that
     *                            is not null and has an id that is not null and not < 1
     */
    @PreAuthorize("hasRole('ADMIN')")
    void addSubjectToStudyPlan(SubjectForStudyPlan subjectForStudyPlan);

    /**
     * Returns all available subjects for the study plan with the given id.
     * The subjects can be filtered with the query string; the search strategy
     * of the query should be byNameContainingIgnoreCase(query).
     * The user has to be authenticated.
     *
     * @param id should not be null and not < 1
     * @param query
     * @return
     */
    @PreAuthorize("isAuthenticated()")
    List<Subject> getAvailableSubjectsForStudyPlan(Long id, String query);

    /**
     * Disables the study plan with the given id.
     * The user needs the role ADMIN.
     * May throw a BusinessObjectNotFoundException if the study plan with this id does not exist.
     * May throw a ValidationException if the id is not valid.
     *
     * @param id should not be null and not < 1
     */
    @PreAuthorize("hasRole('ADMIN')")
    StudyPlan disableStudyPlan(Long id);

    /**
     * Removes a given subject s from the study plan sp.
     * The user needs the role ADMIN.
     *
     * @author m.pazourek
     * @param sp should not be null and sp.id should not be null and not < 1
     * @param s should have an id
     */
    @PreAuthorize("hasRole('ADMIN')")
    void removeSubjectFromStudyPlan(StudyPlan sp, Subject s);
}
src/main/java/at/ac/tuwien/inso/service/StudyPlanService.java
package at.ac.tuwien.inso.service; import at.ac.tuwien.inso.entity.*; import org.springframework.security.access.prepost.PreAuthorize; import java.util.*; public interface StudyPlanService { /** * creates a new study plan * may throw a ValidationException if study plans name, or optional, mandatory or freechoice ects values are null or empty or <=0 * * @param studyPlan * @return */ @PreAuthorize("hasRole('ADMIN')") StudyPlan create(StudyPlan studyPlan); /** * returns a list of all StudyPlans. * user must be authorized * @return */ @PreAuthorize("isAuthenticated()") List<StudyPlan> findAll(); /** * returns the StudyPlan with the corresponding id * may throw a BusinessObjectNotFoundException if there is no StudyPlan with this id * @param id should not be null and not <1 * @return */ @PreAuthorize("isAuthenticated()") StudyPlan findOne(Long id); /** * try to find all SubjectsForStudyPlan by a study plan id. * should be ordered by semester recommendation * user must be authorized * * @param id. should not be null and not <1 * @return */ @PreAuthorize("isAuthenticated()") List<SubjectForStudyPlan> getSubjectsForStudyPlan(Long id); /** * * @param id * @return */ @PreAuthorize("isAuthenticated()") List<SubjectWithGrade> getSubjectsWithGradesForStudyPlan(Long id); @PreAuthorize("hasRole('ADMIN')") void addSubjectToStudyPlan(SubjectForStudyPlan subjectForStudyPlan); /** * returns all available subjects for the study plan with the id. the subjects can be filtered with the query string * the search strategy of the query should be byNameContainingIgnoreCase(query) * * user has to be authenticated * * @param id. should not be null and not <1 * @param query * @return */ @PreAuthorize("isAuthenticated()") List<Subject> getAvailableSubjectsForStudyPlan(Long id, String query); /** * disables the study plan of the given id. * * user needs role ADMIN * may throw BusinessObjectNotFoundException if the study plan with this id does not exists * may throw a ValidationException if the id is not correct * * @param id. should not be null and not <1 */ @PreAuthorize("hasRole('ADMIN')") StudyPlan disableStudyPlan(Long id); /** * removes a given subject s from the study plan sp * user need role ADMIN * * @author m.pazourek * @param sp should not be null and the sp.id should not be <1 and not null * @param s should have an id */ @PreAuthorize("hasRole('ADMIN')") void removeSubjectFromStudyPlan(StudyPlan sp, Subject s); }
added java doc
src/main/java/at/ac/tuwien/inso/service/StudyPlanService.java
added java doc
Java
epl-1.0
4968059ce9dfe6a576075226e0ec18fb99137094
0
Charling-Huang/birt,sguan-actuate/birt,sguan-actuate/birt,rrimmana/birt-1,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,rrimmana/birt-1,Charling-Huang/birt,Charling-Huang/birt
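The @PreAuthorize annotations documented above only take effect if Spring Security's method security is enabled in the application's configuration. A minimal sketch of such a configuration class follows; it is not part of this commit, and the class name is illustrative.

import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;

// Enables evaluation of @PreAuthorize expressions such as hasRole('ADMIN')
// and isAuthenticated() on service methods like those in StudyPlanService.
@Configuration
@EnableGlobalMethodSecurity(prePostEnabled = true)
public class MethodSecurityConfig {
}

On Spring Security 5.6 and later the equivalent annotation is @EnableMethodSecurity, where pre/post annotation support is enabled by default.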
/******************************************************************************* * Copyright (c) 2004 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.birt.report.designer.internal.ui.dnd; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.eclipse.birt.report.designer.core.model.SessionHandleAdapter; import org.eclipse.birt.report.designer.core.model.schematic.HandleAdapterFactory; import org.eclipse.birt.report.designer.core.model.schematic.ListBandProxy; import org.eclipse.birt.report.designer.core.model.schematic.TableHandleAdapter; import org.eclipse.birt.report.designer.core.model.views.data.DataSetItemModel; import org.eclipse.birt.report.designer.internal.ui.editors.schematic.editparts.ReportElementEditPart; import org.eclipse.birt.report.designer.internal.ui.util.DataSetManager; import org.eclipse.birt.report.designer.internal.ui.util.ExceptionHandler; import org.eclipse.birt.report.designer.ui.newelement.DesignElementFactory; import org.eclipse.birt.report.designer.util.DEUtil; import org.eclipse.birt.report.designer.util.DNDUtil; import org.eclipse.birt.report.model.api.CellHandle; import org.eclipse.birt.report.model.api.DataItemHandle; import org.eclipse.birt.report.model.api.DataSetHandle; import org.eclipse.birt.report.model.api.DesignElementHandle; import org.eclipse.birt.report.model.api.FreeFormHandle; import org.eclipse.birt.report.model.api.GridHandle; import org.eclipse.birt.report.model.api.GroupHandle; import org.eclipse.birt.report.model.api.LabelHandle; import org.eclipse.birt.report.model.api.ListHandle; import org.eclipse.birt.report.model.api.ModuleHandle; import org.eclipse.birt.report.model.api.ReportItemHandle; import org.eclipse.birt.report.model.api.RowHandle; import org.eclipse.birt.report.model.api.ScalarParameterHandle; import org.eclipse.birt.report.model.api.SlotHandle; import org.eclipse.birt.report.model.api.TableGroupHandle; import org.eclipse.birt.report.model.api.TableHandle; import org.eclipse.birt.report.model.api.activity.SemanticException; import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants; import org.eclipse.birt.report.model.api.elements.ReportDesignConstants; import org.eclipse.gef.EditPart; import org.eclipse.jface.util.Assert; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.StructuredSelection; /** * Utility for creation from data view to layout */ public class InsertInLayoutUtil { /** * Rule interface for defining insertion rule */ abstract static interface InsertInLayoutRule { public boolean canInsert( ); public Object getInsertPosition( ); public void insert( Object object ) throws SemanticException; } /** * * Rule for inserting label after inserting data set column */ static class LabelAddRule implements InsertInLayoutRule { private Object container; private CellHandle newTarget; public LabelAddRule( Object container ) { this.container = container; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.views.actions.InsertInLayoutAction.InsertInLayoutRule#canInsert() */ public boolean 
canInsert( ) { if ( container instanceof SlotHandle ) { container = ( (SlotHandle) container ).getElementHandle( ); } if ( !( container instanceof CellHandle ) ) return false; CellHandle cell = (CellHandle) container; // Validates source position of data item boolean canInsert = false; if ( cell.getContainer( ).getContainer( ) instanceof TableGroupHandle ) { canInsert = true; } else { if ( cell.getContainer( ).getContainerSlotHandle( ).getSlotID( ) == TableHandle.DETAIL_SLOT ) { canInsert = true; } } // Validates column count and gets the target if ( canInsert ) { TableHandle table = null; if ( cell.getContainer( ).getContainer( ) instanceof TableHandle ) { table = (TableHandle) cell.getContainer( ).getContainer( ); } else { table = (TableHandle) cell.getContainer( ) .getContainer( ) .getContainer( ); } SlotHandle header = table.getHeader( ); if ( header != null && header.getCount( ) > 0 ) { int columnNum = HandleAdapterFactory.getInstance( ) .getCellHandleAdapter( cell ) .getColumnNumber( ); newTarget = (CellHandle) HandleAdapterFactory.getInstance( ) .getTableHandleAdapter( table ) .getCell( 1, columnNum, false ); return newTarget != null && newTarget.getContent( ).getCount( ) == 0; } } return false; } /** * Returns new Label insert position in form of <code>CellHandle</code> */ public Object getInsertPosition( ) { return newTarget; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#insert() */ public void insert( Object object ) throws SemanticException { Assert.isTrue( object instanceof DesignElementHandle ); newTarget.addElement( (DesignElementHandle) object, CellHandle.CONTENT_SLOT ); } } /** * * Rule for inserting multiple data into table, and populating adjacent * cells */ static class MultiItemsExpandRule implements InsertInLayoutRule { private Object[] items; private Object target; private int focusIndex = 0; public MultiItemsExpandRule( Object[] items, Object target ) { this.items = items; this.target = target; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.views.actions.InsertInLayoutAction.InsertInLayoutRule#canInsert() */ public boolean canInsert( ) { return items != null && items.length > 1 && target != null && ( target instanceof DesignElementHandle || target instanceof ListBandProxy ); } /** * * Returns multiple insert positions in form of array */ public Object getInsertPosition( ) { Object[] positions = new Object[items.length]; if ( target instanceof CellHandle ) { CellHandle firstCell = (CellHandle) target; TableHandleAdapter tableAdapter = HandleAdapterFactory.getInstance( ) .getTableHandleAdapter( getTableHandle( firstCell ) ); int currentColumn = HandleAdapterFactory.getInstance( ) .getCellHandleAdapter( firstCell ) .getColumnNumber( ); int currentRow = HandleAdapterFactory.getInstance( ) .getCellHandleAdapter( firstCell ) .getRowNumber( ); int columnDiff = currentColumn + items.length - tableAdapter.getColumnCount( ) - 1; // Insert columns if table can not contain all items if ( columnDiff > 0 ) { int insertColumn = tableAdapter.getColumnCount( ); try { tableAdapter.insertColumns( columnDiff, insertColumn ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); return null; } } for ( int i = 0; i < positions.length; i++ ) { positions[i] = tableAdapter.getCell( currentRow, currentColumn++ ); } focusIndex = 0; } else { for ( int i = 0; i < positions.length; i++ ) { positions[i] = target; } focusIndex = items.length - 1; } return positions; } protected TableHandle 
getTableHandle( CellHandle firstCell ) { DesignElementHandle tableContainer = firstCell.getContainer( ) .getContainer( ); if ( tableContainer instanceof TableHandle ) { return (TableHandle) tableContainer; } return (TableHandle) tableContainer.getContainer( ); } /** * Returns the index of the focus element in the items */ public int getFocusIndex( ) { return focusIndex; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#insert() */ public void insert( Object object ) throws SemanticException { // TODO Auto-generated method stub } } /** * * Rule for setting key when inserting data set column to group handle */ static class GroupKeySetRule implements InsertInLayoutRule { private Object container; private DataSetItemModel dataSetColumn; public GroupKeySetRule( Object container, DataSetItemModel dataSetColumn ) { this.container = container; this.dataSetColumn = dataSetColumn; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#canInsert() */ public boolean canInsert( ) { return getGroupContainer( container ) != null && getGroupHandle( container ).getKeyExpr( ) == null && ( getGroupContainer( container ).getDataSet( ) == getDataSetHandle( dataSetColumn ) || getGroupContainer( container ).getDataSet( ) == null ); } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#getInsertPosition() */ public Object getInsertPosition( ) { return null; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#insert(java.lang.Object) */ public void insert( Object object ) throws SemanticException { Assert.isTrue( object instanceof DataSetItemModel ); Assert.isTrue( object == dataSetColumn || object == null ); getGroupContainer( container ).setDataSet( getDataSetHandle( dataSetColumn ) ); getGroupHandle( container ).setKeyExpr( DEUtil.getExpression( dataSetColumn ) ); } protected DataSetHandle getDataSetHandle( DataSetItemModel model ) { return (DataSetHandle) model.getParent( ); } protected GroupHandle getGroupHandle( Object target ) { DesignElementHandle handle = null; if ( target instanceof CellHandle ) { handle = ( (CellHandle) target ).getContainer( ).getContainer( ); } else if ( target instanceof ListBandProxy ) { handle = ( (ListBandProxy) target ).getElemtHandle( ); } if ( handle instanceof GroupHandle ) { return (GroupHandle) handle; } return null; } protected ReportItemHandle getGroupContainer( Object target ) { GroupHandle group = getGroupHandle( target ); if ( group != null && group.getContainer( ) instanceof ReportItemHandle ) return (ReportItemHandle) group.getContainer( ); return null; } } /** * Creates a object to insert. 
* * @param insertObj * object insert to layout * @param target * insert target, like cell or ListBandProxy * @param targetParent * insert target's non-dummy container, like table or list * @return new object in layout * @throws SemanticException */ public static DesignElementHandle performInsert( Object insertObj, Object target, Object targetParent ) throws SemanticException { Assert.isNotNull( insertObj ); Assert.isNotNull( target ); if ( insertObj instanceof DataSetHandle ) { return performInsertDataSet( (DataSetHandle) insertObj ); } else if ( insertObj instanceof DataSetItemModel ) { return performInsertDataSetColumn( (DataSetItemModel) insertObj, target, targetParent ); } else if ( insertObj instanceof ScalarParameterHandle ) { return performInsertParameter( (ScalarParameterHandle) insertObj ); } else if ( insertObj instanceof String ) { // Such as invalid group key return performInsertString( (String) insertObj, target ); } else if ( insertObj instanceof Object[] ) { return performMultiInsert( (Object[]) insertObj, target, targetParent ); } else if ( insertObj instanceof IStructuredSelection ) { return performMultiInsert( ( (IStructuredSelection) insertObj ).toArray( ), target, targetParent ); } return null; } /** * Creates a object, "Add" operation to layout needs to handle later. * <p> * Must make sure operation legal before execution. * </p> * * @param insertObj * object insert to layout * @param editPart * target EditPart * @return new object in layout * @throws SemanticException */ public static DesignElementHandle performInsert( Object insertObj, EditPart editPart ) throws SemanticException { Assert.isNotNull( insertObj ); Assert.isNotNull( editPart ); return performInsert( insertObj, editPart.getModel( ), editPart.getParent( ).getModel( ) ); } /** * Creates multiple objects * * @param array * multiple creation source * @param target * @param targetParent * @return first creation in layout * @throws SemanticException */ protected static DesignElementHandle performMultiInsert( Object[] array, Object target, Object targetParent ) throws SemanticException { DesignElementHandle result = null; MultiItemsExpandRule rule = new MultiItemsExpandRule( array, target ); if ( rule.canInsert( ) ) { Object[] positions = (Object[]) rule.getInsertPosition( ); if ( positions != null ) { for ( int i = 0; i < array.length; i++ ) { DesignElementHandle newObj = performInsert( array[i], positions[i], targetParent ); if ( i == rule.getFocusIndex( ) ) { result = newObj; } else { DNDUtil.addElementHandle( positions[i], newObj ); } } } } else if ( array.length != 0 ) { result = performInsert( array[0], target, targetParent ); } return result; } protected static DataItemHandle performInsertParameter( ScalarParameterHandle model ) throws SemanticException { // DataItemHandle dataHandle = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); dataHandle.setValueExpr( DEUtil.getExpression( model ) ); return dataHandle; } /** * Inserts dataset column into the target. 
Add label or group key if * possible * * @param model * column item * @param target * insert target like cell or ListBandProxy * @param targetParent * target container like table or list * @return to be inserted data item * @throws SemanticException */ protected static DataItemHandle performInsertDataSetColumn( DataSetItemModel model, Object target, Object targetParent ) throws SemanticException { // DataItemHandle dataHandle = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); DataSetHandle dataSet = (DataSetHandle) model.getParent( ); dataHandle.setValueExpr( DEUtil.getExpression( model ) ); if ( targetParent instanceof ReportItemHandle ) { ReportItemHandle container = (ReportItemHandle) targetParent; if ( !DEUtil.getDataSetList( container ).contains( dataSet ) ) { if ( container.getDataSet( ) == null ) { container.setDataSet( dataSet ); } } } else { // Adds dataset to the single query in the top level of the report dataHandle.setDataSet( dataSet ); } InsertInLayoutRule rule = new LabelAddRule( target ); if ( rule.canInsert( ) ) { // LabelHandle label = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); LabelHandle label = DesignElementFactory.getInstance( ) .newLabel( null ); label.setText( model.getDisplayName( ) ); rule.insert( label ); } rule = new GroupKeySetRule( target, model ); if ( rule.canInsert( ) ) { rule.insert( model ); } return dataHandle; } /** * Inserts invalid column string into the target. Add label if possible * * @param expression * invalid column or other expression * @param target * insert target like cell or ListBandProxy * @return to be inserted data item * @throws SemanticException */ protected static DesignElementHandle performInsertString( String expression, Object target ) throws SemanticException { // DataItemHandle dataHandle = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); dataHandle.setValueExpr( expression ); InsertInLayoutRule rule = new LabelAddRule( target ); if ( rule.canInsert( ) ) { // LabelHandle label = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); LabelHandle label = DesignElementFactory.getInstance( ) .newLabel( null ); label.setText( expression ); rule.insert( label ); } return dataHandle; } protected static TableHandle performInsertDataSet( DataSetHandle model ) throws SemanticException { DataSetItemModel[] columns = DataSetManager.getCurrentInstance( ) .getColumns( model, false ); if ( columns == null || columns.length == 0 ) { return null; } // TableHandle tableHandle = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newTableItem( null, columns.length ); TableHandle tableHandle = DesignElementFactory.getInstance( ) .newTableItem( null, columns.length ); setInitWidth( tableHandle ); insertToCell( tableHandle.getHeader( ), columns, true ); insertToCell( tableHandle.getDetail( ), columns, false ); tableHandle.setDataSet( model ); return tableHandle; } /** * Validates object can be inserted to layout. Support the multiple. 
* * @param insertObj * single inserted object or multi-objects * @param targetPart * @return if can be inserted to layout */ public static boolean handleValidateInsertToLayout( Object insertObj, EditPart targetPart ) { if ( targetPart == null ) { return false; } if ( insertObj instanceof Object[] ) { Object[] array = (Object[]) insertObj; if ( !checkSameDataSetInMultiColumns( array ) ) { return false; } for ( int i = 0; i < array.length; i++ ) { if ( !handleValidateInsertToLayout( array[i], targetPart ) ) { return false; } } return true; } else if ( insertObj instanceof IStructuredSelection ) { return handleValidateInsertToLayout( ( (IStructuredSelection) insertObj ).toArray( ), targetPart ); } else if ( insertObj instanceof DataSetHandle ) { return isHandleValid( (DataSetHandle) insertObj ) && ( (DataSetHandle) insertObj ).getDataSource( ) != null && handleValidateDataSet( targetPart ); } else if ( insertObj instanceof DataSetItemModel ) { return handleValidateDataSetColumn( (DataSetItemModel) insertObj, targetPart ); } else if ( insertObj instanceof ScalarParameterHandle ) { return isHandleValid( (ScalarParameterHandle) insertObj ) && handleValidateParameter( targetPart ); } return false; } /** * Checks if all the DataSetColumn has the same DataSet. * * @param array * all elements * @return false if not same; true if every column has the same DataSet or * the element is not an instance of DataSetColumn */ protected static boolean checkSameDataSetInMultiColumns( Object[] array ) { if ( array == null ) return false; Object dataSet = null; for ( int i = 0; i < array.length; i++ ) { if ( array[i] instanceof DataSetItemModel ) { Object currDataSet = ( (DataSetItemModel) array[i] ).getParent( ); if ( currDataSet == null ) { return false; } if ( dataSet == null ) { dataSet = currDataSet; } else { if ( dataSet != currDataSet ) { return false; } } } } return true; } /** * Validates container of drop target from data set in data view * * @param dropPart * @return validate result */ protected static boolean handleValidateDataSetDropContainer( EditPart dropPart ) { if ( dropPart.getParent( ) == null ) { return false; } Object container = dropPart.getParent( ).getModel( ); return ( container instanceof GridHandle || container instanceof TableHandle || container instanceof FreeFormHandle || container instanceof ListHandle || dropPart.getModel( ) instanceof ModuleHandle ); } /** * Validates container of drop target from data set column in data view * * @param dropPart * @return validate result */ protected static boolean handleValidateDataSetColumnDropContainer( EditPart dropPart ) { if ( dropPart.getParent( ) == null ) { return false; } Object container = dropPart.getParent( ).getModel( ); return ( container instanceof GridHandle || container instanceof TableHandle || container instanceof FreeFormHandle || container instanceof ListHandle || dropPart.getModel( ) instanceof ModuleHandle ); } /** * Validates container of drop target from scalar parameter in data view * * @param dropPart * @return validate result */ protected static boolean handleValidateParameterDropContainer( EditPart dropPart ) { if ( dropPart.getParent( ) == null ) { return false; } Object container = dropPart.getParent( ).getModel( ); return ( container instanceof GridHandle || container instanceof TableHandle || container instanceof FreeFormHandle || container instanceof ListHandle || dropPart.getModel( ) instanceof ModuleHandle ); } /** * Validates drop target from data set in data view. 
* * @return validate result */ protected static boolean handleValidateDataSet( EditPart target ) { return handleValidateDataSetDropContainer( target ) && DNDUtil.handleValidateTargetCanContainType( target.getModel( ), ReportDesignConstants.TABLE_ITEM ); } /** * Validates drop target from data set column in data view. * * @return validate result */ protected static boolean handleValidateDataSetColumn( DataSetItemModel insertObj, EditPart target ) { if ( handleValidateDataSetColumnDropContainer( target ) && DNDUtil.handleValidateTargetCanContainType( target.getModel( ), ReportDesignConstants.DATA_ITEM ) ) { // Validates target is report root if ( target.getModel( ) instanceof ModuleHandle ) { return true; } // Validates target's dataset is null or the same with the inserted DesignElementHandle handle = (DesignElementHandle) target.getParent( ) .getModel( ); if ( handle instanceof ReportItemHandle && ( (ReportItemHandle) handle ).getDataSet( ) == null ) { return true; } return DEUtil.getDataSetList( handle ) .contains( insertObj.getParent( ) ); } return false; } /** * Validates drop target from scalar parameter in data view. * * @return validate result */ protected static boolean handleValidateParameter( EditPart target ) { return handleValidateParameterDropContainer( target ) && DNDUtil.handleValidateTargetCanContainType( target.getModel( ), ReportDesignConstants.DATA_ITEM ); } /** * Validates drag source from data view to layout. Support the multiple. * * @return validate result */ public static boolean handleValidateInsert( Object insertObj ) { if ( insertObj instanceof Object[] ) { Object[] array = (Object[]) insertObj; if(array.length == 0) { return false; } for ( int i = 0; i < array.length; i++ ) { if ( !handleValidateInsert( array[i] ) ) return false; } return true; } else if ( insertObj instanceof IStructuredSelection ) { return handleValidateInsert( ( (IStructuredSelection) insertObj ).toArray( ) ); } // else if ( insertObj instanceof ParameterHandle ) // { // if ( ( (ParameterHandle) insertObj ).getRoot( ) instanceof LibraryHandle ) // return false; // } return insertObj instanceof DataSetHandle || insertObj instanceof DataSetItemModel || insertObj instanceof ScalarParameterHandle; } protected static void insertToCell( SlotHandle slot, DataSetItemModel[] columns, boolean isLabel ) { for ( int i = 0; i < slot.getCount( ); i++ ) { SlotHandle cells = ( (RowHandle) slot.get( i ) ).getCells( ); for ( int j = 0; j < cells.getCount( ); j++ ) { CellHandle cell = (CellHandle) cells.get( j ); try { if ( isLabel ) { LabelHandle labelItemHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) .getElementFactory( ) .newLabel( null ); // LabelHandle labelItemHandle = DesignElementFactory.getInstance( ) // .newLabel( null ); labelItemHandle.setText( columns[j].getDisplayName( ) ); cell.addElement( labelItemHandle, cells.getSlotID( ) ); } else { DataItemHandle dataHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) .getElementFactory( ) .newDataItem( null ); // DataItemHandle dataHandle = DesignElementFactory.getInstance( ) // .newDataItem( null ); dataHandle.setValueExpr( DEUtil.getExpression( columns[j] ) ); cell.addElement( dataHandle, cells.getSlotID( ) ); } } catch ( Exception e ) { ExceptionHandler.handle( e ); } } } } /** * Sets initial width to new object * * @param object * new object */ public static void setInitWidth( Object object ) { int percentAll = 100; try { if ( object instanceof TableHandle ) { TableHandle table = (TableHandle) object; 
table.setWidth( percentAll + DesignChoiceConstants.UNITS_PERCENTAGE ); } else if ( object instanceof GridHandle ) { GridHandle grid = (GridHandle) object; grid.setWidth( percentAll + DesignChoiceConstants.UNITS_PERCENTAGE ); } else return; } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } protected static boolean isHandleValid( DesignElementHandle handle ) { return handle.isValid( ) && handle.getSemanticErrors( ).isEmpty( ); } /** * Converts edit part selection into model selection. * * @param selection * edit part * @return model, return Collections.EMPTY_LIST if selection is null or * empty. */ public static IStructuredSelection editPart2Model( ISelection selection ) { if ( selection == null || !( selection instanceof IStructuredSelection ) ) return new StructuredSelection( Collections.EMPTY_LIST ); List list = ( (IStructuredSelection) selection ).toList( ); List resultList = new ArrayList( ); for ( int i = 0; i < list.size( ); i++ ) { Object obj = list.get( i ); if ( obj instanceof ReportElementEditPart ) { Object model = ( (ReportElementEditPart) obj ).getModel( ); if ( model instanceof ListBandProxy ) { model = ( (ListBandProxy) model ).getSlotHandle( ); } resultList.add( model ); } } return new StructuredSelection( resultList ); } }
UI/org.eclipse.birt.report.designer.ui/src/org/eclipse/birt/report/designer/internal/ui/dnd/InsertInLayoutUtil.java
/******************************************************************************* * Copyright (c) 2004 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.birt.report.designer.internal.ui.dnd; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.eclipse.birt.report.designer.core.model.schematic.HandleAdapterFactory; import org.eclipse.birt.report.designer.core.model.schematic.ListBandProxy; import org.eclipse.birt.report.designer.core.model.schematic.TableHandleAdapter; import org.eclipse.birt.report.designer.core.model.views.data.DataSetItemModel; import org.eclipse.birt.report.designer.internal.ui.editors.schematic.editparts.ReportElementEditPart; import org.eclipse.birt.report.designer.internal.ui.util.DataSetManager; import org.eclipse.birt.report.designer.internal.ui.util.ExceptionHandler; import org.eclipse.birt.report.designer.ui.newelement.DesignElementFactory; import org.eclipse.birt.report.designer.util.DEUtil; import org.eclipse.birt.report.designer.util.DNDUtil; import org.eclipse.birt.report.model.api.CellHandle; import org.eclipse.birt.report.model.api.DataItemHandle; import org.eclipse.birt.report.model.api.DataSetHandle; import org.eclipse.birt.report.model.api.DesignElementHandle; import org.eclipse.birt.report.model.api.FreeFormHandle; import org.eclipse.birt.report.model.api.GridHandle; import org.eclipse.birt.report.model.api.GroupHandle; import org.eclipse.birt.report.model.api.LabelHandle; import org.eclipse.birt.report.model.api.ListHandle; import org.eclipse.birt.report.model.api.ModuleHandle; import org.eclipse.birt.report.model.api.ReportItemHandle; import org.eclipse.birt.report.model.api.RowHandle; import org.eclipse.birt.report.model.api.ScalarParameterHandle; import org.eclipse.birt.report.model.api.SlotHandle; import org.eclipse.birt.report.model.api.TableGroupHandle; import org.eclipse.birt.report.model.api.TableHandle; import org.eclipse.birt.report.model.api.activity.SemanticException; import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants; import org.eclipse.birt.report.model.api.elements.ReportDesignConstants; import org.eclipse.gef.EditPart; import org.eclipse.jface.util.Assert; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.StructuredSelection; /** * Utility for creation from data view to layout */ public class InsertInLayoutUtil { /** * Rule interface for defining insertion rule */ abstract static interface InsertInLayoutRule { public boolean canInsert( ); public Object getInsertPosition( ); public void insert( Object object ) throws SemanticException; } /** * * Rule for inserting label after inserting data set column */ static class LabelAddRule implements InsertInLayoutRule { private Object container; private CellHandle newTarget; public LabelAddRule( Object container ) { this.container = container; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.views.actions.InsertInLayoutAction.InsertInLayoutRule#canInsert() */ public boolean canInsert( ) { if ( container instanceof SlotHandle ) { container = ( 
(SlotHandle) container ).getElementHandle( ); } if ( !( container instanceof CellHandle ) ) return false; CellHandle cell = (CellHandle) container; // Validates source position of data item boolean canInsert = false; if ( cell.getContainer( ).getContainer( ) instanceof TableGroupHandle ) { canInsert = true; } else { if ( cell.getContainer( ).getContainerSlotHandle( ).getSlotID( ) == TableHandle.DETAIL_SLOT ) { canInsert = true; } } // Validates column count and gets the target if ( canInsert ) { TableHandle table = null; if ( cell.getContainer( ).getContainer( ) instanceof TableHandle ) { table = (TableHandle) cell.getContainer( ).getContainer( ); } else { table = (TableHandle) cell.getContainer( ) .getContainer( ) .getContainer( ); } SlotHandle header = table.getHeader( ); if ( header != null && header.getCount( ) > 0 ) { int columnNum = HandleAdapterFactory.getInstance( ) .getCellHandleAdapter( cell ) .getColumnNumber( ); newTarget = (CellHandle) HandleAdapterFactory.getInstance( ) .getTableHandleAdapter( table ) .getCell( 1, columnNum, false ); return newTarget != null && newTarget.getContent( ).getCount( ) == 0; } } return false; } /** * Returns new Label insert position in form of <code>CellHandle</code> */ public Object getInsertPosition( ) { return newTarget; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#insert() */ public void insert( Object object ) throws SemanticException { Assert.isTrue( object instanceof DesignElementHandle ); newTarget.addElement( (DesignElementHandle) object, CellHandle.CONTENT_SLOT ); } } /** * * Rule for inserting multiple data into table, and populating adjacent * cells */ static class MultiItemsExpandRule implements InsertInLayoutRule { private Object[] items; private Object target; private int focusIndex = 0; public MultiItemsExpandRule( Object[] items, Object target ) { this.items = items; this.target = target; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.views.actions.InsertInLayoutAction.InsertInLayoutRule#canInsert() */ public boolean canInsert( ) { return items != null && items.length > 1 && target != null && ( target instanceof DesignElementHandle || target instanceof ListBandProxy ); } /** * * Returns multiple insert positions in form of array */ public Object getInsertPosition( ) { Object[] positions = new Object[items.length]; if ( target instanceof CellHandle ) { CellHandle firstCell = (CellHandle) target; TableHandleAdapter tableAdapter = HandleAdapterFactory.getInstance( ) .getTableHandleAdapter( getTableHandle( firstCell ) ); int currentColumn = HandleAdapterFactory.getInstance( ) .getCellHandleAdapter( firstCell ) .getColumnNumber( ); int currentRow = HandleAdapterFactory.getInstance( ) .getCellHandleAdapter( firstCell ) .getRowNumber( ); int columnDiff = currentColumn + items.length - tableAdapter.getColumnCount( ) - 1; // Insert columns if table can not contain all items if ( columnDiff > 0 ) { int insertColumn = tableAdapter.getColumnCount( ); try { tableAdapter.insertColumns( columnDiff, insertColumn ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); return null; } } for ( int i = 0; i < positions.length; i++ ) { positions[i] = tableAdapter.getCell( currentRow, currentColumn++ ); } focusIndex = 0; } else { for ( int i = 0; i < positions.length; i++ ) { positions[i] = target; } focusIndex = items.length - 1; } return positions; } protected TableHandle getTableHandle( CellHandle firstCell ) { DesignElementHandle 
tableContainer = firstCell.getContainer( ) .getContainer( ); if ( tableContainer instanceof TableHandle ) { return (TableHandle) tableContainer; } return (TableHandle) tableContainer.getContainer( ); } /** * Returns the index of the focus element in the items */ public int getFocusIndex( ) { return focusIndex; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#insert() */ public void insert( Object object ) throws SemanticException { // TODO Auto-generated method stub } } /** * * Rule for setting key when inserting data set column to group handle */ static class GroupKeySetRule implements InsertInLayoutRule { private Object container; private DataSetItemModel dataSetColumn; public GroupKeySetRule( Object container, DataSetItemModel dataSetColumn ) { this.container = container; this.dataSetColumn = dataSetColumn; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#canInsert() */ public boolean canInsert( ) { return getGroupContainer( container ) != null && getGroupHandle( container ).getKeyExpr( ) == null && ( getGroupContainer( container ).getDataSet( ) == getDataSetHandle( dataSetColumn ) || getGroupContainer( container ).getDataSet( ) == null ); } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#getInsertPosition() */ public Object getInsertPosition( ) { return null; } /* * (non-Javadoc) * * @see org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil.InsertInLayoutRule#insert(java.lang.Object) */ public void insert( Object object ) throws SemanticException { Assert.isTrue( object instanceof DataSetItemModel ); Assert.isTrue( object == dataSetColumn || object == null ); getGroupContainer( container ).setDataSet( getDataSetHandle( dataSetColumn ) ); getGroupHandle( container ).setKeyExpr( DEUtil.getExpression( dataSetColumn ) ); } protected DataSetHandle getDataSetHandle( DataSetItemModel model ) { return (DataSetHandle) model.getParent( ); } protected GroupHandle getGroupHandle( Object target ) { DesignElementHandle handle = null; if ( target instanceof CellHandle ) { handle = ( (CellHandle) target ).getContainer( ).getContainer( ); } else if ( target instanceof ListBandProxy ) { handle = ( (ListBandProxy) target ).getElemtHandle( ); } if ( handle instanceof GroupHandle ) { return (GroupHandle) handle; } return null; } protected ReportItemHandle getGroupContainer( Object target ) { GroupHandle group = getGroupHandle( target ); if ( group != null && group.getContainer( ) instanceof ReportItemHandle ) return (ReportItemHandle) group.getContainer( ); return null; } } /** * Creates a object to insert. 
* * @param insertObj * object insert to layout * @param target * insert target, like cell or ListBandProxy * @param targetParent * insert target's non-dummy container, like table or list * @return new object in layout * @throws SemanticException */ public static DesignElementHandle performInsert( Object insertObj, Object target, Object targetParent ) throws SemanticException { Assert.isNotNull( insertObj ); Assert.isNotNull( target ); if ( insertObj instanceof DataSetHandle ) { return performInsertDataSet( (DataSetHandle) insertObj ); } else if ( insertObj instanceof DataSetItemModel ) { return performInsertDataSetColumn( (DataSetItemModel) insertObj, target, targetParent ); } else if ( insertObj instanceof ScalarParameterHandle ) { return performInsertParameter( (ScalarParameterHandle) insertObj ); } else if ( insertObj instanceof String ) { // Such as invalid group key return performInsertString( (String) insertObj, target ); } else if ( insertObj instanceof Object[] ) { return performMultiInsert( (Object[]) insertObj, target, targetParent ); } else if ( insertObj instanceof IStructuredSelection ) { return performMultiInsert( ( (IStructuredSelection) insertObj ).toArray( ), target, targetParent ); } return null; } /** * Creates a object, "Add" operation to layout needs to handle later. * <p> * Must make sure operation legal before execution. * </p> * * @param insertObj * object insert to layout * @param editPart * target EditPart * @return new object in layout * @throws SemanticException */ public static DesignElementHandle performInsert( Object insertObj, EditPart editPart ) throws SemanticException { Assert.isNotNull( insertObj ); Assert.isNotNull( editPart ); return performInsert( insertObj, editPart.getModel( ), editPart.getParent( ).getModel( ) ); } /** * Creates multiple objects * * @param array * multiple creation source * @param target * @param targetParent * @return first creation in layout * @throws SemanticException */ protected static DesignElementHandle performMultiInsert( Object[] array, Object target, Object targetParent ) throws SemanticException { DesignElementHandle result = null; MultiItemsExpandRule rule = new MultiItemsExpandRule( array, target ); if ( rule.canInsert( ) ) { Object[] positions = (Object[]) rule.getInsertPosition( ); if ( positions != null ) { for ( int i = 0; i < array.length; i++ ) { DesignElementHandle newObj = performInsert( array[i], positions[i], targetParent ); if ( i == rule.getFocusIndex( ) ) { result = newObj; } else { DNDUtil.addElementHandle( positions[i], newObj ); } } } } else if ( array.length != 0 ) { result = performInsert( array[0], target, targetParent ); } return result; } protected static DataItemHandle performInsertParameter( ScalarParameterHandle model ) throws SemanticException { // DataItemHandle dataHandle = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); dataHandle.setValueExpr( DEUtil.getExpression( model ) ); return dataHandle; } /** * Inserts dataset column into the target. 
Add label or group key if * possible * * @param model * column item * @param target * insert target like cell or ListBandProxy * @param targetParent * target container like table or list * @return to be inserted data item * @throws SemanticException */ protected static DataItemHandle performInsertDataSetColumn( DataSetItemModel model, Object target, Object targetParent ) throws SemanticException { // DataItemHandle dataHandle = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); DataSetHandle dataSet = (DataSetHandle) model.getParent( ); dataHandle.setValueExpr( DEUtil.getExpression( model ) ); if ( targetParent instanceof ReportItemHandle ) { ReportItemHandle container = (ReportItemHandle) targetParent; if ( !DEUtil.getDataSetList( container ).contains( dataSet ) ) { if ( container.getDataSet( ) == null ) { container.setDataSet( dataSet ); } } } else { // Adds dataset to the single query in the top level of the report dataHandle.setDataSet( dataSet ); } InsertInLayoutRule rule = new LabelAddRule( target ); if ( rule.canInsert( ) ) { // LabelHandle label = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); LabelHandle label = DesignElementFactory.getInstance( ) .newLabel( null ); label.setText( model.getDisplayName( ) ); rule.insert( label ); } rule = new GroupKeySetRule( target, model ); if ( rule.canInsert( ) ) { rule.insert( model ); } return dataHandle; } /** * Inserts invalid column string into the target. Add label if possible * * @param expression * invalid column or other expression * @param target * insert target like cell or ListBandProxy * @return to be inserted data item * @throws SemanticException */ protected static DesignElementHandle performInsertString( String expression, Object target ) throws SemanticException { // DataItemHandle dataHandle = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); dataHandle.setValueExpr( expression ); InsertInLayoutRule rule = new LabelAddRule( target ); if ( rule.canInsert( ) ) { // LabelHandle label = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); LabelHandle label = DesignElementFactory.getInstance( ) .newLabel( null ); label.setText( expression ); rule.insert( label ); } return dataHandle; } protected static TableHandle performInsertDataSet( DataSetHandle model ) throws SemanticException { DataSetItemModel[] columns = DataSetManager.getCurrentInstance( ) .getColumns( model, false ); if ( columns == null || columns.length == 0 ) { return null; } // TableHandle tableHandle = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newTableItem( null, columns.length ); TableHandle tableHandle = DesignElementFactory.getInstance( ) .newTableItem( null, columns.length ); setInitWidth( tableHandle ); insertToCell( tableHandle.getHeader( ), columns, true ); insertToCell( tableHandle.getDetail( ), columns, false ); tableHandle.setDataSet( model ); return tableHandle; } /** * Validates object can be inserted to layout. Support the multiple. 
* * @param insertObj * single inserted object or multi-objects * @param targetPart * @return if can be inserted to layout */ public static boolean handleValidateInsertToLayout( Object insertObj, EditPart targetPart ) { if ( targetPart == null ) { return false; } if ( insertObj instanceof Object[] ) { Object[] array = (Object[]) insertObj; if ( !checkSameDataSetInMultiColumns( array ) ) { return false; } for ( int i = 0; i < array.length; i++ ) { if ( !handleValidateInsertToLayout( array[i], targetPart ) ) { return false; } } return true; } else if ( insertObj instanceof IStructuredSelection ) { return handleValidateInsertToLayout( ( (IStructuredSelection) insertObj ).toArray( ), targetPart ); } else if ( insertObj instanceof DataSetHandle ) { return isHandleValid( (DataSetHandle) insertObj ) && ( (DataSetHandle) insertObj ).getDataSource( ) != null && handleValidateDataSet( targetPart ); } else if ( insertObj instanceof DataSetItemModel ) { return handleValidateDataSetColumn( (DataSetItemModel) insertObj, targetPart ); } else if ( insertObj instanceof ScalarParameterHandle ) { return isHandleValid( (ScalarParameterHandle) insertObj ) && handleValidateParameter( targetPart ); } return false; } /** * Checks if all the DataSetColumn has the same DataSet. * * @param array * all elements * @return false if not same; true if every column has the same DataSet or * the element is not an instance of DataSetColumn */ protected static boolean checkSameDataSetInMultiColumns( Object[] array ) { if ( array == null ) return false; Object dataSet = null; for ( int i = 0; i < array.length; i++ ) { if ( array[i] instanceof DataSetItemModel ) { Object currDataSet = ( (DataSetItemModel) array[i] ).getParent( ); if ( currDataSet == null ) { return false; } if ( dataSet == null ) { dataSet = currDataSet; } else { if ( dataSet != currDataSet ) { return false; } } } } return true; } /** * Validates container of drop target from data set in data view * * @param dropPart * @return validate result */ protected static boolean handleValidateDataSetDropContainer( EditPart dropPart ) { if ( dropPart.getParent( ) == null ) { return false; } Object container = dropPart.getParent( ).getModel( ); return ( container instanceof GridHandle || container instanceof TableHandle || container instanceof FreeFormHandle || container instanceof ListHandle || dropPart.getModel( ) instanceof ModuleHandle ); } /** * Validates container of drop target from data set column in data view * * @param dropPart * @return validate result */ protected static boolean handleValidateDataSetColumnDropContainer( EditPart dropPart ) { if ( dropPart.getParent( ) == null ) { return false; } Object container = dropPart.getParent( ).getModel( ); return ( container instanceof GridHandle || container instanceof TableHandle || container instanceof FreeFormHandle || container instanceof ListHandle || dropPart.getModel( ) instanceof ModuleHandle ); } /** * Validates container of drop target from scalar parameter in data view * * @param dropPart * @return validate result */ protected static boolean handleValidateParameterDropContainer( EditPart dropPart ) { if ( dropPart.getParent( ) == null ) { return false; } Object container = dropPart.getParent( ).getModel( ); return ( container instanceof GridHandle || container instanceof TableHandle || container instanceof FreeFormHandle || container instanceof ListHandle || dropPart.getModel( ) instanceof ModuleHandle ); } /** * Validates drop target from data set in data view. 
* * @return validate result */ protected static boolean handleValidateDataSet( EditPart target ) { return handleValidateDataSetDropContainer( target ) && DNDUtil.handleValidateTargetCanContainType( target.getModel( ), ReportDesignConstants.TABLE_ITEM ); } /** * Validates drop target from data set column in data view. * * @return validate result */ protected static boolean handleValidateDataSetColumn( DataSetItemModel insertObj, EditPart target ) { if ( handleValidateDataSetColumnDropContainer( target ) && DNDUtil.handleValidateTargetCanContainType( target.getModel( ), ReportDesignConstants.DATA_ITEM ) ) { // Validates target is report root if ( target.getModel( ) instanceof ModuleHandle ) { return true; } // Validates target's dataset is null or the same with the inserted DesignElementHandle handle = (DesignElementHandle) target.getParent( ) .getModel( ); if ( handle instanceof ReportItemHandle && ( (ReportItemHandle) handle ).getDataSet( ) == null ) { return true; } return DEUtil.getDataSetList( handle ) .contains( insertObj.getParent( ) ); } return false; } /** * Validates drop target from scalar parameter in data view. * * @return validate result */ protected static boolean handleValidateParameter( EditPart target ) { return handleValidateParameterDropContainer( target ) && DNDUtil.handleValidateTargetCanContainType( target.getModel( ), ReportDesignConstants.DATA_ITEM ); } /** * Validates drag source from data view to layout. Support the multiple. * * @return validate result */ public static boolean handleValidateInsert( Object insertObj ) { if ( insertObj instanceof Object[] ) { Object[] array = (Object[]) insertObj; if(array.length == 0) { return false; } for ( int i = 0; i < array.length; i++ ) { if ( !handleValidateInsert( array[i] ) ) return false; } return true; } else if ( insertObj instanceof IStructuredSelection ) { return handleValidateInsert( ( (IStructuredSelection) insertObj ).toArray( ) ); } // else if ( insertObj instanceof ParameterHandle ) // { // if ( ( (ParameterHandle) insertObj ).getRoot( ) instanceof LibraryHandle ) // return false; // } return insertObj instanceof DataSetHandle || insertObj instanceof DataSetItemModel || insertObj instanceof ScalarParameterHandle; } protected static void insertToCell( SlotHandle slot, DataSetItemModel[] columns, boolean isLabel ) { for ( int i = 0; i < slot.getCount( ); i++ ) { SlotHandle cells = ( (RowHandle) slot.get( i ) ).getCells( ); for ( int j = 0; j < cells.getCount( ); j++ ) { CellHandle cell = (CellHandle) cells.get( j ); try { if ( isLabel ) { // LabelHandle labelItemHandle = // SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); LabelHandle labelItemHandle = DesignElementFactory.getInstance( ) .newLabel( null ); labelItemHandle.setText( columns[j].getDisplayName( ) ); cell.addElement( labelItemHandle, cells.getSlotID( ) ); } else { // DataItemHandle dataHandle = // SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); dataHandle.setValueExpr( DEUtil.getExpression( columns[j] ) ); cell.addElement( dataHandle, cells.getSlotID( ) ); } } catch ( Exception e ) { ExceptionHandler.handle( e ); } } } } /** * Sets initial width to new object * * @param object * new object */ public static void setInitWidth( Object object ) { int percentAll = 100; try { if ( object instanceof TableHandle ) { TableHandle table = (TableHandle) 
object; table.setWidth( percentAll + DesignChoiceConstants.UNITS_PERCENTAGE ); } else if ( object instanceof GridHandle ) { GridHandle grid = (GridHandle) object; grid.setWidth( percentAll + DesignChoiceConstants.UNITS_PERCENTAGE ); } else return; } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } protected static boolean isHandleValid( DesignElementHandle handle ) { return handle.isValid( ) && handle.getSemanticErrors( ).isEmpty( ); } /** * Converts edit part selection into model selection. * * @param selection * edit part * @return model, return Collections.EMPTY_LIST if selection is null or * empty. */ public static IStructuredSelection editPart2Model( ISelection selection ) { if ( selection == null || !( selection instanceof IStructuredSelection ) ) return new StructuredSelection( Collections.EMPTY_LIST ); List list = ( (IStructuredSelection) selection ).toList( ); List resultList = new ArrayList( ); for ( int i = 0; i < list.size( ); i++ ) { Object obj = list.get( i ); if ( obj instanceof ReportElementEditPart ) { Object model = ( (ReportElementEditPart) obj ).getModel( ); if ( model instanceof ListBandProxy ) { model = ( (ListBandProxy) model ).getSlotHandle( ); } resultList.add( model ); } } return new StructuredSelection( resultList ); } }
-Summary: Fix the bug that dragging a data set from the data explorer to the layout fails with an error when default names for data and label items are defined in the preferences.
UI/org.eclipse.birt.report.designer.ui/src/org/eclipse/birt/report/designer/internal/ui/dnd/InsertInLayoutUtil.java
-Summary: Fix the bug that dragging a data set from the data explorer to the layout fails with an error when default names for data and label items are defined in the preferences.
Java
epl-1.0
35c397e1d83812c6185ac63d4ab1527a280d3506
0
bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs
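InsertInLayoutUtil above is built around the small InsertInLayoutRule contract (canInsert / getInsertPosition / insert), which performInsertDataSetColumn applies as "check the rule, then insert". The following is a stripped-down, self-contained sketch of that pattern; all types here are simplified stand-ins for illustration, not the real BIRT handles.

import java.util.ArrayList;
import java.util.List;

public final class RuleDemo {

    // Simplified stand-in for BIRT's InsertInLayoutRule contract.
    interface InsertRule<T> {
        boolean canInsert();
        void insert(T element);
    }

    // Stand-in for LabelAddRule: only inserts when the target is still empty,
    // mirroring the "header cell must have no content" check in the real rule.
    static final class LabelAddRule implements InsertRule<String> {
        private final List<String> targetCell;

        LabelAddRule(List<String> targetCell) {
            this.targetCell = targetCell;
        }

        public boolean canInsert() {
            return targetCell.isEmpty();
        }

        public void insert(String label) {
            targetCell.add(label);
        }
    }

    public static void main(String[] args) {
        List<String> headerCell = new ArrayList<String>();

        // Same call shape as performInsertDataSetColumn: check the rule, then insert.
        InsertRule<String> rule = new LabelAddRule(headerCell);
        if (rule.canInsert()) {
            rule.insert("CUSTOMER_NAME");
        }
        System.out.println(headerCell); // prints [CUSTOMER_NAME]
    }
}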
/*******************************************************************************
 * Copyright (c) 1998, 2010 Oracle. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     bdoughan - June 25/2009 - 2.0 - Initial implementation
 ******************************************************************************/
package org.eclipse.persistence.internal.oxm.record.namespaces;

import java.util.HashSet;
import java.util.Set;

import javax.xml.stream.XMLStreamReader;

import org.eclipse.persistence.oxm.XMLConstants;

/**
 * An UnmarshalNamespaceResolver that delegates all work to a NamespaceContext.
 * This is useful when using XML input from sources such as StAX.
 */
public class UnmarshalNamespaceContext implements UnmarshalNamespaceResolver {

    private XMLStreamReader xmlStreamReader;
    private Set<String> prefixes;

    public UnmarshalNamespaceContext() {
        this.prefixes = new HashSet();
    }

    public UnmarshalNamespaceContext(XMLStreamReader anXMLStreamReader) {
        this.xmlStreamReader = anXMLStreamReader;
        this.prefixes = new HashSet();
    }

    public String getNamespaceURI(String prefix) {
        if(null == prefix) {
            prefix = XMLConstants.EMPTY_STRING;
        }
        try {
            String namespaceURI = xmlStreamReader.getNamespaceURI(prefix);
            if(null == namespaceURI) {
                return xmlStreamReader.getAttributeValue(XMLConstants.XMLNS_URL, prefix);
            }
            return namespaceURI;
        } catch(IllegalStateException e) {
            return null;
        }
    }

    public String getPrefix(String namespaceURI) {
        return xmlStreamReader.getNamespaceContext().getPrefix(namespaceURI);
    }

    /**
     * The underlying NamespaceContext is responsible for maintaining the
     * appropriate prefix/URI associations.
     */
    public void push(String prefix, String namespaceURI) {
        prefixes.add(prefix);
    }

    /**
     * The underlying NamespaceContext is responsible for maintaining the
     * appropriate prefix/URI associations.
     */
    public void pop(String prefix) {
        if(null != getNamespaceURI(prefix)) {
            prefixes.remove(prefix);
        }
    }

    public Set<String> getPrefixes() {
        return prefixes;
    }

    public XMLStreamReader getXmlStreamReader() {
        return xmlStreamReader;
    }

    public void setXmlStreamReader(XMLStreamReader xmlStreamReader) {
        this.xmlStreamReader = xmlStreamReader;
    }
}
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/oxm/record/namespaces/UnmarshalNamespaceContext.java
/******************************************************************************* * Copyright (c) 1998, 2010 Oracle. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * bdoughan - June 25/2009 - 2.0 - Initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.record.namespaces; import java.util.HashSet; import java.util.Set; import javax.xml.stream.XMLStreamReader; import org.eclipse.persistence.oxm.XMLConstants; /** * An UnmarshalNamespaceResolver that delegates all work to a NamespaceContext. * This is useful when using XML input from sources such as StAX. */ public class UnmarshalNamespaceContext implements UnmarshalNamespaceResolver { private XMLStreamReader xmlStreamReader; private Set<String> prefixes; public UnmarshalNamespaceContext() { this.prefixes = new HashSet(); } public UnmarshalNamespaceContext(XMLStreamReader anXMLStreamReader) { this.xmlStreamReader = anXMLStreamReader; this.prefixes = new HashSet(); } public String getNamespaceURI(String prefix) { if(null == prefix) { prefix = XMLConstants.EMPTY_STRING; } try { return xmlStreamReader.getNamespaceURI(prefix); } catch(IllegalStateException e) { return null; } } public String getPrefix(String namespaceURI) { return xmlStreamReader.getNamespaceContext().getPrefix(namespaceURI); } /** * The underlying NamespaceContext is responsible for maintaining the * appropriate prefix/URI associations. */ public void push(String prefix, String namespaceURI) { prefixes.add(prefix); } /** * The underlying NamespaceContext is responsible for maintaining the * appropriate prefix/URI associations. */ public void pop(String prefix) { if(null!= getNamespaceURI(prefix)) { prefixes.remove(prefix); } } public Set<String> getPrefixes() { return prefixes; } public XMLStreamReader getXmlStreamReader() { return xmlStreamReader; } public void setXmlStreamReader(XMLStreamReader xmlStreamReader) { this.xmlStreamReader = xmlStreamReader; } }
Fix for Bug 306027 - Namespace prefix for an xsi:type value may not resolve when the namespace declaration is on the current element and StAX is used Former-commit-id: 671dd10e2b090f449a712f15a8e00624a814e681
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/oxm/record/namespaces/UnmarshalNamespaceContext.java
Fix for Bug 306027 - Namespace prefix for an xsi:type value may not resolve when the namespace declaration is on the current element and StAX is used
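The diff above resolves a prefix in two steps: it first asks the stream reader's namespace context and, only if that returns null, reads the matching xmlns attribute off the current element. The helper below is a minimal standalone paraphrase of that lookup, not the EclipseLink class itself; it uses the standard javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI constant where the patched code uses its own XMLConstants.XMLNS_URL, and the class and method names are illustrative only.

import javax.xml.XMLConstants;
import javax.xml.stream.XMLStreamReader;

final class NamespaceLookupSketch {

    private NamespaceLookupSketch() {
    }

    /**
     * Mirrors the patched lookup: try the reader's namespace context first and,
     * if the prefix does not resolve there, fall back to the xmlns attribute
     * declared on the current element. Returns null if neither resolves.
     */
    static String resolve(XMLStreamReader reader, String prefix) {
        String key = (prefix == null) ? "" : prefix;
        try {
            String uri = reader.getNamespaceURI(key);
            if (uri != null) {
                return uri;
            }
            // Same fallback idea as the patch: look the prefix up as an xmlns
            // attribute on the element the reader is currently positioned on.
            return reader.getAttributeValue(XMLConstants.XMLNS_ATTRIBUTE_NS_URI, key);
        } catch (IllegalStateException e) {
            return null;
        }
    }
}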
Java
epl-1.0
7af04fde1d44b1f18f0cbb03ca8176499c9f5054
0
ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,css-iter/cs-studio
/******************************************************************************* * Copyright (c) 2013 Oak Ridge National Laboratory. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html ******************************************************************************/ package org.csstudio.opibuilder.converter.writer; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.logging.Logger; import org.csstudio.opibuilder.converter.model.EdmColor; import org.csstudio.opibuilder.converter.model.EdmWidget; import org.csstudio.opibuilder.converter.model.Edm_activeButtonClass; import org.w3c.dom.Element; /** * XML conversion class for Edm_activeRectangleClass * * @author Lei Hu, Xihui Chen */ public class Opi_activeButtonClass extends OpiWidget { private static Logger log = Logger .getLogger("org.csstudio.opibuilder.converter.writer.Opi_activeButtonClass"); private static final String typeId = "BoolButton"; private static final String name = "EDM Button"; private static final String version = "1.0"; /** * Converts the Edm_activeRectangleClass to OPI Rectangle widget XML. */ public Opi_activeButtonClass(Context con, Edm_activeButtonClass r) { super(con, r); setTypeId(typeId); setName(name); setVersion(version); if (r.getControlPv() != null) { new OpiString(widgetContext, "pv_name", convertPVName(r.getControlPv())); createOnOffColorRule(r, convertPVName(r.getControlPv()), "background_color", r.getOnColor(), r.getOffColor(), "OnOffBackgroundRule"); } new OpiColor(widgetContext, "on_color", r.getOnColor(), r); new OpiColor(widgetContext, "off_color", r.getOffColor(), r); if (r.getOnLabel() != null) new OpiString(widgetContext, "on_label", r.getOnLabel()); if (r.getOffLabel() != null) new OpiString(widgetContext, "off_label", r.getOffLabel()); new OpiBoolean(widgetContext, "show_boolean_label", true); new OpiBoolean(widgetContext, "toggle_button", r.getButtonType() == null || r.getButtonType().equals("toggle")); if (r.getAttribute("controlBitsPos").isExistInEDL()) { new OpiInt(widgetContext, "data_type", r.getControlBitsPos() < 0 ? 0 : r.getControlBitsPos()); }else{ new OpiInt(widgetContext, "data_type", 1); new OpiString(widgetContext, "on_state", "1"); new OpiString(widgetContext, "off_state", "0"); } new OpiBoolean(widgetContext, "show_led", false); new OpiBoolean(widgetContext, "square_button", true); log.config("Edm_activeButtonClass written."); } /** * Create a rule that make a color property alarm sensitive. 
* * @param edmWidgetClass * @param edmAlarmAttr * @param edmAlarmPVAttr * @param opiProperty */ protected void createOnOffColorRule(EdmWidget edmWidgetClass, String pvName, String opiProperty, EdmColor onColor, EdmColor offColor, String ruleName) { LinkedHashMap<String, Element> expressions = new LinkedHashMap<String, Element>(); Element valueNode; Element colorNode; valueNode = widgetContext.getDocument().createElement("value"); colorNode = widgetContext.getDocument().createElement("color"); colorNode.setAttribute("name", offColor.getName()); colorNode.setAttribute("red", "" + OpiColor.colorComponentTo8Bits(offColor.getRed())); colorNode.setAttribute("green", "" + OpiColor.colorComponentTo8Bits(offColor.getGreen())); colorNode.setAttribute("blue", "" + OpiColor.colorComponentTo8Bits(offColor.getBlue())); valueNode.appendChild(colorNode); expressions.put("pv0==0", valueNode); valueNode = widgetContext.getDocument().createElement("value"); colorNode = widgetContext.getDocument().createElement("color"); colorNode.setAttribute("name", onColor.getName()); colorNode.setAttribute("red", "" + OpiColor.colorComponentTo8Bits(onColor.getRed())); colorNode.setAttribute("green", "" + OpiColor.colorComponentTo8Bits(onColor.getGreen())); colorNode.setAttribute("blue", "" + OpiColor.colorComponentTo8Bits(onColor.getBlue())); valueNode.appendChild(colorNode); expressions.put("true", valueNode); new OpiRule(widgetContext, ruleName, opiProperty, false, Arrays.asList(pvName), expressions); } }
applications/opibuilder/opibuilder-plugins/org.csstudio.opibuilder.converter/src/main/org/csstudio/opibuilder/converter/writer/Opi_activeButtonClass.java
/******************************************************************************* * Copyright (c) 2013 Oak Ridge National Laboratory. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html ******************************************************************************/ package org.csstudio.opibuilder.converter.writer; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.logging.Logger; import org.csstudio.opibuilder.converter.model.EdmColor; import org.csstudio.opibuilder.converter.model.EdmWidget; import org.csstudio.opibuilder.converter.model.Edm_activeButtonClass; import org.w3c.dom.Element; /** * XML conversion class for Edm_activeRectangleClass * * @author Lei Hu, Xihui Chen */ public class Opi_activeButtonClass extends OpiWidget { private static Logger log = Logger .getLogger("org.csstudio.opibuilder.converter.writer.Opi_activeButtonClass"); private static final String typeId = "BoolButton"; private static final String name = "EDM Button"; private static final String version = "1.0"; /** * Converts the Edm_activeRectangleClass to OPI Rectangle widget XML. */ public Opi_activeButtonClass(Context con, Edm_activeButtonClass r) { super(con, r); setTypeId(typeId); setName(name); setVersion(version); if (r.getControlPv() != null) { new OpiString(widgetContext, "pv_name", convertPVName(r.getControlPv())); createOnOffColorRule(r, convertPVName(r.getControlPv()), "background_color", r.getOnColor(), r.getOffColor(), "OnOffBackgroundRule"); } new OpiColor(widgetContext, "on_color", r.getOnColor(), r); new OpiColor(widgetContext, "off_color", r.getOffColor(), r); if (r.getOnLabel() != null) new OpiString(widgetContext, "on_label", r.getOnLabel()); if (r.getOffLabel() != null) new OpiString(widgetContext, "off_label", r.getOffLabel()); new OpiBoolean(widgetContext, "show_boolean_label", true); new OpiBoolean(widgetContext, "toggle_button", r.getButtonType() != null && r.getButtonType().equals("push")); if (r.getAttribute("controlBitsPos").isExistInEDL()) { new OpiInt(widgetContext, "data_type", r.getControlBitsPos() < 0 ? 0 : r.getControlBitsPos()); }else{ new OpiInt(widgetContext, "data_type", 1); new OpiString(widgetContext, "on_state", "1"); new OpiString(widgetContext, "off_state", "0"); } new OpiBoolean(widgetContext, "show_led", false); new OpiBoolean(widgetContext, "square_button", true); log.config("Edm_activeButtonClass written."); } /** * Create a rule that make a color property alarm sensitive. 
* * @param edmWidgetClass * @param edmAlarmAttr * @param edmAlarmPVAttr * @param opiProperty */ protected void createOnOffColorRule(EdmWidget edmWidgetClass, String pvName, String opiProperty, EdmColor onColor, EdmColor offColor, String ruleName) { LinkedHashMap<String, Element> expressions = new LinkedHashMap<String, Element>(); Element valueNode; Element colorNode; valueNode = widgetContext.getDocument().createElement("value"); colorNode = widgetContext.getDocument().createElement("color"); colorNode.setAttribute("name", offColor.getName()); colorNode.setAttribute("red", "" + OpiColor.colorComponentTo8Bits(offColor.getRed())); colorNode.setAttribute("green", "" + OpiColor.colorComponentTo8Bits(offColor.getGreen())); colorNode.setAttribute("blue", "" + OpiColor.colorComponentTo8Bits(offColor.getBlue())); valueNode.appendChild(colorNode); expressions.put("pv0==0", valueNode); valueNode = widgetContext.getDocument().createElement("value"); colorNode = widgetContext.getDocument().createElement("color"); colorNode.setAttribute("name", onColor.getName()); colorNode.setAttribute("red", "" + OpiColor.colorComponentTo8Bits(onColor.getRed())); colorNode.setAttribute("green", "" + OpiColor.colorComponentTo8Bits(onColor.getGreen())); colorNode.setAttribute("blue", "" + OpiColor.colorComponentTo8Bits(onColor.getBlue())); valueNode.appendChild(colorNode); expressions.put("true", valueNode); new OpiRule(widgetContext, ruleName, opiProperty, false, Arrays.asList(pvName), expressions); } }
o.c.o.converter: invert incorrect logic on BoolButton. Previously "push" or "toggle" behaviour wasn't correctly converted.
applications/opibuilder/opibuilder-plugins/org.csstudio.opibuilder.converter/src/main/org/csstudio/opibuilder/converter/writer/Opi_activeButtonClass.java
o.c.o.converter: invert incorrect logic on BoolButton.
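The change is a one-line predicate inversion: the old converter marked a button as a toggle button when buttonType was "push", while the corrected code treats a missing buttonType, or an explicit "toggle", as toggle behaviour. A standalone sketch of the corrected predicate, with an illustrative class name:

public class ToggleButtonPredicateSketch {

    // Corrected logic from the diff: no buttonType, or "toggle", means toggle behaviour.
    static boolean isToggleButton(String edmButtonType) {
        return edmButtonType == null || "toggle".equals(edmButtonType);
    }

    public static void main(String[] args) {
        System.out.println(isToggleButton(null));     // true
        System.out.println(isToggleButton("toggle")); // true
        System.out.println(isToggleButton("push"));   // false (the old code returned true here)
    }
}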
Java
mpl-2.0
fa07d7bcf242383bf8460df93193a784fec34fd9
0
msteinhoff/hello-world
c80f7828-cb8e-11e5-a1e9-00264a111016
src/main/java/HelloWorld.java
c8029ca1-cb8e-11e5-893c-00264a111016
Did a thing
src/main/java/HelloWorld.java
Did a thing
Java
agpl-3.0
6050ef21b25ef1c205ee9f2a86714714428b7998
0
donsunsoft/axelor-development-kit,donsunsoft/axelor-development-kit,donsunsoft/axelor-development-kit,donsunsoft/axelor-development-kit
/** * Axelor Business Solutions * * Copyright (C) 2005-2015 Axelor (<http://axelor.com>). * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License, version 3, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.axelor.mail.db.repo; import java.util.List; import com.axelor.auth.db.User; import com.axelor.db.JpaRepository; import com.axelor.mail.db.MailFlags; import com.axelor.mail.db.MailMessage; public class MailFlagsRepository extends JpaRepository<MailFlags> { public MailFlagsRepository() { super(MailFlags.class); } public MailFlags findBy(MailMessage message, User user) { return all().filter("self.message = :message AND self.user = :user") .bind("message", message) .bind("user", user).fetchOne(); } @Override public MailFlags save(MailFlags entity) { final MailFlags flags = super.save(entity); final MailMessage message = flags.getMessage(); final MailMessage root = message.getRoot(); if (flags.getIsStarred() == Boolean.FALSE) { // message is root, so unflag children if (root == null) { List<MailFlags> childFlags = all().filter("self.message.root.id = ?", message.getId()).fetch(); for (MailFlags child : childFlags) { child.setIsStarred(flags.getIsStarred()); } } } if (root == null) { return flags; } MailFlags rootFlags = findBy(root, flags.getUser()); if (rootFlags == null) { rootFlags = new MailFlags(); rootFlags.setMessage(root); rootFlags.setUser(flags.getUser()); super.save(rootFlags); } rootFlags.setIsStarred(flags.getIsStarred()); // mark root as unread if (flags.getIsRead() != Boolean.TRUE) { rootFlags.setIsRead(false); } return flags; } }
axelor-core/src/main/java/com/axelor/mail/db/repo/MailFlagsRepository.java
/** * Axelor Business Solutions * * Copyright (C) 2005-2015 Axelor (<http://axelor.com>). * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License, version 3, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.axelor.mail.db.repo; import java.util.List; import com.axelor.auth.db.User; import com.axelor.db.JpaRepository; import com.axelor.mail.db.MailFlags; import com.axelor.mail.db.MailMessage; public class MailFlagsRepository extends JpaRepository<MailFlags> { public MailFlagsRepository() { super(MailFlags.class); } public MailFlags findBy(MailMessage message, User user) { return all().filter("self.message = :message AND self.user = :user") .bind("message", message) .bind("user", user).fetchOne(); } @Override public MailFlags save(MailFlags entity) { final MailFlags flags = super.save(entity); final MailMessage message = flags.getMessage(); final MailMessage root = message.getRoot(); if (flags.getIsStarred() == Boolean.FALSE) { // message is root, so unflag children if (root == null) { List<MailFlags> childFlags = all().filter("self.message.root.id = ?", message.getId()).fetch(); for (MailFlags child : childFlags) { child.setIsStarred(flags.getIsStarred()); } } } MailFlags rootFlags = findBy(root, flags.getUser()); if (rootFlags == null) { rootFlags = new MailFlags(); rootFlags.setMessage(root); rootFlags.setUser(flags.getUser()); super.save(rootFlags); } rootFlags.setIsStarred(flags.getIsStarred()); // mark root as unread if (root != null && flags.getIsRead() != Boolean.TRUE) { rootFlags.setIsRead(false); } return flags; } }
Fix creation of flags for null root message
axelor-core/src/main/java/com/axelor/mail/db/repo/MailFlagsRepository.java
Fix creation of flags for null root message
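The fix guards all root-flag handling behind a null check: the previous version called findBy(root, ...) and built a MailFlags for a null root whenever the message was itself a thread root, while the new version returns as soon as the children have been unstarred. The sketch below shows just that control-flow change with simplified stand-in types; Message and Flags here are illustrative, not the Axelor entities.

public class RootFlagGuardSketch {

    static final class Message {
        final Message root; // null when the message is itself the thread root
        Message(Message root) { this.root = root; }
    }

    static final class Flags {
        final Message message;
        Flags(Message message) {
            if (message == null) {
                throw new IllegalArgumentException("flags need a message");
            }
            this.message = message;
        }
    }

    /** Returns flags for the root of a reply, or null when the message is a root itself. */
    static Flags rootFlagsFor(Message message) {
        Message root = message.root;
        if (root == null) {
            return null; // the fixed repository returns early here instead of touching root
        }
        return new Flags(root); // only non-root messages reach the root-flag update
    }

    public static void main(String[] args) {
        System.out.println(rootFlagsFor(new Message(null)));                      // null, no error
        System.out.println(rootFlagsFor(new Message(new Message(null))) != null); // true
    }
}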
Java
agpl-3.0
2fa48e023f55af0f3e6bd42ffd78fe0e07514964
0
CompilerWorks/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine
package com.splicemachine.derby.impl.sql.execute.operations; import com.splicemachine.constants.SpliceConstants; import com.splicemachine.constants.bytes.BytesUtil; import com.splicemachine.derby.hbase.SpliceDriver; import com.splicemachine.derby.hbase.SpliceObserverInstructions; import com.splicemachine.derby.iapi.sql.execute.SinkingOperation; import com.splicemachine.derby.iapi.sql.execute.SpliceOperation; import com.splicemachine.derby.iapi.sql.execute.SpliceOperationContext; import com.splicemachine.derby.iapi.sql.execute.SpliceRuntimeContext; import com.splicemachine.derby.iapi.storage.RowProvider; import com.splicemachine.derby.iapi.storage.ScanBoundary; import com.splicemachine.derby.impl.sql.execute.operations.distinctscalar.DistinctAggregateBuffer; import com.splicemachine.derby.impl.sql.execute.operations.distinctscalar.DistinctScalarAggregateIterator; import com.splicemachine.derby.impl.sql.execute.operations.distinctscalar.SingleDistinctScalarAggregateIterator; import com.splicemachine.derby.impl.sql.execute.operations.framework.EmptyRowSupplier; import com.splicemachine.derby.impl.sql.execute.operations.framework.GroupedRow; import com.splicemachine.derby.impl.sql.execute.operations.framework.SourceIterator; import com.splicemachine.derby.impl.sql.execute.operations.framework.SpliceWarningCollector; import com.splicemachine.derby.impl.storage.BaseHashAwareScanBoundary; import com.splicemachine.derby.impl.storage.ClientResultScanner; import com.splicemachine.derby.impl.storage.DistributedClientScanProvider; import com.splicemachine.derby.impl.storage.RegionAwareScanner; import com.splicemachine.derby.impl.storage.RowProviders; import com.splicemachine.derby.impl.storage.SpliceResultScanner; import com.splicemachine.derby.metrics.OperationMetric; import com.splicemachine.derby.metrics.OperationRuntimeStats; import com.splicemachine.derby.utils.DerbyBytesUtil; import com.splicemachine.derby.utils.ScanIterator; import com.splicemachine.derby.utils.Scans; import com.splicemachine.derby.utils.SpliceUtils; import com.splicemachine.derby.utils.StandardIterator; import com.splicemachine.derby.utils.StandardSupplier; import com.splicemachine.derby.utils.marshall.*; import com.splicemachine.derby.utils.marshall.dvd.DescriptorSerializer; import com.splicemachine.derby.utils.marshall.dvd.VersionedSerializers; import com.splicemachine.encoding.MultiFieldDecoder; import com.splicemachine.job.JobResults; import com.splicemachine.metrics.TimeView; import com.splicemachine.si.api.TxnView; import com.splicemachine.utils.IntArrays; import com.splicemachine.utils.SpliceLogUtils; import com.splicemachine.hash.HashFunctions; import com.splicemachine.pipeline.exception.Exceptions; import com.splicemachine.db.iapi.error.StandardException; import com.splicemachine.db.iapi.services.io.FormatableArrayHolder; import com.splicemachine.db.iapi.services.loader.GeneratedMethod; import com.splicemachine.db.iapi.sql.execute.ExecPreparedStatement; import com.splicemachine.db.iapi.sql.execute.ExecRow; import com.splicemachine.db.iapi.store.access.ColumnOrdering; import com.splicemachine.db.iapi.types.DataValueDescriptor; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.log4j.Logger; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; /** * * The Distinct Scalar Aggregate is a three step process. The steps occur as a CombinedRowProvider (first 2 steps) and then * the reduce scan combines the last results. 
* * * * Step 1 Reading from Source and Writing to temp buckets with extraUniqueSequenceID prefix (Not needed in the case that data is sorted) * If Distinct Keys Match * Merge Non Distinct Aggregates * Else * add to buffer * Write to temp buckets * * Sorted * * Step2: Shuffle Intermediate Results to temp with uniqueSequenceID prefix * * If Keys Match * Merge Non Distinct Aggregates * else * Merge Distinct and Non Distinct Aggregates * Write to temp buckets * * Step 3: Combine N outputs * Merge Distinct and Non Distinct Aggregates * Flow through output of stack * * @author Scott Fines * Created on: 5/21/13 */ public class DistinctScalarAggregateOperation extends GenericAggregateOperation{ private static final long serialVersionUID=1l; private byte[] extraUniqueSequenceID; private boolean isInSortedOrder; private int orderItem; private int[] keyColumns; private static final Logger LOG = Logger.getLogger(DistinctScalarAggregateOperation.class); private byte[] currentKey; private Scan baseScan; private DistinctScalarAggregateIterator step1Aggregator; private SingleDistinctScalarAggregateIterator step2Aggregator; private SingleDistinctScalarAggregateIterator step3Aggregator; private boolean step2Closed; private boolean step3Closed; private DistinctAggregateBuffer buffer; private SpliceResultScanner scanner; private int step2Bucket; protected static final String NAME = DistinctScalarAggregateOperation.class.getSimpleName().replaceAll("Operation",""); @Override public String getName() { return NAME; } @SuppressWarnings("UnusedDeclaration") public DistinctScalarAggregateOperation(){} @SuppressWarnings("UnusedParameters") public DistinctScalarAggregateOperation(SpliceOperation source, boolean isInSortedOrder, int aggregateItem, int orderItem, GeneratedMethod rowAllocator, int maxRowSize, int resultSetNumber, boolean singleInputRow, double optimizerEstimatedRowCount, double optimizerEstimatedCost) throws StandardException{ super(source,aggregateItem,source.getActivation(),rowAllocator,resultSetNumber,optimizerEstimatedRowCount,optimizerEstimatedCost); this.orderItem = orderItem; this.isInSortedOrder = false; // XXX TODO Jleach: Optimize when data is already sorted. try { init(SpliceOperationContext.newContext(source.getActivation())); } catch (IOException e) { throw Exceptions.parseException(e); } } @Override public ExecRow getExecRowDefinition() throws StandardException { ExecRow clone = sourceExecIndexRow.getClone(); // Set the default values to 0 in case a ProjectRestrictOperation has set the default values to 1. // That is done to avoid division by zero exceptions when executing a projection for defining the rows // before execution. 
SpliceUtils.populateDefaultValues(clone.getRowArray(), 0); return clone; } @Override public RowProvider getReduceRowProvider(SpliceOperation top, PairDecoder rowDecoder, SpliceRuntimeContext spliceRuntimeContext, boolean returnDefaultValue) throws StandardException, IOException { if (LOG.isTraceEnabled()) SpliceLogUtils.trace(LOG, "getReduceRowProvider"); buildReduceScan(uniqueSequenceID); if(top!=this && top instanceof SinkingOperation){ // If being written to a table, it can be distributed serializeSource=false; SpliceUtils.setInstructions(reduceScan, activation, top, spliceRuntimeContext); serializeSource=true; byte[] tempTableBytes = SpliceDriver.driver().getTempTable().getTempTableName(); return new DistributedClientScanProvider("distinctScalarAggregateReduce",tempTableBytes,reduceScan,rowDecoder, spliceRuntimeContext); }else{ /* * Scanning back to client, the last aggregation has to be performed on the client because we cannot do server side buffering when * data is being passed back to the client due to the fact that HBase is a forward only scan in the case of interuptions. */ return RowProviders.openedSourceProvider(top,LOG,spliceRuntimeContext); } } @Override public RowProvider getMapRowProvider(SpliceOperation top, PairDecoder rowDecoder, SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { buildReduceScan(extraUniqueSequenceID); boolean serializeSourceTemp = serializeSource; serializeSource = spliceRuntimeContext.isFirstStepInMultistep(); SpliceUtils.setInstructions(reduceScan, activation, top, spliceRuntimeContext); serializeSource = serializeSourceTemp; byte[] tempTableBytes = SpliceDriver.driver().getTempTable().getTempTableName(); return new DistributedClientScanProvider("distinctScalarAggregateMap",tempTableBytes,reduceScan,rowDecoder, spliceRuntimeContext); } @Override protected JobResults doShuffle(SpliceRuntimeContext runtimeContext ) throws StandardException,IOException { long start = System.currentTimeMillis(); RowProvider provider; TxnView txn = runtimeContext.getTxn(); if (!isInSortedOrder) { SpliceRuntimeContext firstStep = SpliceRuntimeContext.generateSinkRuntimeContext(txn, true); firstStep.setStatementInfo(runtimeContext.getStatementInfo()); SpliceRuntimeContext secondStep = SpliceRuntimeContext.generateSinkRuntimeContext(txn,false); step2Bucket = SpliceDriver.driver().getTempTable().getCurrentSpread().bucketIndex(secondStep.getHashBucket()); secondStep.setStatementInfo(runtimeContext.getStatementInfo()); final RowProvider step1 = source.getMapRowProvider(this, OperationUtils.getPairDecoder(this, runtimeContext), firstStep); // Step 1 final RowProvider step2 = getMapRowProvider(this, OperationUtils.getPairDecoder(this, runtimeContext), secondStep); // Step 2 provider = RowProviders.combineInSeries(step1, step2); } else { SpliceRuntimeContext secondStep = SpliceRuntimeContext.generateSinkRuntimeContext(txn,false); secondStep.setStatementInfo(runtimeContext.getStatementInfo()); provider = source.getMapRowProvider(this, OperationUtils.getPairDecoder(this, runtimeContext), secondStep); // Step 1 } nextTime+= System.currentTimeMillis()-start; SpliceObserverInstructions soi = SpliceObserverInstructions.create(getActivation(),this,runtimeContext); return provider.shuffleRows(soi,OperationUtils.cleanupSubTasks(this)); } private void buildReduceScan(byte[] uniqueSequenceID) throws StandardException { try{ reduceScan = Scans.buildPrefixRangeScan(uniqueSequenceID, null); //no transaction needed //make sure that we filter out failed tasks if 
(failedTasks.size() > 0) { reduceScan.setFilter(derbyFactory.getSuccessFilter(failedTasks)); } } catch (IOException e) { throw Exceptions.parseException(e); } } @Override public void close() throws StandardException, IOException { if(scanner!=null) scanner.close(); super.close(); // TODO: check why we cal source.close() even though we don't call source.open() from open() if(source!=null) source.close(); } @Override public byte[] getUniqueSequenceId() { return uniqueSequenceID; } private ExecRow getStep1Row(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (step1Aggregator == null) { buffer = new DistinctAggregateBuffer(SpliceConstants.ringBufferSize, aggregates,new EmptyRowSupplier(aggregateContext),new SpliceWarningCollector(activation),DistinctAggregateBuffer.STEP.ONE,spliceRuntimeContext); DescriptorSerializer[] serializers = VersionedSerializers.latestVersion(false).getSerializers(getExecRowDefinition()); KeyEncoder encoder = new KeyEncoder(NoOpPrefix.INSTANCE,BareKeyHash.encoder(keyColumns,null,serializers),NoOpPostfix.INSTANCE); step1Aggregator = new DistinctScalarAggregateIterator(buffer,new SourceIterator(source),encoder); step1Aggregator.open(); timer = spliceRuntimeContext.newTimer(); } timer.startTiming(); GroupedRow row = step1Aggregator.next(spliceRuntimeContext); if(row==null){ currentKey=null; clearCurrentRow(); step1Aggregator.close(); timer.stopTiming(); stopExecutionTime = System.currentTimeMillis(); return null; } currentKey = row.getGroupingKey(); ExecRow execRow = row.getRow(); setCurrentRow(execRow); timer.tick(1); return execRow; } private ExecRow getStep2Row(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (step2Closed) return null; if(step2Aggregator==null){ scanner = getResultScanner(keyColumns,spliceRuntimeContext,extraUniqueSequenceID); StandardIterator<ExecRow> sourceIterator = new ScanIterator(scanner,OperationUtils.getPairDecoder(this,spliceRuntimeContext)); step2Aggregator = new SingleDistinctScalarAggregateIterator(sourceIterator,new EmptyRowSupplier(aggregateContext),new SpliceWarningCollector(activation),aggregates); step2Aggregator.open(); timer = spliceRuntimeContext.newTimer(); } timer.startTiming(); step2Closed = true; boolean shouldClose = true; try{ GroupedRow row = step2Aggregator.next(spliceRuntimeContext); if(row==null) { clearCurrentRow(); timer.stopTiming(); stopExecutionTime = System.currentTimeMillis(); return null; } //don't close the aggregator unless you have no more data shouldClose =false; ExecRow execRow = row.getRow(); setCurrentRow(execRow); timer.tick(1); return execRow; } finally{ if(shouldClose) step2Aggregator.close(); } } private boolean matchesSpliceRuntimeBucket(final SpliceRuntimeContext spliceRuntimeContext) { boolean retval = true; if (region != null) { byte[] startKey = region.getStartKey(); // see if this region was used to write intermediate results from step 2 SpreadBucket currentSpread = SpliceDriver.driver().getTempTable().getCurrentSpread(); int thisBucket = startKey.length > 0 ? 
currentSpread.bucketIndex(startKey[0]) : 0; if (step2Bucket != thisBucket) { retval = false; } } return retval; } private ExecRow getStep3Row(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (step3Closed) return null; if (!matchesSpliceRuntimeBucket(spliceRuntimeContext)) { return null; } if(step3Aggregator==null){ scanner = getResultScanner(keyColumns,spliceRuntimeContext,uniqueSequenceID); StandardIterator<ExecRow> sourceIterator = new ScanIterator(scanner,OperationUtils.getPairDecoder(this,spliceRuntimeContext)); step3Aggregator = new SingleDistinctScalarAggregateIterator(sourceIterator,new EmptyRowSupplier(aggregateContext),new SpliceWarningCollector(activation),aggregates); step3Aggregator.open(); timer = spliceRuntimeContext.newTimer(); } try{ timer.startTiming(); GroupedRow row = step3Aggregator.next(spliceRuntimeContext); step3Closed = true; if(row==null){ clearCurrentRow(); timer.stopTiming(); stopExecutionTime = System.currentTimeMillis(); return null; } ExecRow execRow = row.getRow(); setCurrentRow(execRow); timer.tick(1); return execRow; }finally{ step3Aggregator.close(); } } public ExecRow getNextSinkRow(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (LOG.isTraceEnabled()) SpliceLogUtils.trace(LOG, "getNextSinkRow"); if (spliceRuntimeContext.isFirstStepInMultistep()) return getStep1Row(spliceRuntimeContext); else return getStep2Row(spliceRuntimeContext); } @Override public ExecRow nextRow(SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (LOG.isTraceEnabled()) SpliceLogUtils.trace(LOG, "getNextRow"); return getStep3Row(spliceRuntimeContext); } @Override public KeyEncoder getKeyEncoder(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException { DataHash hash = new SuppliedDataHash(new StandardSupplier<byte[]>() { @Override public byte[] get() throws StandardException { return currentKey; } }); final HashPrefix prefix = spliceRuntimeContext.isFirstStepInMultistep() ? 
new BucketingPrefix(new FixedPrefix(extraUniqueSequenceID), HashFunctions.murmur3(0),SpliceDriver.driver().getTempTable().getCurrentSpread()) : new FixedBucketPrefix(spliceRuntimeContext.getHashBucket(),new FixedPrefix(uniqueSequenceID)); final KeyPostfix uniquePostfix = new UniquePostfix(spliceRuntimeContext.getCurrentTaskId(),operationInformation.getUUIDGenerator()); return new KeyEncoder(prefix,spliceRuntimeContext.isFirstStepInMultistep()?hash:getKeyHash(spliceRuntimeContext),uniquePostfix) { @Override public KeyDecoder getDecoder(){ try { return new KeyDecoder(getKeyHashDecoder(),prefix.getPrefixLength()); } catch (StandardException e) { SpliceLogUtils.logAndThrowRuntime(LOG,e); } return null; }}; } private KeyHashDecoder getKeyHashDecoder() throws StandardException { ExecRow execRowDefinition = getExecRowDefinition(); DescriptorSerializer[] serializers = VersionedSerializers.latestVersion(false).getSerializers(execRowDefinition); int[] rowColumns = IntArrays.intersect(keyColumns, execRowDefinition.nColumns()); return EntryDataDecoder.decoder(rowColumns, null,serializers); } public DataHash getKeyHash(SpliceRuntimeContext spliceRuntimeContext) throws StandardException { ExecRow execRowDefinition = getExecRowDefinition(); int[] rowColumns = IntArrays.intersect(keyColumns, execRowDefinition.nColumns()); DescriptorSerializer[] serializers = VersionedSerializers.latestVersion(false).getSerializers(execRowDefinition); return BareKeyHash.encoder(rowColumns, null, serializers); } @Override public DataHash getRowHash(SpliceRuntimeContext spliceRuntimeContext) throws StandardException { ExecRow execRowDefinition = getExecRowDefinition(); int[] rowColumns = IntArrays.complement(keyColumns, execRowDefinition.nColumns()); DescriptorSerializer[] serializers = VersionedSerializers.latestVersion(false).getSerializers(execRowDefinition); return BareKeyHash.encoder(rowColumns,null,serializers); } @Override public void writeExternal(ObjectOutput out) throws IOException { super.writeExternal(out); out.writeBoolean(isInSortedOrder); out.writeInt(orderItem); out.writeInt(extraUniqueSequenceID.length); out.write(extraUniqueSequenceID); out.writeInt(step2Bucket); } @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { super.readExternal(in); isInSortedOrder = in.readBoolean(); orderItem = in.readInt(); extraUniqueSequenceID = new byte[in.readInt()]; in.readFully(extraUniqueSequenceID); step2Bucket = in.readInt(); } @Override public void init(SpliceOperationContext context) throws StandardException, IOException { super.init(context); ExecPreparedStatement gsps = activation.getPreparedStatement(); ColumnOrdering[] order = (ColumnOrdering[]) ((FormatableArrayHolder)gsps.getSavedObject(orderItem)).getArray(ColumnOrdering.class); keyColumns = new int[order.length]; for(int index=0;index<order.length;index++){ keyColumns[index] = order[index].getColumnId(); } baseScan = context.getScan(); startExecutionTime = System.currentTimeMillis(); } @Override protected int getNumMetrics() { int size = super.getNumMetrics(); if(buffer!=null) size++; else if(step3Aggregator!=null) size+=2; if(step2Aggregator!=null ||step1Aggregator!=null) size++; if(scanner!=null) size+=10; return size; } @Override protected void updateStats(OperationRuntimeStats stats) { if(buffer!=null){ stats.addMetric(OperationMetric.FILTERED_ROWS,buffer.getRowsMerged()); stats.setBufferFillRatio(buffer.getMaxFillRatio()); }else if(step3Aggregator!=null){ 
stats.addMetric(OperationMetric.FILTERED_ROWS,step3Aggregator.getRowsRead()); //stats.addMetric(OperationMetric.INPUT_ROWS, step3Aggregator.getRowsRead()); } if(step1Aggregator!=null){ stats.addMetric(OperationMetric.INPUT_ROWS,step1Aggregator.getRowsRead()); }else if(step2Aggregator!=null){ //stats.addMetric(OperationMetric.INPUT_ROWS, step2Aggregator.getRowsRead()); } if(step3Aggregator!=null){ stats.addMetric(OperationMetric.OUTPUT_ROWS, timer.getNumEvents()); } if(scanner!=null){ stats.addMetric(OperationMetric.LOCAL_SCAN_ROWS,scanner.getLocalRowsRead()); stats.addMetric(OperationMetric.LOCAL_SCAN_BYTES,scanner.getLocalBytesRead()); TimeView localView = scanner.getLocalReadTime(); stats.addMetric(OperationMetric.LOCAL_SCAN_WALL_TIME,localView.getWallClockTime()); stats.addMetric(OperationMetric.LOCAL_SCAN_CPU_TIME,localView.getCpuTime()); stats.addMetric(OperationMetric.LOCAL_SCAN_USER_TIME,localView.getUserTime()); stats.addMetric(OperationMetric.REMOTE_SCAN_ROWS,scanner.getRemoteRowsRead()); stats.addMetric(OperationMetric.REMOTE_SCAN_BYTES,scanner.getRemoteBytesRead()); TimeView remoteView = scanner.getLocalReadTime(); stats.addMetric(OperationMetric.REMOTE_SCAN_WALL_TIME,remoteView.getWallClockTime()); stats.addMetric(OperationMetric.REMOTE_SCAN_CPU_TIME,remoteView.getCpuTime()); stats.addMetric(OperationMetric.REMOTE_SCAN_USER_TIME,remoteView.getUserTime()); } super.updateStats(stats); } @Override public void open() throws StandardException, IOException { super.open(); this.extraUniqueSequenceID = operationInformation.getUUIDGenerator().nextBytes(); if(step3Aggregator!=null){ step3Aggregator.close(); step3Aggregator = null; } step3Closed = false; } private SpliceResultScanner getResultScanner(final int[] keyColumns,SpliceRuntimeContext spliceRuntimeContext, final byte[] uniqueID) throws StandardException { if(!spliceRuntimeContext.isSink()){ byte[] tempTableBytes = SpliceDriver.driver().getTempTable().getTempTableName(); buildReduceScan(uniqueID); return new ClientResultScanner(tempTableBytes,reduceScan,true,spliceRuntimeContext); } //we are under another sink, so we need to use a RegionAwareScanner final DataValueDescriptor[] cols = sourceExecIndexRow.getRowArray(); ScanBoundary boundary = new BaseHashAwareScanBoundary(SpliceConstants.DEFAULT_FAMILY_BYTES){ @Override public byte[] getStartKey(Result result) { MultiFieldDecoder fieldDecoder = MultiFieldDecoder.wrap(result.getRow()); fieldDecoder.seek(uniqueID.length+1); int adjusted = DerbyBytesUtil.skip(fieldDecoder,keyColumns,cols); fieldDecoder.reset(); return fieldDecoder.slice(adjusted+uniqueID.length+1); } @Override public byte[] getStopKey(Result result) { byte[] start = getStartKey(result); BytesUtil.unsignedIncrement(start, start.length - 1); return start; } }; // reset baseScan to bucket# + uniqueId byte[] regionStart = region.getStartKey(); if(regionStart == null || regionStart.length == 0) { regionStart = new byte[1]; regionStart[0] = 0; } byte[] start = new byte[regionStart.length+uniqueID.length]; System.arraycopy(regionStart, 0, start, 0, regionStart.length); System.arraycopy(uniqueID, 0, start, regionStart.length, uniqueID.length); try { baseScan = Scans.buildPrefixRangeScan(start, null); } catch (IOException e) { throw Exceptions.parseException(e); } //don't use a transaction for this, since we are reading from temp return RegionAwareScanner.create(null,region,baseScan,SpliceConstants.TEMP_TABLE_BYTES,boundary,spliceRuntimeContext); } @Override public String toString() { return 
String.format("DistinctScalarAggregateOperation {resultSetNumber=%d, source=%s}", resultSetNumber, source); } }
splice_machine/src/main/java/com/splicemachine/derby/impl/sql/execute/operations/DistinctScalarAggregateOperation.java
package com.splicemachine.derby.impl.sql.execute.operations; import com.splicemachine.constants.SpliceConstants; import com.splicemachine.constants.bytes.BytesUtil; import com.splicemachine.derby.hbase.SpliceDriver; import com.splicemachine.derby.hbase.SpliceObserverInstructions; import com.splicemachine.derby.iapi.sql.execute.SinkingOperation; import com.splicemachine.derby.iapi.sql.execute.SpliceOperation; import com.splicemachine.derby.iapi.sql.execute.SpliceOperationContext; import com.splicemachine.derby.iapi.sql.execute.SpliceRuntimeContext; import com.splicemachine.derby.iapi.storage.RowProvider; import com.splicemachine.derby.iapi.storage.ScanBoundary; import com.splicemachine.derby.impl.sql.execute.operations.distinctscalar.DistinctAggregateBuffer; import com.splicemachine.derby.impl.sql.execute.operations.distinctscalar.DistinctScalarAggregateIterator; import com.splicemachine.derby.impl.sql.execute.operations.distinctscalar.SingleDistinctScalarAggregateIterator; import com.splicemachine.derby.impl.sql.execute.operations.framework.EmptyRowSupplier; import com.splicemachine.derby.impl.sql.execute.operations.framework.GroupedRow; import com.splicemachine.derby.impl.sql.execute.operations.framework.SourceIterator; import com.splicemachine.derby.impl.sql.execute.operations.framework.SpliceWarningCollector; import com.splicemachine.derby.impl.storage.BaseHashAwareScanBoundary; import com.splicemachine.derby.impl.storage.ClientResultScanner; import com.splicemachine.derby.impl.storage.DistributedClientScanProvider; import com.splicemachine.derby.impl.storage.RegionAwareScanner; import com.splicemachine.derby.impl.storage.RowProviders; import com.splicemachine.derby.impl.storage.SpliceResultScanner; import com.splicemachine.derby.metrics.OperationMetric; import com.splicemachine.derby.metrics.OperationRuntimeStats; import com.splicemachine.derby.utils.DerbyBytesUtil; import com.splicemachine.derby.utils.ScanIterator; import com.splicemachine.derby.utils.Scans; import com.splicemachine.derby.utils.SpliceUtils; import com.splicemachine.derby.utils.StandardIterator; import com.splicemachine.derby.utils.StandardSupplier; import com.splicemachine.derby.utils.marshall.*; import com.splicemachine.derby.utils.marshall.dvd.DescriptorSerializer; import com.splicemachine.derby.utils.marshall.dvd.VersionedSerializers; import com.splicemachine.encoding.MultiFieldDecoder; import com.splicemachine.job.JobResults; import com.splicemachine.metrics.TimeView; import com.splicemachine.si.api.TxnView; import com.splicemachine.utils.IntArrays; import com.splicemachine.utils.SpliceLogUtils; import com.splicemachine.hash.HashFunctions; import com.splicemachine.pipeline.exception.Exceptions; import com.splicemachine.db.iapi.error.StandardException; import com.splicemachine.db.iapi.services.io.FormatableArrayHolder; import com.splicemachine.db.iapi.services.loader.GeneratedMethod; import com.splicemachine.db.iapi.sql.execute.ExecPreparedStatement; import com.splicemachine.db.iapi.sql.execute.ExecRow; import com.splicemachine.db.iapi.store.access.ColumnOrdering; import com.splicemachine.db.iapi.types.DataValueDescriptor; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.log4j.Logger; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; /** * * The Distinct Scalar Aggregate is a three step process. The steps occur as a CombinedRowProvider (first 2 steps) and then * the reduce scan combines the last results. 
* * * * Step 1 Reading from Source and Writing to temp buckets with extraUniqueSequenceID prefix (Not needed in the case that data is sorted) * If Distinct Keys Match * Merge Non Distinct Aggregates * Else * add to buffer * Write to temp buckets * * Sorted * * Step2: Shuffle Intermediate Results to temp with uniqueSequenceID prefix * * If Keys Match * Merge Non Distinct Aggregates * else * Merge Distinct and Non Distinct Aggregates * Write to temp buckets * * Step 3: Combine N outputs * Merge Distinct and Non Distinct Aggregates * Flow through output of stack * * @author Scott Fines * Created on: 5/21/13 */ public class DistinctScalarAggregateOperation extends GenericAggregateOperation{ private static final long serialVersionUID=1l; private byte[] extraUniqueSequenceID; private boolean isInSortedOrder; private int orderItem; private int[] keyColumns; private static final Logger LOG = Logger.getLogger(DistinctScalarAggregateOperation.class); private byte[] currentKey; private Scan baseScan; private DistinctScalarAggregateIterator step1Aggregator; private SingleDistinctScalarAggregateIterator step2Aggregator; private SingleDistinctScalarAggregateIterator step3Aggregator; private boolean step2Closed; private boolean step3Closed; private DistinctAggregateBuffer buffer; private SpliceResultScanner scanner; private int step2Bucket; protected static final String NAME = DistinctScalarAggregateOperation.class.getSimpleName().replaceAll("Operation",""); @Override public String getName() { return NAME; } @SuppressWarnings("UnusedDeclaration") public DistinctScalarAggregateOperation(){} @SuppressWarnings("UnusedParameters") public DistinctScalarAggregateOperation(SpliceOperation source, boolean isInSortedOrder, int aggregateItem, int orderItem, GeneratedMethod rowAllocator, int maxRowSize, int resultSetNumber, boolean singleInputRow, double optimizerEstimatedRowCount, double optimizerEstimatedCost) throws StandardException{ super(source,aggregateItem,source.getActivation(),rowAllocator,resultSetNumber,optimizerEstimatedRowCount,optimizerEstimatedCost); this.orderItem = orderItem; this.isInSortedOrder = false; // XXX TODO Jleach: Optimize when data is already sorted. try { init(SpliceOperationContext.newContext(source.getActivation())); } catch (IOException e) { throw Exceptions.parseException(e); } } @Override public ExecRow getExecRowDefinition() throws StandardException { ExecRow clone = sourceExecIndexRow.getClone(); // Set the default values to 0 in case a ProjectRestrictOperation has set the default values to 1. // That is done to avoid division by zero exceptions when executing a projection for defining the rows // before execution. 
SpliceUtils.populateDefaultValues(clone.getRowArray(), 0); return clone; } @Override public RowProvider getReduceRowProvider(SpliceOperation top, PairDecoder rowDecoder, SpliceRuntimeContext spliceRuntimeContext, boolean returnDefaultValue) throws StandardException, IOException { if (LOG.isTraceEnabled()) SpliceLogUtils.trace(LOG, "getReduceRowProvider"); buildReduceScan(uniqueSequenceID); if(top!=this && top instanceof SinkingOperation){ // If being written to a table, it can be distributed serializeSource=false; SpliceUtils.setInstructions(reduceScan, activation, top, spliceRuntimeContext); serializeSource=true; byte[] tempTableBytes = SpliceDriver.driver().getTempTable().getTempTableName(); return new DistributedClientScanProvider("distinctScalarAggregateReduce",tempTableBytes,reduceScan,rowDecoder, spliceRuntimeContext); }else{ /* * Scanning back to client, the last aggregation has to be performed on the client because we cannot do server side buffering when * data is being passed back to the client due to the fact that HBase is a forward only scan in the case of interuptions. */ return RowProviders.openedSourceProvider(top,LOG,spliceRuntimeContext); } } @Override public RowProvider getMapRowProvider(SpliceOperation top, PairDecoder rowDecoder, SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { buildReduceScan(extraUniqueSequenceID); boolean serializeSourceTemp = serializeSource; serializeSource = spliceRuntimeContext.isFirstStepInMultistep(); SpliceUtils.setInstructions(reduceScan, activation, top, spliceRuntimeContext); serializeSource = serializeSourceTemp; byte[] tempTableBytes = SpliceDriver.driver().getTempTable().getTempTableName(); return new DistributedClientScanProvider("distinctScalarAggregateMap",tempTableBytes,reduceScan,rowDecoder, spliceRuntimeContext); } @Override protected JobResults doShuffle(SpliceRuntimeContext runtimeContext ) throws StandardException,IOException { long start = System.currentTimeMillis(); RowProvider provider; TxnView txn = runtimeContext.getTxn(); if (!isInSortedOrder) { SpliceRuntimeContext firstStep = SpliceRuntimeContext.generateSinkRuntimeContext(txn, true); firstStep.setStatementInfo(runtimeContext.getStatementInfo()); SpliceRuntimeContext secondStep = SpliceRuntimeContext.generateSinkRuntimeContext(txn,false); step2Bucket = SpliceDriver.driver().getTempTable().getCurrentSpread().bucketIndex(secondStep.getHashBucket()); secondStep.setStatementInfo(runtimeContext.getStatementInfo()); final RowProvider step1 = source.getMapRowProvider(this, OperationUtils.getPairDecoder(this, runtimeContext), firstStep); // Step 1 final RowProvider step2 = getMapRowProvider(this, OperationUtils.getPairDecoder(this, runtimeContext), secondStep); // Step 2 provider = RowProviders.combineInSeries(step1, step2); } else { SpliceRuntimeContext secondStep = SpliceRuntimeContext.generateSinkRuntimeContext(txn,false); secondStep.setStatementInfo(runtimeContext.getStatementInfo()); provider = source.getMapRowProvider(this, OperationUtils.getPairDecoder(this, runtimeContext), secondStep); // Step 1 } nextTime+= System.currentTimeMillis()-start; SpliceObserverInstructions soi = SpliceObserverInstructions.create(getActivation(),this,runtimeContext); return provider.shuffleRows(soi,OperationUtils.cleanupSubTasks(this)); } private void buildReduceScan(byte[] uniqueSequenceID) throws StandardException { try{ reduceScan = Scans.buildPrefixRangeScan(uniqueSequenceID, null); //no transaction needed //make sure that we filter out failed tasks if 
(failedTasks.size() > 0) { reduceScan.setFilter(derbyFactory.getSuccessFilter(failedTasks)); } } catch (IOException e) { throw Exceptions.parseException(e); } } @Override public void close() throws StandardException, IOException { if(scanner!=null) scanner.close(); super.close(); // TODO: check why we cal source.close() even though we don't call source.open() from open() if(source!=null) source.close(); } @Override public byte[] getUniqueSequenceId() { return uniqueSequenceID; } private ExecRow getStep1Row(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (step1Aggregator == null) { buffer = new DistinctAggregateBuffer(SpliceConstants.ringBufferSize, aggregates,new EmptyRowSupplier(aggregateContext),new SpliceWarningCollector(activation),DistinctAggregateBuffer.STEP.ONE,spliceRuntimeContext); DescriptorSerializer[] serializers = VersionedSerializers.latestVersion(false).getSerializers(getExecRowDefinition()); KeyEncoder encoder = new KeyEncoder(NoOpPrefix.INSTANCE,BareKeyHash.encoder(keyColumns,null,serializers),NoOpPostfix.INSTANCE); step1Aggregator = new DistinctScalarAggregateIterator(buffer,new SourceIterator(source),encoder); step1Aggregator.open(); timer = spliceRuntimeContext.newTimer(); } timer.startTiming(); GroupedRow row = step1Aggregator.next(spliceRuntimeContext); if(row==null){ currentKey=null; clearCurrentRow(); step1Aggregator.close(); timer.stopTiming(); stopExecutionTime = System.currentTimeMillis(); return null; } currentKey = row.getGroupingKey(); ExecRow execRow = row.getRow(); setCurrentRow(execRow); timer.tick(1); return execRow; } private ExecRow getStep2Row(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (step2Closed) return null; if(step2Aggregator==null){ scanner = getResultScanner(keyColumns,spliceRuntimeContext,extraUniqueSequenceID); StandardIterator<ExecRow> sourceIterator = new ScanIterator(scanner,OperationUtils.getPairDecoder(this,spliceRuntimeContext)); step2Aggregator = new SingleDistinctScalarAggregateIterator(sourceIterator,new EmptyRowSupplier(aggregateContext),new SpliceWarningCollector(activation),aggregates); step2Aggregator.open(); timer = spliceRuntimeContext.newTimer(); } timer.startTiming(); step2Closed = true; boolean shouldClose = true; try{ GroupedRow row = step2Aggregator.next(spliceRuntimeContext); if(row==null) { clearCurrentRow(); timer.stopTiming(); stopExecutionTime = System.currentTimeMillis(); return null; } //don't close the aggregator unless you have no more data shouldClose =false; ExecRow execRow = row.getRow(); setCurrentRow(execRow); timer.tick(1); return execRow; } finally{ if(shouldClose) step2Aggregator.close(); } } private boolean matchesSpliceRuntimeBucket(final SpliceRuntimeContext spliceRuntimeContext) { boolean retval = true; if (region != null) { byte[] startKey = region.getStartKey(); // see if this region was used to write intermediate results from step 2 SpreadBucket currentSpread = SpliceDriver.driver().getTempTable().getCurrentSpread(); int thisBucket = startKey.length > 0 ? 
currentSpread.bucketIndex(startKey[0]) : 0; if (step2Bucket != thisBucket) { retval = false; } } return retval; } private ExecRow getStep3Row(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (step3Closed) return null; if (!matchesSpliceRuntimeBucket(spliceRuntimeContext)) { return null; } if(step3Aggregator==null){ scanner = getResultScanner(keyColumns,spliceRuntimeContext,uniqueSequenceID); StandardIterator<ExecRow> sourceIterator = new ScanIterator(scanner,OperationUtils.getPairDecoder(this,spliceRuntimeContext)); step3Aggregator = new SingleDistinctScalarAggregateIterator(sourceIterator,new EmptyRowSupplier(aggregateContext),new SpliceWarningCollector(activation),aggregates); step3Aggregator.open(); timer = spliceRuntimeContext.newTimer(); } try{ timer.startTiming(); GroupedRow row = step3Aggregator.next(spliceRuntimeContext); step3Closed = true; if(row==null){ clearCurrentRow(); timer.stopTiming(); stopExecutionTime = System.currentTimeMillis(); return null; } ExecRow execRow = row.getRow(); setCurrentRow(execRow); timer.tick(1); return execRow; }finally{ step3Aggregator.close(); } } public ExecRow getNextSinkRow(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (LOG.isTraceEnabled()) SpliceLogUtils.trace(LOG, "getNextSinkRow"); if (spliceRuntimeContext.isFirstStepInMultistep()) return getStep1Row(spliceRuntimeContext); else return getStep2Row(spliceRuntimeContext); } @Override public ExecRow nextRow(SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { if (LOG.isTraceEnabled()) SpliceLogUtils.trace(LOG, "getNextRow"); return getStep3Row(spliceRuntimeContext); } @Override public KeyEncoder getKeyEncoder(final SpliceRuntimeContext spliceRuntimeContext) throws StandardException { DataHash hash = new SuppliedDataHash(new StandardSupplier<byte[]>() { @Override public byte[] get() throws StandardException { return currentKey; } }); final HashPrefix prefix = spliceRuntimeContext.isFirstStepInMultistep() ? 
new BucketingPrefix(new FixedPrefix(extraUniqueSequenceID), HashFunctions.murmur3(0),SpliceDriver.driver().getTempTable().getCurrentSpread()) : new FixedBucketPrefix(spliceRuntimeContext.getHashBucket(),new FixedPrefix(uniqueSequenceID)); final KeyPostfix uniquePostfix = new UniquePostfix(spliceRuntimeContext.getCurrentTaskId(),operationInformation.getUUIDGenerator()); return new KeyEncoder(prefix,spliceRuntimeContext.isFirstStepInMultistep()?hash:NoOpDataHash.INSTANCE,uniquePostfix) { @Override public KeyDecoder getDecoder(){ try { return new KeyDecoder(getKeyHashDecoder(),prefix.getPrefixLength()); } catch (StandardException e) { SpliceLogUtils.logAndThrowRuntime(LOG,e); } return null; }}; } private KeyHashDecoder getKeyHashDecoder() throws StandardException { ExecRow execRowDefinition = getExecRowDefinition(); DescriptorSerializer[] serializers = VersionedSerializers.latestVersion(false).getSerializers(execRowDefinition); int[] rowColumns = IntArrays.intersect(keyColumns, execRowDefinition.nColumns()); return EntryDataDecoder.decoder(rowColumns, null,serializers); } @Override public DataHash getRowHash(SpliceRuntimeContext spliceRuntimeContext) throws StandardException { ExecRow execRowDefinition = getExecRowDefinition(); int[] rowColumns = IntArrays.complement(keyColumns, execRowDefinition.nColumns()); DescriptorSerializer[] serializers = VersionedSerializers.latestVersion(false).getSerializers(execRowDefinition); return BareKeyHash.encoder(rowColumns,null,serializers); } @Override public void writeExternal(ObjectOutput out) throws IOException { super.writeExternal(out); out.writeBoolean(isInSortedOrder); out.writeInt(orderItem); out.writeInt(extraUniqueSequenceID.length); out.write(extraUniqueSequenceID); out.writeInt(step2Bucket); } @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { super.readExternal(in); isInSortedOrder = in.readBoolean(); orderItem = in.readInt(); extraUniqueSequenceID = new byte[in.readInt()]; in.readFully(extraUniqueSequenceID); step2Bucket = in.readInt(); } @Override public void init(SpliceOperationContext context) throws StandardException, IOException { super.init(context); ExecPreparedStatement gsps = activation.getPreparedStatement(); ColumnOrdering[] order = (ColumnOrdering[]) ((FormatableArrayHolder)gsps.getSavedObject(orderItem)).getArray(ColumnOrdering.class); keyColumns = new int[order.length]; for(int index=0;index<order.length;index++){ keyColumns[index] = order[index].getColumnId(); } baseScan = context.getScan(); startExecutionTime = System.currentTimeMillis(); } @Override protected int getNumMetrics() { int size = super.getNumMetrics(); if(buffer!=null) size++; else if(step3Aggregator!=null) size+=2; if(step2Aggregator!=null ||step1Aggregator!=null) size++; if(scanner!=null) size+=10; return size; } @Override protected void updateStats(OperationRuntimeStats stats) { if(buffer!=null){ stats.addMetric(OperationMetric.FILTERED_ROWS,buffer.getRowsMerged()); stats.setBufferFillRatio(buffer.getMaxFillRatio()); }else if(step3Aggregator!=null){ stats.addMetric(OperationMetric.FILTERED_ROWS,step3Aggregator.getRowsRead()); //stats.addMetric(OperationMetric.INPUT_ROWS, step3Aggregator.getRowsRead()); } if(step1Aggregator!=null){ stats.addMetric(OperationMetric.INPUT_ROWS,step1Aggregator.getRowsRead()); }else if(step2Aggregator!=null){ //stats.addMetric(OperationMetric.INPUT_ROWS, step2Aggregator.getRowsRead()); } if(step3Aggregator!=null){ stats.addMetric(OperationMetric.OUTPUT_ROWS, timer.getNumEvents()); } 
if(scanner!=null){ stats.addMetric(OperationMetric.LOCAL_SCAN_ROWS,scanner.getLocalRowsRead()); stats.addMetric(OperationMetric.LOCAL_SCAN_BYTES,scanner.getLocalBytesRead()); TimeView localView = scanner.getLocalReadTime(); stats.addMetric(OperationMetric.LOCAL_SCAN_WALL_TIME,localView.getWallClockTime()); stats.addMetric(OperationMetric.LOCAL_SCAN_CPU_TIME,localView.getCpuTime()); stats.addMetric(OperationMetric.LOCAL_SCAN_USER_TIME,localView.getUserTime()); stats.addMetric(OperationMetric.REMOTE_SCAN_ROWS,scanner.getRemoteRowsRead()); stats.addMetric(OperationMetric.REMOTE_SCAN_BYTES,scanner.getRemoteBytesRead()); TimeView remoteView = scanner.getRemoteReadTime(); stats.addMetric(OperationMetric.REMOTE_SCAN_WALL_TIME,remoteView.getWallClockTime()); stats.addMetric(OperationMetric.REMOTE_SCAN_CPU_TIME,remoteView.getCpuTime()); stats.addMetric(OperationMetric.REMOTE_SCAN_USER_TIME,remoteView.getUserTime()); } super.updateStats(stats); } @Override public void open() throws StandardException, IOException { super.open(); this.extraUniqueSequenceID = operationInformation.getUUIDGenerator().nextBytes(); if(step3Aggregator!=null){ step3Aggregator.close(); step3Aggregator = null; } step3Closed = false; } private SpliceResultScanner getResultScanner(final int[] keyColumns,SpliceRuntimeContext spliceRuntimeContext, final byte[] uniqueID) throws StandardException { if(!spliceRuntimeContext.isSink()){ byte[] tempTableBytes = SpliceDriver.driver().getTempTable().getTempTableName(); buildReduceScan(uniqueID); return new ClientResultScanner(tempTableBytes,reduceScan,true,spliceRuntimeContext); } //we are under another sink, so we need to use a RegionAwareScanner final DataValueDescriptor[] cols = sourceExecIndexRow.getRowArray(); ScanBoundary boundary = new BaseHashAwareScanBoundary(SpliceConstants.DEFAULT_FAMILY_BYTES){ @Override public byte[] getStartKey(Result result) { MultiFieldDecoder fieldDecoder = MultiFieldDecoder.wrap(result.getRow()); fieldDecoder.seek(uniqueID.length+1); int adjusted = DerbyBytesUtil.skip(fieldDecoder,keyColumns,cols); fieldDecoder.reset(); return fieldDecoder.slice(adjusted+uniqueID.length+1); } @Override public byte[] getStopKey(Result result) { byte[] start = getStartKey(result); BytesUtil.unsignedIncrement(start, start.length - 1); return start; } }; // reset baseScan to bucket# + uniqueId byte[] regionStart = region.getStartKey(); if(regionStart == null || regionStart.length == 0) { regionStart = new byte[1]; regionStart[0] = 0; } byte[] start = new byte[regionStart.length+uniqueID.length]; System.arraycopy(regionStart, 0, start, 0, regionStart.length); System.arraycopy(uniqueID, 0, start, regionStart.length, uniqueID.length); try { baseScan = Scans.buildPrefixRangeScan(start, null); } catch (IOException e) { throw Exceptions.parseException(e); } //don't use a transaction for this, since we are reading from temp return RegionAwareScanner.create(null,region,baseScan,SpliceConstants.TEMP_TABLE_BYTES,boundary,spliceRuntimeContext); } @Override public String toString() { return String.format("DistinctScalarAggregateOperation {resultSetNumber=%d, source=%s}", resultSetNumber, source); } }
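For orientation: the DistinctScalarAggregateOperation above computes a scalar aggregate over distinct values in three steps (per-region de-duplication, a bucketed merge through the temp table, and a final scan that produces the single result row). The sketch below is a deliberately simplified, in-memory illustration of that flow; the class and method names are invented for the example and are not part of the Splice Machine API.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Illustrative only: SUM(DISTINCT value) computed in three steps, with in-memory
// "partitions" standing in for regions and a hash bucket standing in for the
// temp-table prefix used by the operation above.
final class ThreeStepDistinctSum {

    // Step 1: every partition removes its own duplicates locally.
    static Set<Long> step1LocalDistinct(List<Long> partition) {
        return new HashSet<>(partition);
    }

    // Step 2: locally distinct values are regrouped by hash bucket and
    // de-duplicated again, so each distinct value survives exactly once overall.
    static Map<Integer, Set<Long>> step2MergeByBucket(List<Set<Long>> locals, int buckets) {
        Map<Integer, Set<Long>> merged = new HashMap<>();
        for (Set<Long> local : locals) {
            for (Long v : local) {
                int bucket = Math.floorMod(Long.hashCode(v), buckets);
                merged.computeIfAbsent(bucket, b -> new HashSet<>()).add(v);
            }
        }
        return merged;
    }

    // Step 3: one pass over the merged buckets produces the single aggregate row.
    static long step3Aggregate(Map<Integer, Set<Long>> merged) {
        long sum = 0;
        for (Set<Long> bucket : merged.values()) {
            for (long v : bucket) {
                sum += v;
            }
        }
        return sum;
    }

    public static void main(String[] args) {
        List<List<Long>> partitions = List.of(List.of(1L, 2L, 2L), List.of(2L, 3L), List.of(3L, 4L));
        List<Set<Long>> locals = new ArrayList<>();
        for (List<Long> p : partitions) {
            locals.add(step1LocalDistinct(p));
        }
        // 1 + 2 + 3 + 4 = 10, regardless of how the duplicates were spread out.
        System.out.println(step3Aggregate(step2MergeByBucket(locals, 16)));
    }
}

The commit message below ("fix decoding in third phase") concerns the real operation's key decoding during that final scan, which the sketch deliberately glosses over.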
DB-3712: fix decoding in third phase
splice_machine/src/main/java/com/splicemachine/derby/impl/sql/execute/operations/DistinctScalarAggregateOperation.java
DB-3712: fix decoding in third phase
Java
agpl-3.0
fea1ee5654ec61d86f720a6a172193ccb1d616f0
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
4f6a5be6-2e62-11e5-9284-b827eb9e62be
hello.java
4f64db8a-2e62-11e5-9284-b827eb9e62be
4f6a5be6-2e62-11e5-9284-b827eb9e62be
hello.java
4f6a5be6-2e62-11e5-9284-b827eb9e62be
Java
agpl-3.0
e490262cdf577cc7133f8540fbe27fd8b340b73e
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
d34dad7e-2e61-11e5-9284-b827eb9e62be
hello.java
d3483b0a-2e61-11e5-9284-b827eb9e62be
d34dad7e-2e61-11e5-9284-b827eb9e62be
hello.java
d34dad7e-2e61-11e5-9284-b827eb9e62be
Java
agpl-3.0
93f5cde8ade00c191ff533f747153895d41d5436
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
754cf396-2e62-11e5-9284-b827eb9e62be
hello.java
75478834-2e62-11e5-9284-b827eb9e62be
754cf396-2e62-11e5-9284-b827eb9e62be
hello.java
754cf396-2e62-11e5-9284-b827eb9e62be
Java
lgpl-2.1
372a5698193aac9f732c1993ebe76b241d2cf859
0
ethaneldridge/vassal,ethaneldridge/vassal,ethaneldridge/vassal
/* * * Copyright (c) 2000-2003 by Rodney Kinney * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License (LGPL) as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with this library; if not, copies are available * at http://www.opensource.org. */ package VASSAL.configure; import java.awt.Frame; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import javax.swing.Action; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.JButton; import javax.swing.JDialog; import org.w3c.dom.Element; import org.w3c.dom.Node; import VASSAL.build.Buildable; import VASSAL.build.Builder; import VASSAL.build.Configurable; import VASSAL.build.GameModule; import VASSAL.build.module.documentation.HelpWindow; import VASSAL.tools.ErrorDialog; /** * Window for editing properties of a {@link Configurable} object */ public class PropertiesWindow extends JDialog { private static final long serialVersionUID = 1L; private Configurer configurer; private Configurable target; private Element originalState; public PropertiesWindow(Frame owner, boolean modal, final Configurable target, HelpWindow helpWindow) { super(owner, modal); initialize(target, helpWindow); } protected void initialize(final Configurable target, HelpWindow helpWindow) { this.target = target; originalState = target.getBuildElement(Builder.createNewDocument()); Node child = originalState.getFirstChild(); while (child != null) { Node nextChild = child.getNextSibling(); if (Node.ELEMENT_NODE == child.getNodeType()) { // Cull Buildables from the state. try { final Class<?> c = GameModule.getGameModule().getDataArchive().loadClass(((Element)child).getTagName()); if (Buildable.class.isAssignableFrom(c)) { originalState.removeChild(child); } } catch (ClassNotFoundException e) { // This element doesn't correspond to a class. Skip it. 
} catch (LinkageError e) { ErrorDialog.bug(e); } } child = nextChild; } setLayout(new BoxLayout(getContentPane(),BoxLayout.Y_AXIS)); configurer = target.getConfigurer(); target.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { if (Configurable.NAME_PROPERTY.equals(evt.getPropertyName())) { setTitle((String) evt.getNewValue()); } } }); add(configurer.getControls()); setTitle(ConfigureTree.getConfigureName(target)); final Box buttonBox = Box.createHorizontalBox(); final JButton okButton = new JButton("Ok"); okButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { save(); } }); buttonBox.add(okButton); final JButton cancelButton = new JButton("Cancel"); cancelButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { cancel(); } }); buttonBox.add(cancelButton); if (target.getHelpFile() != null) { final Action helpAction = new ShowHelpAction(target.getHelpFile().getContents(), null); final JButton helpButton = new JButton(helpAction); buttonBox.add(helpButton); pack(); } add(buttonBox); pack(); setLocationRelativeTo(getParent()); setDefaultCloseOperation(DO_NOTHING_ON_CLOSE); addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent we) { cancel(); } }); } public void cancel() { target.build(originalState); dispose(); } public void save() { configurer.getValue(); dispose(); } }
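PropertiesWindow above snapshots the target's build state before editing and rebuilds from that snapshot when the dialog is cancelled. A minimal, generic version of that snapshot-and-restore pattern might look like the following; the Editable interface and every name in it are invented for illustration and are not part of the VASSAL API.

import java.awt.BorderLayout;
import java.awt.Frame;
import javax.swing.Box;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;

// Illustrative only: capture the editable object's state before editing and
// restore it if the user cancels. "Editable" is a hypothetical stand-in for
// VASSAL's Configurable.
interface Editable {
    String snapshotState();            // analogous to getBuildElement(...)
    void restoreState(String state);   // analogous to build(originalState)
    JComponent getEditorControls();    // analogous to getConfigurer().getControls()
}

class SnapshotEditDialog extends JDialog {
    private final Editable target;
    private final String originalState;

    SnapshotEditDialog(Frame owner, Editable target) {
        super(owner, true);
        this.target = target;
        // The snapshot is taken before the editor controls can change anything.
        this.originalState = target.snapshotState();

        setLayout(new BorderLayout());
        add(target.getEditorControls(), BorderLayout.CENTER);

        JButton ok = new JButton("Ok");
        ok.addActionListener(e -> dispose());          // keep the edits
        JButton cancel = new JButton("Cancel");
        cancel.addActionListener(e -> {                // discard the edits
            target.restoreState(originalState);
            dispose();
        });

        Box buttons = Box.createHorizontalBox();
        buttons.add(ok);
        buttons.add(cancel);
        add(buttons, BorderLayout.SOUTH);

        pack();
        setLocationRelativeTo(owner);
    }
}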
src/main/java/VASSAL/configure/PropertiesWindow.java
/* * * Copyright (c) 2000-2003 by Rodney Kinney * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License (LGPL) as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with this library; if not, copies are available * at http://www.opensource.org. */ package VASSAL.configure; import java.awt.Frame; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import javax.swing.Action; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.JButton; import javax.swing.JDialog; import org.w3c.dom.Element; import org.w3c.dom.Node; import VASSAL.build.Buildable; import VASSAL.build.Builder; import VASSAL.build.Configurable; import VASSAL.build.GameModule; import VASSAL.build.module.documentation.HelpWindow; import VASSAL.tools.ErrorDialog; /** * Window for editing properties of a {@link Configurable} object */ public class PropertiesWindow extends JDialog { private static final long serialVersionUID = 1L; private Configurer configurer; private Configurable target; private Element originalState; public PropertiesWindow(Frame owner, boolean modal, final Configurable target, HelpWindow helpWindow) { super(owner, modal); initialize(target, helpWindow); } protected void initialize(final Configurable target, HelpWindow helpWindow) { this.target = target; originalState = target.getBuildElement(Builder.createNewDocument()); Node child = originalState.getFirstChild(); while (child != null) { Node nextChild = child.getNextSibling(); if (Node.ELEMENT_NODE == child.getNodeType()) { // Cull Buildables from the state. try { final Class<?> c = GameModule.getGameModule().getDataArchive().loadClass(((Element)child).getTagName(), false); if (Buildable.class.isAssignableFrom(c)) { originalState.removeChild(child); } } catch (ClassNotFoundException e) { // This element doesn't correspond to a class. Skip it. 
} catch (LinkageError e) { ErrorDialog.bug(e); } } child = nextChild; } setLayout(new BoxLayout(getContentPane(),BoxLayout.Y_AXIS)); configurer = target.getConfigurer(); target.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { if (Configurable.NAME_PROPERTY.equals(evt.getPropertyName())) { setTitle((String) evt.getNewValue()); } } }); add(configurer.getControls()); setTitle(ConfigureTree.getConfigureName(target)); final Box buttonBox = Box.createHorizontalBox(); final JButton okButton = new JButton("Ok"); okButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { save(); } }); buttonBox.add(okButton); final JButton cancelButton = new JButton("Cancel"); cancelButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { cancel(); } }); buttonBox.add(cancelButton); if (target.getHelpFile() != null) { final Action helpAction = new ShowHelpAction(target.getHelpFile().getContents(), null); final JButton helpButton = new JButton(helpAction); buttonBox.add(helpButton); pack(); } add(buttonBox); pack(); setLocationRelativeTo(getParent()); setDefaultCloseOperation(DO_NOTHING_ON_CLOSE); addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent we) { cancel(); } }); } public void cancel() { target.build(originalState); dispose(); } public void save() { configurer.getValue(); dispose(); } }
removed boolean
src/main/java/VASSAL/configure/PropertiesWindow.java
removed boolean
Java
apache-2.0
691f45c190e4c2677798434c9272da11286c8c4d
0
trekawek/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.AdditionalAnswers.delegatesTo; import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.Set; import java.util.UUID; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; import javax.jcr.PropertyType; import javax.jcr.Value; import javax.jcr.ValueFormatException; import junitx.util.PrivateAccessor; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.api.Descriptors; import org.apache.jackrabbit.oak.commons.json.JsonObject; import org.apache.jackrabbit.oak.commons.json.JsopTokenizer; import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore; import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection; import org.apache.jackrabbit.oak.spi.blob.BlobStore; import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.commit.EmptyHook; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import com.mongodb.DB; /** * Tests for the DocumentDiscoveryLiteService */ public class DocumentDiscoveryLiteServiceTest { /** * container for what should represent an instance, but is not a complete * one, hence 'simplified'. 
it contains most importantly the * DocuemntNodeStore and the discoveryLite service */ class SimplifiedInstance { private DocumentDiscoveryLiteService service; private DocumentNodeStore ns; private final Descriptors descriptors; private Map<String, Object> registeredServices; private final long lastRevInterval; private volatile boolean lastRevStopped = false; private volatile boolean writeSimulationStopped = false; private Thread lastRevThread; private Thread writeSimulationThread; public String workingDir; SimplifiedInstance(DocumentDiscoveryLiteService service, DocumentNodeStore ns, Descriptors descriptors, Map<String, Object> registeredServices, long lastRevInterval, String workingDir) { this.service = service; this.ns = ns; this.workingDir = workingDir; this.descriptors = descriptors; this.registeredServices = registeredServices; this.lastRevInterval = lastRevInterval; if (lastRevInterval > 0) { startLastRevThread(); } } @Override public String toString() { return "SimplifiedInstance[cid=" + ns.getClusterId() + "]"; } private void startLastRevThread() { lastRevStopped = false; lastRevThread = new Thread(new Runnable() { @Override public void run() { while (!lastRevStopped) { SimplifiedInstance.this.ns.getLastRevRecoveryAgent().performRecoveryIfNeeded(); try { Thread.sleep(SimplifiedInstance.this.lastRevInterval); } catch (InterruptedException e) { e.printStackTrace(); } } } }); lastRevThread.setDaemon(true); lastRevThread.setName("lastRevThread[cid=" + ns.getClusterId() + "]"); lastRevThread.start(); } void stopLastRevThread() throws InterruptedException { lastRevStopped = true; lastRevThread.join(); } boolean isFinal() throws Exception { final JsonObject clusterViewObj = getClusterViewObj(); if (clusterViewObj == null) { throw new IllegalStateException("should always have that final flag set"); } String finalStr = clusterViewObj.getProperties().get("final"); return Boolean.valueOf(finalStr); } boolean hasActiveIds(String clusterViewStr, int... expected) throws Exception { return hasIds(clusterViewStr, "active", expected); } boolean hasDeactivatingIds(String clusterViewStr, int... expected) throws Exception { return hasIds(clusterViewStr, "deactivating", expected); } boolean hasInactiveIds(String clusterViewStr, int... expected) throws Exception { return hasIds(clusterViewStr, "inactive", expected); } private boolean hasIds(final String clusterViewStr, final String key, int... expectedIds) throws Exception { final JsonObject clusterViewObj = asJsonObject(clusterViewStr); String actualIdsStr = clusterViewObj == null ? 
null : clusterViewObj.getProperties().get(key); boolean actualEmpty = actualIdsStr == null || actualIdsStr.length() == 0 || actualIdsStr.equals("[]"); boolean expectedEmpty = expectedIds == null || expectedIds.length == 0; if (actualEmpty && expectedEmpty) { return true; } if (actualEmpty != expectedEmpty) { return false; } final List<Integer> actualList = Arrays .asList(ClusterViewDocument.csvToIntegerArray(actualIdsStr.substring(1, actualIdsStr.length() - 1))); if (expectedIds.length != actualList.size()) { return false; } for (int i = 0; i < expectedIds.length; i++) { int anExpectedId = expectedIds[i]; if (!actualList.contains(anExpectedId)) { return false; } } return true; } JsonObject getClusterViewObj() throws Exception { final String json = getClusterViewStr(); return asJsonObject(json); } private JsonObject asJsonObject(final String json) { if (json == null) { return null; } JsopTokenizer t = new JsopTokenizer(json); t.read('{'); JsonObject o = JsonObject.create(t); return o; } String getClusterViewStr() throws Exception { return getDescriptor(DocumentDiscoveryLiteService.OAK_DISCOVERYLITE_CLUSTERVIEW); } String getDescriptor(String key) throws Exception { final Value value = descriptors.getValue(key); if (value == null) { return null; } if (value.getType() != PropertyType.STRING) { return null; } try { return value.getString(); } catch (ValueFormatException vfe) { return null; } } public void dispose() { logger.info("Disposing " + this); try { stopSimulatingWrites(); } catch (InterruptedException e) { fail("interrupted"); } if (lastRevThread != null) { try { stopLastRevThread(); } catch (InterruptedException ok) { fail("interrupted"); } lastRevThread = null; } if (service != null) { service.deactivate(); service = null; } if (ns != null) { ns.dispose(); ns = null; } if (registeredServices != null) { registeredServices.clear(); registeredServices = null; } } /** * shutdown simulates the normal, graceful, shutdown * * @throws InterruptedException */ public void shutdown() throws InterruptedException { stopSimulatingWrites(); stopLastRevThread(); ns.dispose(); service.deactivate(); } /** * crash simulates a kill -9, sort of * * @throws Throwable */ public void crash() throws Throwable { logger.info("crash: stopping simulating writes..."); stopSimulatingWrites(); logger.info("crash: stopping lastrev thread..."); stopLastRevThread(); logger.info("crash: stopped lastrev thread, now setting least to end within 1 sec"); boolean renewed = setLeaseTime(1000 /* 1 sec */); if (!renewed) { logger.info("halt"); fail("did not renew clusterid lease"); } logger.info("crash: now stopping background read/update"); stopAllBackgroundThreads(); // but don't do the following from DocumentNodeStore.dispose(): // * don't do the last internalRunBackgroundUpdateOperations - as // we're trying to simulate a crash here // * don't dispose clusterNodeInfo to leave the node in active state // the DocumentDiscoveryLiteService currently can simply be // deactivated, doesn't differ much from crashing service.deactivate(); logger.info("crash: crash simulation done."); } /** * very hacky way of doing the following: make sure this instance's * clusterNodes entry is marked with a very short (1 sec off) lease end * time so that the crash detection doesn't take a minute (as it would * by default) */ private boolean setLeaseTime(final int leaseTime) throws NoSuchFieldException { ns.getClusterInfo().setLeaseTime(leaseTime); PrivateAccessor.setField(ns.getClusterInfo(), "leaseEndTime", System.currentTimeMillis() + 
(leaseTime / 2)); boolean renewed = ns.renewClusterIdLease(); return renewed; } private AtomicBoolean getIsDisposed() throws NoSuchFieldException { AtomicBoolean isDisposed = (AtomicBoolean) PrivateAccessor.getField(ns, "isDisposed"); return isDisposed; } private void stopAllBackgroundThreads() throws NoSuchFieldException { // get all those background threads... Thread backgroundReadThread = (Thread) PrivateAccessor.getField(ns, "backgroundReadThread"); assertNotNull(backgroundReadThread); Thread backgroundUpdateThread = (Thread) PrivateAccessor.getField(ns, "backgroundUpdateThread"); assertNotNull(backgroundUpdateThread); Thread leaseUpdateThread = (Thread) PrivateAccessor.getField(ns, "leaseUpdateThread"); assertNotNull(leaseUpdateThread); // start doing what DocumentNodeStore.dispose() would do - except do // it very fine controlled, basically: // make sure to stop backgroundReadThread, backgroundUpdateThread // and leaseUpdateThread // but then nothing else. final AtomicBoolean isDisposed = getIsDisposed(); assertFalse(isDisposed.getAndSet(true)); // notify background threads waiting on isDisposed synchronized (isDisposed) { isDisposed.notifyAll(); } try { backgroundReadThread.join(5000); assertTrue(!backgroundReadThread.isAlive()); } catch (InterruptedException e) { // ignore } try { backgroundUpdateThread.join(5000); assertTrue(!backgroundUpdateThread.isAlive()); } catch (InterruptedException e) { // ignore } try { leaseUpdateThread.join(5000); assertTrue(!leaseUpdateThread.isAlive()); } catch (InterruptedException e) { // ignore } } public void stopBgReadThread() throws NoSuchFieldException { final Thread backgroundReadThread = (Thread) PrivateAccessor.getField(ns, "backgroundReadThread"); assertNotNull(backgroundReadThread); final Runnable bgReadRunnable = (Runnable) PrivateAccessor.getField(backgroundReadThread, "target"); assertNotNull(bgReadRunnable); final AtomicBoolean bgReadIsDisposed = new AtomicBoolean(false); PrivateAccessor.setField(bgReadRunnable, "isDisposed", bgReadIsDisposed); assertFalse(bgReadIsDisposed.getAndSet(true)); try { backgroundReadThread.join(5000); assertTrue(!backgroundReadThread.isAlive()); } catch (InterruptedException e) { // ignore } // big of heavy work, but now the backgroundReadThread is stopped // and all the others are still running } public void addNode(String path) throws CommitFailedException { NodeBuilder root = ns.getRoot().builder(); NodeBuilder child = root; String[] split = path.split("/"); for (int i = 1; i < split.length; i++) { child = child.child(split[i]); } logger.info("addNode: " + ns.getClusterId() + " is merging path " + path); ns.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY); } public void setProperty(String path, String key, String value) throws CommitFailedException { NodeBuilder root = ns.getRoot().builder(); NodeBuilder child = root; String[] split = path.split("/"); for (int i = 1; i < split.length; i++) { child = child.child(split[i]); } child.setProperty(key, value); logger.info("setProperty: " + ns.getClusterId() + " is merging path/property " + path + ", key=" + key + ", value=" + value); ns.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY); } public void setLeastTimeout(long timeoutInMs) throws NoSuchFieldException { ns.getClusterInfo().setLeaseTime(timeoutInMs); PrivateAccessor.setField(ns.getClusterInfo(), "leaseEndTime", System.currentTimeMillis() - 1000); } private void startSimulatingWrites(final long writeInterval) { writeSimulationStopped = false; writeSimulationThread = new Thread(new Runnable() { final 
Random random = new Random(); @Override public void run() { while (!writeSimulationStopped) { try { writeSomething(); Thread.sleep(SimplifiedInstance.this.lastRevInterval); } catch (Exception e) { e.printStackTrace(); } } } private void writeSomething() throws CommitFailedException { final String path = "/" + ns.getClusterId() + "/" + random.nextInt(100) + "/" + random.nextInt(100) + "/" + random.nextInt(100); logger.info("Writing [" + ns.getClusterId() + "]" + path); addNode(path); } }); writeSimulationThread.setDaemon(true); writeSimulationThread.start(); } void stopSimulatingWrites() throws InterruptedException { writeSimulationStopped = true; if (writeSimulationThread != null) { writeSimulationThread.join(); } } } interface Expectation { /** * check if the expectation is fulfilled, return true if it is, return a * descriptive error msg if not **/ String fulfilled() throws Exception; } class ViewExpectation implements Expectation { private int[] activeIds; private int[] deactivatingIds; private int[] inactiveIds; private final SimplifiedInstance discoveryLiteCombo; private boolean isFinal = true; ViewExpectation(SimplifiedInstance discoveryLiteCombo) { this.discoveryLiteCombo = discoveryLiteCombo; } private int[] asIntArray(Integer[] arr) { int[] result = new int[arr.length]; for (int i = 0; i < arr.length; i++) { result[i] = arr[i]; } return result; } void setActiveIds(Integer[] activeIds) { this.activeIds = asIntArray(activeIds); } void setActiveIds(int... activeIds) { this.activeIds = activeIds; } void setDeactivatingIds(int... deactivatingIds) { this.deactivatingIds = deactivatingIds; } void setInactiveIds(Integer[] inactiveIds) { this.inactiveIds = asIntArray(inactiveIds); } void setInactiveIds(int... inaactiveIds) { this.inactiveIds = inaactiveIds; } @Override public String fulfilled() throws Exception { final String clusterViewStr = discoveryLiteCombo.getClusterViewStr(); if (clusterViewStr == null) { if (activeIds.length != 0) { return "no clusterView, but expected activeIds: " + beautify(activeIds); } if (deactivatingIds.length != 0) { return "no clusterView, but expected deactivatingIds: " + beautify(deactivatingIds); } if (inactiveIds.length != 0) { return "no clusterView, but expected inactiveIds: " + beautify(inactiveIds); } } if (!discoveryLiteCombo.hasActiveIds(clusterViewStr, activeIds)) { return "activeIds dont match, expected: " + beautify(activeIds) + ", got clusterView: " + clusterViewStr; } if (!discoveryLiteCombo.hasDeactivatingIds(clusterViewStr, deactivatingIds)) { return "deactivatingIds dont match, expected: " + beautify(deactivatingIds) + ", got clusterView: " + clusterViewStr; } if (!discoveryLiteCombo.hasInactiveIds(clusterViewStr, inactiveIds)) { return "inactiveIds dont match, expected: " + beautify(inactiveIds) + ", got clusterView: " + clusterViewStr; } if (discoveryLiteCombo.isFinal() != isFinal) { return "final flag does not match. 
expected: " + isFinal + ", but is: " + discoveryLiteCombo.isFinal(); } return null; } private String beautify(int[] ids) { if (ids == null) { return ""; } StringBuffer sb = new StringBuffer(); for (int i = 0; i < ids.length; i++) { if (i != 0) { sb.append(","); } sb.append(ids[i]); } return sb.toString(); } public void setFinal(boolean isFinal) { this.isFinal = isFinal; } } // private static final boolean MONGO_DB = true; private static final boolean MONGO_DB = false; private List<DocumentMK> mks = Lists.newArrayList(); private MemoryDocumentStore ds; private MemoryBlobStore bs; final Logger logger = LoggerFactory.getLogger(this.getClass()); private List<SimplifiedInstance> allInstances = new LinkedList<SimplifiedInstance>(); @Test public void testActivateDeactivate() throws Exception { // then test normal start with a DocumentNodeStore DocumentMK mk1 = createMK(1, 0); DocumentDiscoveryLiteService discoveryLite = new DocumentDiscoveryLiteService(); PrivateAccessor.setField(discoveryLite, "nodeStore", mk1.nodeStore); BundleContext bc = mock(BundleContext.class); ComponentContext c = mock(ComponentContext.class); when(c.getBundleContext()).thenReturn(bc); discoveryLite.activate(c); verify(c, times(0)).disableComponent(DocumentDiscoveryLiteService.COMPONENT_NAME); discoveryLite.deactivate(); } /** * Borrowed from * http://stackoverflow.com/questions/3301635/change-private-static-final- * field-using-java-reflection */ static void setFinalStatic(Field field, Object newValue) throws Exception { field.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); field.set(null, newValue); } // subsequent tests should get a DocumentDiscoveryLiteService setup from the // start private DocumentNodeStore createNodeStore(String workingDir) throws SecurityException, Exception { // ensure that we always get a fresh cluster[node]id System.setProperty("user.dir", workingDir); setFinalStatic(ClusterNodeInfo.class.getDeclaredField("WORKING_DIR"), workingDir); // then create the DocumentNodeStore DocumentMK mk1 = createMK( 0 /* to make sure the clusterNodes collection is used **/, 500 /* asyncDelay: background interval */); logger.info("createNodeStore: created DocumentNodeStore with cid=" + mk1.nodeStore.getClusterId() + ", workingDir=" + workingDir); return mk1.nodeStore; } private SimplifiedInstance createInstance() throws Exception { final String workingDir = UUID.randomUUID().toString(); return createInstance(workingDir); } private SimplifiedInstance createInstance(String workingDir) throws SecurityException, Exception { DocumentNodeStore ns = createNodeStore(workingDir); return createInstance(ns, workingDir); } private SimplifiedInstance createInstance(DocumentNodeStore ns, String workingDir) throws NoSuchFieldException { DocumentDiscoveryLiteService discoveryLite = new DocumentDiscoveryLiteService(); PrivateAccessor.setField(discoveryLite, "nodeStore", ns); BundleContext bc = mock(BundleContext.class); ComponentContext c = mock(ComponentContext.class); when(c.getBundleContext()).thenReturn(bc); final Map<String, Object> registeredServices = new HashMap<String, Object>(); when(bc.registerService(anyString(), anyObject(), (Properties) anyObject())).then(new Answer<ServiceRegistration>() { @Override public ServiceRegistration answer(InvocationOnMock invocation) { registeredServices.put((String) invocation.getArguments()[0], invocation.getArguments()[1]); return null; } }); 
discoveryLite.activate(c); Descriptors d = (Descriptors) registeredServices.get(Descriptors.class.getName()); final SimplifiedInstance result = new SimplifiedInstance(discoveryLite, ns, d, registeredServices, 500, workingDir); allInstances.add(result); logger.info("Created " + result); return result; } private void waitFor(Expectation expectation, int timeout, String msg) throws Exception { final long tooLate = System.currentTimeMillis() + timeout; while (true) { final String fulfillmentResult = expectation.fulfilled(); if (fulfillmentResult == null) { // everything's fine return; } if (System.currentTimeMillis() > tooLate) { fail("expectation not fulfilled within " + timeout + "ms: " + msg + ", fulfillment result: " + fulfillmentResult); } Thread.sleep(100); } } @Test public void testOneNode() throws Exception { final SimplifiedInstance s1 = createInstance(); final ViewExpectation expectation = new ViewExpectation(s1); expectation.setActiveIds(s1.ns.getClusterId()); waitFor(expectation, 2000, "see myself as active"); } @Test public void testTwoNodesWithCleanShutdown() throws Exception { final SimplifiedInstance s1 = createInstance(); final SimplifiedInstance s2 = createInstance(); final ViewExpectation expectation1 = new ViewExpectation(s1); final ViewExpectation expectation2 = new ViewExpectation(s2); expectation1.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); expectation2.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); waitFor(expectation1, 2000, "first should see both as active"); waitFor(expectation2, 2000, "second should see both as active"); s2.shutdown(); final ViewExpectation expectation1AfterShutdown = new ViewExpectation(s1); expectation1AfterShutdown.setActiveIds(s1.ns.getClusterId()); expectation1AfterShutdown.setInactiveIds(s2.ns.getClusterId()); waitFor(expectation1AfterShutdown, 2000, "first should only see itself after shutdown"); } @Test public void testTwoNodesWithCrash() throws Throwable { final SimplifiedInstance s1 = createInstance(); final SimplifiedInstance s2 = createInstance(); final ViewExpectation expectation1 = new ViewExpectation(s1); final ViewExpectation expectation2 = new ViewExpectation(s2); expectation1.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); expectation2.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); waitFor(expectation1, 2000, "first should see both as active"); waitFor(expectation2, 2000, "second should see both as active"); s2.crash(); final ViewExpectation expectation1AfterShutdown = new ViewExpectation(s1); expectation1AfterShutdown.setActiveIds(s1.ns.getClusterId()); expectation1AfterShutdown.setInactiveIds(s2.ns.getClusterId()); waitFor(expectation1AfterShutdown, 2000, "first should only see itself after shutdown"); } @Test public void testTwoNodesWithCrashAndLongduringRecovery() throws Throwable { doTestTwoNodesWithCrashAndLongduringDeactivation(false); } @Test public void testTwoNodesWithCrashAndLongduringRecoveryAndBacklog() throws Throwable { doTestTwoNodesWithCrashAndLongduringDeactivation(true); } void doTestTwoNodesWithCrashAndLongduringDeactivation(boolean withBacklog) throws Throwable { final int TEST_WAIT_TIMEOUT = 10000; final SimplifiedInstance s1 = createInstance(); final SimplifiedInstance s2 = createInstance(); final ViewExpectation expectation1 = new ViewExpectation(s1); final ViewExpectation expectation2 = new ViewExpectation(s2); expectation1.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); expectation2.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); 
waitFor(expectation1, TEST_WAIT_TIMEOUT, "first should see both as active"); waitFor(expectation2, TEST_WAIT_TIMEOUT, "second should see both as active"); // before crashing s2, make sure that s1's lastRevRecovery thread // doesn't run s1.stopLastRevThread(); if (withBacklog) { // plus also stop s1's backgroundReadThread - in case we want to // test backlog handling s1.stopBgReadThread(); // and then, if we want to do backlog testing, then s2 should write // something // before it crashes, so here it comes: s2.addNode("/foo/bar"); s2.setProperty("/foo/bar", "prop", "value"); } // then crash s2 s2.crash(); // then wait 2 sec Thread.sleep(2000); // at this stage, while s2 has crashed, we have stopped s1's // lastRevRecoveryThread, so we should still see both as active logger.info(s1.getClusterViewStr()); final ViewExpectation expectation1AfterCrashBeforeLastRevRecovery = new ViewExpectation(s1); expectation1AfterCrashBeforeLastRevRecovery.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); waitFor(expectation1AfterCrashBeforeLastRevRecovery, TEST_WAIT_TIMEOUT, "first should still see both as active"); // the next part is a bit tricky: we want to fine-control the // lastRevRecoveryThread's acquire/release locking. // the chosen way to do this is to make heavy use of mockito and two // semaphores: // when acquireRecoveryLock is called, that thread should wait for the // waitBeforeLocking semaphore to be released final MissingLastRevSeeker missingLastRevUtil = (MissingLastRevSeeker) PrivateAccessor .getField(s1.ns.getLastRevRecoveryAgent(), "missingLastRevUtil"); assertNotNull(missingLastRevUtil); MissingLastRevSeeker mockedLongduringMissingLastRevUtil = mock(MissingLastRevSeeker.class, delegatesTo(missingLastRevUtil)); final Semaphore waitBeforeLocking = new Semaphore(0); when(mockedLongduringMissingLastRevUtil.acquireRecoveryLock(anyInt())).then(new Answer<Boolean>() { @Override public Boolean answer(InvocationOnMock invocation) throws Throwable { logger.info("going to waitBeforeLocking"); waitBeforeLocking.acquire(); logger.info("done with waitBeforeLocking"); return missingLastRevUtil.acquireRecoveryLock((Integer) invocation.getArguments()[0]); } }); PrivateAccessor.setField(s1.ns.getLastRevRecoveryAgent(), "missingLastRevUtil", mockedLongduringMissingLastRevUtil); // so let's start the lastRevThread again and wait for that // waitBeforeLocking semaphore to be hit s1.startLastRevThread(); waitFor(new Expectation() { @Override public String fulfilled() throws Exception { if (!waitBeforeLocking.hasQueuedThreads()) { return "no thread queued"; } return null; } }, TEST_WAIT_TIMEOUT, "lastRevRecoveryThread should acquire a lock"); // at this stage the crashed s2 is still not in recovery mode, so let's // check: logger.info(s1.getClusterViewStr()); final ViewExpectation expectation1AfterCrashBeforeLastRevRecoveryLocking = new ViewExpectation(s1); expectation1AfterCrashBeforeLastRevRecoveryLocking.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); waitFor(expectation1AfterCrashBeforeLastRevRecoveryLocking, TEST_WAIT_TIMEOUT, "first should still see both as active"); // one thing, before we let the waitBeforeLocking go, setup the release // semaphore/mock: final Semaphore waitBeforeUnlocking = new Semaphore(0); Mockito.doAnswer(new Answer<Void>() { public Void answer(InvocationOnMock invocation) throws InterruptedException { logger.info("Going to waitBeforeUnlocking"); waitBeforeUnlocking.acquire(); logger.info("Done with waitBeforeUnlocking"); 
missingLastRevUtil.releaseRecoveryLock((Integer) invocation.getArguments()[0]); return null; } }).when(mockedLongduringMissingLastRevUtil).releaseRecoveryLock(anyInt()); // let go (or tschaedere loh) waitBeforeLocking.release(); // then, right after we let the waitBeforeLocking semaphore go, we // should see s2 in recovery mode final ViewExpectation expectation1AfterCrashWhileLastRevRecoveryLocking = new ViewExpectation(s1); expectation1AfterCrashWhileLastRevRecoveryLocking.setActiveIds(s1.ns.getClusterId()); expectation1AfterCrashWhileLastRevRecoveryLocking.setDeactivatingIds(s2.ns.getClusterId()); waitFor(expectation1AfterCrashWhileLastRevRecoveryLocking, TEST_WAIT_TIMEOUT, "first should still see s2 as recovering"); // ok, meanwhile, the lastRevRecoveryAgent should have hit the ot waitFor(new Expectation() { @Override public String fulfilled() throws Exception { if (!waitBeforeUnlocking.hasQueuedThreads()) { return "no thread queued"; } return null; } }, TEST_WAIT_TIMEOUT, "lastRevRecoveryThread should want to release a lock"); // so then, we should still see the same state waitFor(expectation1AfterCrashWhileLastRevRecoveryLocking, TEST_WAIT_TIMEOUT, "first should still see s2 as recovering"); logger.info("Waiting 1,5sec"); Thread.sleep(1500); logger.info("Waiting done"); // first, lets check to see what the view looks like - should be // unchanged: waitFor(expectation1AfterCrashWhileLastRevRecoveryLocking, TEST_WAIT_TIMEOUT, "first should still see s2 as recovering"); // let waitBeforeUnlocking go logger.info("releasing waitBeforeUnlocking, state: " + s1.getClusterViewStr()); waitBeforeUnlocking.release(); logger.info("released waitBeforeUnlocking"); if (!withBacklog) { final ViewExpectation expectationWithoutBacklog = new ViewExpectation(s1); expectationWithoutBacklog.setActiveIds(s1.ns.getClusterId()); expectationWithoutBacklog.setInactiveIds(s2.ns.getClusterId()); waitFor(expectationWithoutBacklog, TEST_WAIT_TIMEOUT, "finally we should see s2 as completely inactive"); } else { // wait just 2 sec to see if the bgReadThread is really stopped logger.info("sleeping 2 sec"); Thread.sleep(2000); logger.info("sleeping 2 sec done, state: " + s1.getClusterViewStr()); // when that's the case, check the view - it should now be in a // special 'final=false' mode final ViewExpectation expectationBeforeBgRead = new ViewExpectation(s1); expectationBeforeBgRead.setActiveIds(s1.ns.getClusterId()); expectationBeforeBgRead.setDeactivatingIds(s2.ns.getClusterId()); expectationBeforeBgRead.setFinal(false); waitFor(expectationBeforeBgRead, TEST_WAIT_TIMEOUT, "first should only see itself after shutdown"); // ook, now we explicitly do a background read to get out of the // backlog situation s1.ns.runBackgroundReadOperations(); final ViewExpectation expectationAfterBgRead = new ViewExpectation(s1); expectationAfterBgRead.setActiveIds(s1.ns.getClusterId()); expectationAfterBgRead.setInactiveIds(s2.ns.getClusterId()); waitFor(expectationAfterBgRead, TEST_WAIT_TIMEOUT, "finally we should see s2 as completely inactive"); } } /** * This test creates a large number of documentnodestores which it starts, * runs, stops in a random fashion, always testing to make sure the * clusterView is correct */ @Test public void testLargeStartStopFiesta() throws Throwable { final List<SimplifiedInstance> instances = new LinkedList<SimplifiedInstance>(); final Map<Integer, String> inactiveIds = new HashMap<Integer, String>(); final Random random = new Random(); final int LOOP_CNT = 50; // with too many loops have also seen 
mongo // connections becoming starved thus test // failed final int CHECK_EVERY = 3; final int MAX_NUM_INSTANCES = 8; for (int i = 0; i < LOOP_CNT; i++) { if (i % CHECK_EVERY == 0) { checkFiestaState(instances, inactiveIds.keySet()); } final int nextInt = random.nextInt(5); // logger.info("testLargeStartStopFiesta: iteration "+i+" with case // "+nextInt); String workingDir = UUID.randomUUID().toString(); switch (nextInt) { case 0: { // increase likelihood of creating instances.. // but reuse an inactive one if possible if (inactiveIds.size() > 0) { logger.info("Case 0 - reactivating an instance..."); final int n = random.nextInt(inactiveIds.size()); final Integer cid = new LinkedList<Integer>(inactiveIds.keySet()).get(n); final String reactivatedWorkingDir = inactiveIds.remove(cid); if (reactivatedWorkingDir == null) { fail("reactivatedWorkingDir null for n=" + n + ", cid=" + cid + ", other inactives: " + inactiveIds); } assertNotNull(reactivatedWorkingDir); logger.info("Case 0 - reactivated instance " + cid + ", workingDir=" + reactivatedWorkingDir); workingDir = reactivatedWorkingDir; logger.info("Case 0: creating instance"); final SimplifiedInstance newInstance = createInstance(workingDir); newInstance.setLeastTimeout(5000); newInstance.startSimulatingWrites(500); logger.info("Case 0: created instance: " + newInstance.ns.getClusterId()); if (newInstance.ns.getClusterId() != cid) { logger.info( "Case 0: reactivated instance did not take over cid - probably a testing artifact. expected cid: {}, actual cid: {}", cid, newInstance.ns.getClusterId()); inactiveIds.put(cid, reactivatedWorkingDir); // remove the newly reactivated from the inactives - // although it shouldn't be there, it might! inactiveIds.remove(newInstance.ns.getClusterId()); } instances.add(newInstance); } break; } case 1: { // creates a new instance if (instances.size() < MAX_NUM_INSTANCES) { logger.info("Case 1: creating instance"); final SimplifiedInstance newInstance = createInstance(workingDir); newInstance.setLeastTimeout(5000); newInstance.startSimulatingWrites(500); logger.info("Case 1: created instance: " + newInstance.ns.getClusterId()); instances.add(newInstance); } break; } case 2: { // do nothing break; } case 3: { // shutdown instance if (instances.size() > 1) { // before shutting down: make sure we have a stable view // (we could otherwise not correctly startup too) checkFiestaState(instances, inactiveIds.keySet()); final SimplifiedInstance instance = instances.remove(random.nextInt(instances.size())); assertNotNull(instance.workingDir); logger.info("Case 3: Shutdown instance: " + instance.ns.getClusterId()); inactiveIds.put(instance.ns.getClusterId(), instance.workingDir); instance.shutdown(); } break; } case 4: { // crash instance if (instances.size() > 1) { // before crashing make sure we have a stable view (we // could otherwise not correctly startup too) checkFiestaState(instances, inactiveIds.keySet()); final SimplifiedInstance instance = instances.remove(random.nextInt(instances.size())); assertNotNull(instance.workingDir); logger.info("Case 4: Crashing instance: " + instance.ns.getClusterId()); inactiveIds.put(instance.ns.getClusterId(), instance.workingDir); instance.addNode("/" + instance.ns.getClusterId() + "/stuffForRecovery/" + random.nextInt(10000)); instance.crash(); } break; } } } } private void dumpChildren(DocumentNodeState root) { logger.info("testEmptyParentRecovery: root: " + root); Iterator<String> it = root.getChildNodeNames().iterator(); while (it.hasNext()) { String n = it.next(); 
logger.info("testEmptyParentRecovery: a child: '" + n + "'"); } } private void checkFiestaState(final List<SimplifiedInstance> instances, Set<Integer> inactiveIds) throws Exception { final List<Integer> activeIds = new LinkedList<Integer>(); for (Iterator<SimplifiedInstance> it = instances.iterator(); it.hasNext();) { SimplifiedInstance anInstance = it.next(); activeIds.add(anInstance.ns.getClusterId()); } for (Iterator<SimplifiedInstance> it = instances.iterator(); it.hasNext();) { SimplifiedInstance anInstance = it.next(); final ViewExpectation e = new ViewExpectation(anInstance); e.setActiveIds(activeIds.toArray(new Integer[activeIds.size()])); e.setInactiveIds(inactiveIds.toArray(new Integer[inactiveIds.size()])); waitFor(e, 20000, "checkFiestaState failed for " + anInstance + ", with instances: " + instances + ", and inactiveIds: " + inactiveIds); } } @Before @After public void clear() { for (SimplifiedInstance i : allInstances) { i.dispose(); } for (DocumentMK mk : mks) { mk.dispose(); } mks.clear(); if (MONGO_DB) { MongoConnection connection = MongoUtils.getConnection(); if (connection != null) { DB db = connection.getDB(); if (db != null) { MongoUtils.dropCollections(db); } } } } private DocumentMK createMK(int clusterId, int asyncDelay) { if (MONGO_DB) { DB db = MongoUtils.getConnection().getDB(); return register(new DocumentMK.Builder().setMongoDB(db).setLeaseCheck(false).setClusterId(clusterId) .setAsyncDelay(asyncDelay).open()); } else { if (ds == null) { ds = new MemoryDocumentStore(); } if (bs == null) { bs = new MemoryBlobStore(); } return createMK(clusterId, asyncDelay, ds, bs); } } private DocumentMK createMK(int clusterId, int asyncDelay, DocumentStore ds, BlobStore bs) { return register(new DocumentMK.Builder().setDocumentStore(ds).setBlobStore(bs).setClusterId(clusterId).setLeaseCheck(false) .setAsyncDelay(asyncDelay).open()); } private DocumentMK register(DocumentMK mk) { mks.add(mk); return mk; } }
oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentDiscoveryLiteServiceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.AdditionalAnswers.delegatesTo; import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.Set; import java.util.UUID; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; import javax.jcr.PropertyType; import javax.jcr.Value; import javax.jcr.ValueFormatException; import junitx.util.PrivateAccessor; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.api.Descriptors; import org.apache.jackrabbit.oak.commons.json.JsonObject; import org.apache.jackrabbit.oak.commons.json.JsopTokenizer; import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore; import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection; import org.apache.jackrabbit.oak.spi.blob.BlobStore; import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.commit.EmptyHook; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import com.mongodb.DB; /** * Tests for the DocumentDiscoveryLiteService */ public class DocumentDiscoveryLiteServiceTest { /** * container for what should represent an instance, but is not a complete * one, hence 'simplified'. 
it contains most importantly the * DocuemntNodeStore and the discoveryLite service */ class SimplifiedInstance { private DocumentDiscoveryLiteService service; private DocumentNodeStore ns; private final Descriptors descriptors; private Map<String, Object> registeredServices; private final long lastRevInterval; private volatile boolean lastRevStopped = false; private volatile boolean writeSimulationStopped = false; private Thread lastRevThread; private Thread writeSimulationThread; public String workingDir; SimplifiedInstance(DocumentDiscoveryLiteService service, DocumentNodeStore ns, Descriptors descriptors, Map<String, Object> registeredServices, long lastRevInterval, String workingDir) { this.service = service; this.ns = ns; this.workingDir = workingDir; this.descriptors = descriptors; this.registeredServices = registeredServices; this.lastRevInterval = lastRevInterval; if (lastRevInterval > 0) { startLastRevThread(); } } @Override public String toString() { return "SimplifiedInstance[cid=" + ns.getClusterId() + "]"; } private void startLastRevThread() { lastRevStopped = false; lastRevThread = new Thread(new Runnable() { @Override public void run() { while (!lastRevStopped) { SimplifiedInstance.this.ns.getLastRevRecoveryAgent().performRecoveryIfNeeded(); try { Thread.sleep(SimplifiedInstance.this.lastRevInterval); } catch (InterruptedException e) { e.printStackTrace(); } } } }); lastRevThread.setDaemon(true); lastRevThread.setName("lastRevThread[cid=" + ns.getClusterId() + "]"); lastRevThread.start(); } void stopLastRevThread() throws InterruptedException { lastRevStopped = true; lastRevThread.join(); } boolean isFinal() throws Exception { final JsonObject clusterViewObj = getClusterViewObj(); if (clusterViewObj == null) { throw new IllegalStateException("should always have that final flag set"); } String finalStr = clusterViewObj.getProperties().get("final"); return Boolean.valueOf(finalStr); } boolean hasActiveIds(String clusterViewStr, int... expected) throws Exception { return hasIds(clusterViewStr, "active", expected); } boolean hasDeactivatingIds(String clusterViewStr, int... expected) throws Exception { return hasIds(clusterViewStr, "deactivating", expected); } boolean hasInactiveIds(String clusterViewStr, int... expected) throws Exception { return hasIds(clusterViewStr, "inactive", expected); } private boolean hasIds(final String clusterViewStr, final String key, int... expectedIds) throws Exception { final JsonObject clusterViewObj = asJsonObject(clusterViewStr); String actualIdsStr = clusterViewObj == null ? 
null : clusterViewObj.getProperties().get(key); boolean actualEmpty = actualIdsStr == null || actualIdsStr.length() == 0 || actualIdsStr.equals("[]"); boolean expectedEmpty = expectedIds == null || expectedIds.length == 0; if (actualEmpty && expectedEmpty) { return true; } if (actualEmpty != expectedEmpty) { return false; } final List<Integer> actualList = Arrays .asList(ClusterViewDocument.csvToIntegerArray(actualIdsStr.substring(1, actualIdsStr.length() - 1))); if (expectedIds.length != actualList.size()) { return false; } for (int i = 0; i < expectedIds.length; i++) { int anExpectedId = expectedIds[i]; if (!actualList.contains(anExpectedId)) { return false; } } return true; } JsonObject getClusterViewObj() throws Exception { final String json = getClusterViewStr(); return asJsonObject(json); } private JsonObject asJsonObject(final String json) { if (json == null) { return null; } JsopTokenizer t = new JsopTokenizer(json); t.read('{'); JsonObject o = JsonObject.create(t); return o; } String getClusterViewStr() throws Exception { return getDescriptor(DocumentDiscoveryLiteService.OAK_DISCOVERYLITE_CLUSTERVIEW); } String getDescriptor(String key) throws Exception { final Value value = descriptors.getValue(key); if (value == null) { return null; } if (value.getType() != PropertyType.STRING) { return null; } try { return value.getString(); } catch (ValueFormatException vfe) { return null; } } public void dispose() { logger.info("Disposing " + this); try { stopSimulatingWrites(); } catch (InterruptedException e) { fail("interrupted"); } if (lastRevThread != null) { try { stopLastRevThread(); } catch (InterruptedException ok) { fail("interrupted"); } lastRevThread = null; } if (service != null) { service.deactivate(); service = null; } if (ns != null) { ns.dispose(); ns = null; } if (registeredServices != null) { registeredServices.clear(); registeredServices = null; } } /** * shutdown simulates the normal, graceful, shutdown * * @throws InterruptedException */ public void shutdown() throws InterruptedException { stopSimulatingWrites(); stopLastRevThread(); ns.dispose(); service.deactivate(); } /** * crash simulates a kill -9, sort of * * @throws Throwable */ public void crash() throws Throwable { logger.info("crash: stopping simulating writes..."); stopSimulatingWrites(); logger.info("crash: stopping lastrev thread..."); stopLastRevThread(); logger.info("crash: stopped lastrev thread, now setting least to end within 1 sec"); boolean renewed = setLeaseTime(1000 /* 1 sec */); if (!renewed) { logger.info("halt"); fail("did not renew clusterid lease"); } logger.info("crash: now stopping background read/update"); stopAllBackgroundThreads(); // but don't do the following from DocumentNodeStore.dispose(): // * don't do the last internalRunBackgroundUpdateOperations - as // we're trying to simulate a crash here // * don't dispose clusterNodeInfo to leave the node in active state // the DocumentDiscoveryLiteService currently can simply be // deactivated, doesn't differ much from crashing service.deactivate(); logger.info("crash: crash simulation done."); } /** * very hacky way of doing the following: make sure this instance's * clusterNodes entry is marked with a very short (1 sec off) lease end * time so that the crash detection doesn't take a minute (as it would * by default) */ private boolean setLeaseTime(final int leaseTime) throws NoSuchFieldException { ns.getClusterInfo().setLeaseTime(leaseTime); PrivateAccessor.setField(ns.getClusterInfo(), "leaseEndTime", System.currentTimeMillis() + 
(leaseTime / 2)); boolean renewed = ns.renewClusterIdLease(); return renewed; } private AtomicBoolean getIsDisposed() throws NoSuchFieldException { AtomicBoolean isDisposed = (AtomicBoolean) PrivateAccessor.getField(ns, "isDisposed"); return isDisposed; } private void stopAllBackgroundThreads() throws NoSuchFieldException { // get all those background threads... Thread backgroundReadThread = (Thread) PrivateAccessor.getField(ns, "backgroundReadThread"); assertNotNull(backgroundReadThread); Thread backgroundUpdateThread = (Thread) PrivateAccessor.getField(ns, "backgroundUpdateThread"); assertNotNull(backgroundUpdateThread); Thread leaseUpdateThread = (Thread) PrivateAccessor.getField(ns, "leaseUpdateThread"); assertNotNull(leaseUpdateThread); // start doing what DocumentNodeStore.dispose() would do - except do // it very fine controlled, basically: // make sure to stop backgroundReadThread, backgroundUpdateThread // and leaseUpdateThread // but then nothing else. final AtomicBoolean isDisposed = getIsDisposed(); assertFalse(isDisposed.getAndSet(true)); // notify background threads waiting on isDisposed synchronized (isDisposed) { isDisposed.notifyAll(); } try { backgroundReadThread.join(5000); assertTrue(!backgroundReadThread.isAlive()); } catch (InterruptedException e) { // ignore } try { backgroundUpdateThread.join(5000); assertTrue(!backgroundUpdateThread.isAlive()); } catch (InterruptedException e) { // ignore } try { leaseUpdateThread.join(5000); assertTrue(!leaseUpdateThread.isAlive()); } catch (InterruptedException e) { // ignore } } public void stopBgReadThread() throws NoSuchFieldException { final Thread backgroundReadThread = (Thread) PrivateAccessor.getField(ns, "backgroundReadThread"); assertNotNull(backgroundReadThread); final Runnable bgReadRunnable = (Runnable) PrivateAccessor.getField(backgroundReadThread, "target"); assertNotNull(bgReadRunnable); final AtomicBoolean bgReadIsDisposed = new AtomicBoolean(false); PrivateAccessor.setField(bgReadRunnable, "isDisposed", bgReadIsDisposed); assertFalse(bgReadIsDisposed.getAndSet(true)); try { backgroundReadThread.join(5000); assertTrue(!backgroundReadThread.isAlive()); } catch (InterruptedException e) { // ignore } // big of heavy work, but now the backgroundReadThread is stopped // and all the others are still running } public void addNode(String path) throws CommitFailedException { NodeBuilder root = ns.getRoot().builder(); NodeBuilder child = root; String[] split = path.split("/"); for (int i = 1; i < split.length; i++) { child = child.child(split[i]); } logger.info("addNode: " + ns.getClusterId() + " is merging path " + path); ns.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY); } public void setProperty(String path, String key, String value) throws CommitFailedException { NodeBuilder root = ns.getRoot().builder(); NodeBuilder child = root; String[] split = path.split("/"); for (int i = 1; i < split.length; i++) { child = child.child(split[i]); } child.setProperty(key, value); logger.info("setProperty: " + ns.getClusterId() + " is merging path/property " + path + ", key=" + key + ", value=" + value); ns.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY); } public void setLeastTimeout(long timeoutInMs) throws NoSuchFieldException { ns.getClusterInfo().setLeaseTime(timeoutInMs); PrivateAccessor.setField(ns.getClusterInfo(), "leaseEndTime", System.currentTimeMillis() - 1000); } private void startSimulatingWrites(final long writeInterval) { writeSimulationStopped = false; writeSimulationThread = new Thread(new Runnable() { final 
Random random = new Random(); @Override public void run() { while (!writeSimulationStopped) { try { writeSomething(); Thread.sleep(SimplifiedInstance.this.lastRevInterval); } catch (Exception e) { e.printStackTrace(); } } } private void writeSomething() throws CommitFailedException { final String path = "/" + ns.getClusterId() + "/" + random.nextInt(100) + "/" + random.nextInt(100) + "/" + random.nextInt(100); logger.info("Writing [" + ns.getClusterId() + "]" + path); addNode(path); } }); writeSimulationThread.setDaemon(true); writeSimulationThread.start(); } void stopSimulatingWrites() throws InterruptedException { writeSimulationStopped = true; if (writeSimulationThread != null) { writeSimulationThread.join(); } } } interface Expectation { /** * check if the expectation is fulfilled, return true if it is, return a * descriptive error msg if not **/ String fulfilled() throws Exception; } class ViewExpectation implements Expectation { private int[] activeIds; private int[] deactivatingIds; private int[] inactiveIds; private final SimplifiedInstance discoveryLiteCombo; private boolean isFinal = true; ViewExpectation(SimplifiedInstance discoveryLiteCombo) { this.discoveryLiteCombo = discoveryLiteCombo; } private int[] asIntArray(Integer[] arr) { int[] result = new int[arr.length]; for (int i = 0; i < arr.length; i++) { result[i] = arr[i]; } return result; } void setActiveIds(Integer[] activeIds) { this.activeIds = asIntArray(activeIds); } void setActiveIds(int... activeIds) { this.activeIds = activeIds; } void setDeactivatingIds(int... deactivatingIds) { this.deactivatingIds = deactivatingIds; } void setInactiveIds(Integer[] inactiveIds) { this.inactiveIds = asIntArray(inactiveIds); } void setInactiveIds(int... inaactiveIds) { this.inactiveIds = inaactiveIds; } @Override public String fulfilled() throws Exception { final String clusterViewStr = discoveryLiteCombo.getClusterViewStr(); if (clusterViewStr == null) { if (activeIds.length != 0) { return "no clusterView, but expected activeIds: " + beautify(activeIds); } if (deactivatingIds.length != 0) { return "no clusterView, but expected deactivatingIds: " + beautify(deactivatingIds); } if (inactiveIds.length != 0) { return "no clusterView, but expected inactiveIds: " + beautify(inactiveIds); } } if (!discoveryLiteCombo.hasActiveIds(clusterViewStr, activeIds)) { return "activeIds dont match, expected: " + beautify(activeIds) + ", got clusterView: " + clusterViewStr; } if (!discoveryLiteCombo.hasDeactivatingIds(clusterViewStr, deactivatingIds)) { return "deactivatingIds dont match, expected: " + beautify(deactivatingIds) + ", got clusterView: " + clusterViewStr; } if (!discoveryLiteCombo.hasInactiveIds(clusterViewStr, inactiveIds)) { return "inactiveIds dont match, expected: " + beautify(inactiveIds) + ", got clusterView: " + clusterViewStr; } if (discoveryLiteCombo.isFinal() != isFinal) { return "final flag does not match. 
expected: " + isFinal + ", but is: " + discoveryLiteCombo.isFinal(); } return null; } private String beautify(int[] ids) { if (ids == null) { return ""; } StringBuffer sb = new StringBuffer(); for (int i = 0; i < ids.length; i++) { if (i != 0) { sb.append(","); } sb.append(ids[i]); } return sb.toString(); } public void setFinal(boolean isFinal) { this.isFinal = isFinal; } } // private static final boolean MONGO_DB = true; private static final boolean MONGO_DB = true; private List<DocumentMK> mks = Lists.newArrayList(); private MemoryDocumentStore ds; private MemoryBlobStore bs; final Logger logger = LoggerFactory.getLogger(this.getClass()); private List<SimplifiedInstance> allInstances = new LinkedList<SimplifiedInstance>(); @Test public void testActivateDeactivate() throws Exception { // then test normal start with a DocumentNodeStore DocumentMK mk1 = createMK(1, 0); DocumentDiscoveryLiteService discoveryLite = new DocumentDiscoveryLiteService(); PrivateAccessor.setField(discoveryLite, "nodeStore", mk1.nodeStore); BundleContext bc = mock(BundleContext.class); ComponentContext c = mock(ComponentContext.class); when(c.getBundleContext()).thenReturn(bc); discoveryLite.activate(c); verify(c, times(0)).disableComponent(DocumentDiscoveryLiteService.COMPONENT_NAME); discoveryLite.deactivate(); } /** * Borrowed from * http://stackoverflow.com/questions/3301635/change-private-static-final- * field-using-java-reflection */ static void setFinalStatic(Field field, Object newValue) throws Exception { field.setAccessible(true); Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); field.set(null, newValue); } // subsequent tests should get a DocumentDiscoveryLiteService setup from the // start private DocumentNodeStore createNodeStore(String workingDir) throws SecurityException, Exception { // ensure that we always get a fresh cluster[node]id System.setProperty("user.dir", workingDir); setFinalStatic(ClusterNodeInfo.class.getDeclaredField("WORKING_DIR"), workingDir); // then create the DocumentNodeStore DocumentMK mk1 = createMK( 0 /* to make sure the clusterNodes collection is used **/, 500 /* asyncDelay: background interval */); logger.info("createNodeStore: created DocumentNodeStore with cid=" + mk1.nodeStore.getClusterId() + ", workingDir=" + workingDir); return mk1.nodeStore; } private SimplifiedInstance createInstance() throws Exception { final String workingDir = UUID.randomUUID().toString(); return createInstance(workingDir); } private SimplifiedInstance createInstance(String workingDir) throws SecurityException, Exception { DocumentNodeStore ns = createNodeStore(workingDir); return createInstance(ns, workingDir); } private SimplifiedInstance createInstance(DocumentNodeStore ns, String workingDir) throws NoSuchFieldException { DocumentDiscoveryLiteService discoveryLite = new DocumentDiscoveryLiteService(); PrivateAccessor.setField(discoveryLite, "nodeStore", ns); BundleContext bc = mock(BundleContext.class); ComponentContext c = mock(ComponentContext.class); when(c.getBundleContext()).thenReturn(bc); final Map<String, Object> registeredServices = new HashMap<String, Object>(); when(bc.registerService(anyString(), anyObject(), (Properties) anyObject())).then(new Answer<ServiceRegistration>() { @Override public ServiceRegistration answer(InvocationOnMock invocation) { registeredServices.put((String) invocation.getArguments()[0], invocation.getArguments()[1]); return null; } }); 
discoveryLite.activate(c); Descriptors d = (Descriptors) registeredServices.get(Descriptors.class.getName()); final SimplifiedInstance result = new SimplifiedInstance(discoveryLite, ns, d, registeredServices, 500, workingDir); allInstances.add(result); logger.info("Created " + result); return result; } private void waitFor(Expectation expectation, int timeout, String msg) throws Exception { final long tooLate = System.currentTimeMillis() + timeout; while (true) { final String fulfillmentResult = expectation.fulfilled(); if (fulfillmentResult == null) { // everything's fine return; } if (System.currentTimeMillis() > tooLate) { fail("expectation not fulfilled within " + timeout + "ms: " + msg + ", fulfillment result: " + fulfillmentResult); } Thread.sleep(100); } } @Test public void testOneNode() throws Exception { final SimplifiedInstance s1 = createInstance(); final ViewExpectation expectation = new ViewExpectation(s1); expectation.setActiveIds(s1.ns.getClusterId()); waitFor(expectation, 2000, "see myself as active"); } @Test public void testTwoNodesWithCleanShutdown() throws Exception { final SimplifiedInstance s1 = createInstance(); final SimplifiedInstance s2 = createInstance(); final ViewExpectation expectation1 = new ViewExpectation(s1); final ViewExpectation expectation2 = new ViewExpectation(s2); expectation1.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); expectation2.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); waitFor(expectation1, 2000, "first should see both as active"); waitFor(expectation2, 2000, "second should see both as active"); s2.shutdown(); final ViewExpectation expectation1AfterShutdown = new ViewExpectation(s1); expectation1AfterShutdown.setActiveIds(s1.ns.getClusterId()); expectation1AfterShutdown.setInactiveIds(s2.ns.getClusterId()); waitFor(expectation1AfterShutdown, 2000, "first should only see itself after shutdown"); } @Test public void testTwoNodesWithCrash() throws Throwable { final SimplifiedInstance s1 = createInstance(); final SimplifiedInstance s2 = createInstance(); final ViewExpectation expectation1 = new ViewExpectation(s1); final ViewExpectation expectation2 = new ViewExpectation(s2); expectation1.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); expectation2.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); waitFor(expectation1, 2000, "first should see both as active"); waitFor(expectation2, 2000, "second should see both as active"); s2.crash(); final ViewExpectation expectation1AfterShutdown = new ViewExpectation(s1); expectation1AfterShutdown.setActiveIds(s1.ns.getClusterId()); expectation1AfterShutdown.setInactiveIds(s2.ns.getClusterId()); waitFor(expectation1AfterShutdown, 2000, "first should only see itself after shutdown"); } @Test public void testTwoNodesWithCrashAndLongduringRecovery() throws Throwable { doTestTwoNodesWithCrashAndLongduringDeactivation(false); } @Test public void testTwoNodesWithCrashAndLongduringRecoveryAndBacklog() throws Throwable { doTestTwoNodesWithCrashAndLongduringDeactivation(true); } void doTestTwoNodesWithCrashAndLongduringDeactivation(boolean withBacklog) throws Throwable { final int TEST_WAIT_TIMEOUT = 10000; final SimplifiedInstance s1 = createInstance(); final SimplifiedInstance s2 = createInstance(); final ViewExpectation expectation1 = new ViewExpectation(s1); final ViewExpectation expectation2 = new ViewExpectation(s2); expectation1.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); expectation2.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); 
waitFor(expectation1, TEST_WAIT_TIMEOUT, "first should see both as active"); waitFor(expectation2, TEST_WAIT_TIMEOUT, "second should see both as active"); // before crashing s2, make sure that s1's lastRevRecovery thread // doesn't run s1.stopLastRevThread(); if (withBacklog) { // plus also stop s1's backgroundReadThread - in case we want to // test backlog handling s1.stopBgReadThread(); // and then, if we want to do backlog testing, then s2 should write // something // before it crashes, so here it comes: s2.addNode("/foo/bar"); s2.setProperty("/foo/bar", "prop", "value"); } // then crash s2 s2.crash(); // then wait 2 sec Thread.sleep(2000); // at this stage, while s2 has crashed, we have stopped s1's // lastRevRecoveryThread, so we should still see both as active logger.info(s1.getClusterViewStr()); final ViewExpectation expectation1AfterCrashBeforeLastRevRecovery = new ViewExpectation(s1); expectation1AfterCrashBeforeLastRevRecovery.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); waitFor(expectation1AfterCrashBeforeLastRevRecovery, TEST_WAIT_TIMEOUT, "first should still see both as active"); // the next part is a bit tricky: we want to fine-control the // lastRevRecoveryThread's acquire/release locking. // the chosen way to do this is to make heavy use of mockito and two // semaphores: // when acquireRecoveryLock is called, that thread should wait for the // waitBeforeLocking semaphore to be released final MissingLastRevSeeker missingLastRevUtil = (MissingLastRevSeeker) PrivateAccessor .getField(s1.ns.getLastRevRecoveryAgent(), "missingLastRevUtil"); assertNotNull(missingLastRevUtil); MissingLastRevSeeker mockedLongduringMissingLastRevUtil = mock(MissingLastRevSeeker.class, delegatesTo(missingLastRevUtil)); final Semaphore waitBeforeLocking = new Semaphore(0); when(mockedLongduringMissingLastRevUtil.acquireRecoveryLock(anyInt())).then(new Answer<Boolean>() { @Override public Boolean answer(InvocationOnMock invocation) throws Throwable { logger.info("going to waitBeforeLocking"); waitBeforeLocking.acquire(); logger.info("done with waitBeforeLocking"); return missingLastRevUtil.acquireRecoveryLock((Integer) invocation.getArguments()[0]); } }); PrivateAccessor.setField(s1.ns.getLastRevRecoveryAgent(), "missingLastRevUtil", mockedLongduringMissingLastRevUtil); // so let's start the lastRevThread again and wait for that // waitBeforeLocking semaphore to be hit s1.startLastRevThread(); waitFor(new Expectation() { @Override public String fulfilled() throws Exception { if (!waitBeforeLocking.hasQueuedThreads()) { return "no thread queued"; } return null; } }, TEST_WAIT_TIMEOUT, "lastRevRecoveryThread should acquire a lock"); // at this stage the crashed s2 is still not in recovery mode, so let's // check: logger.info(s1.getClusterViewStr()); final ViewExpectation expectation1AfterCrashBeforeLastRevRecoveryLocking = new ViewExpectation(s1); expectation1AfterCrashBeforeLastRevRecoveryLocking.setActiveIds(s1.ns.getClusterId(), s2.ns.getClusterId()); waitFor(expectation1AfterCrashBeforeLastRevRecoveryLocking, TEST_WAIT_TIMEOUT, "first should still see both as active"); // one thing, before we let the waitBeforeLocking go, setup the release // semaphore/mock: final Semaphore waitBeforeUnlocking = new Semaphore(0); Mockito.doAnswer(new Answer<Void>() { public Void answer(InvocationOnMock invocation) throws InterruptedException { logger.info("Going to waitBeforeUnlocking"); waitBeforeUnlocking.acquire(); logger.info("Done with waitBeforeUnlocking"); 
missingLastRevUtil.releaseRecoveryLock((Integer) invocation.getArguments()[0]); return null; } }).when(mockedLongduringMissingLastRevUtil).releaseRecoveryLock(anyInt()); // let go (or tschaedere loh) waitBeforeLocking.release(); // then, right after we let the waitBeforeLocking semaphore go, we // should see s2 in recovery mode final ViewExpectation expectation1AfterCrashWhileLastRevRecoveryLocking = new ViewExpectation(s1); expectation1AfterCrashWhileLastRevRecoveryLocking.setActiveIds(s1.ns.getClusterId()); expectation1AfterCrashWhileLastRevRecoveryLocking.setDeactivatingIds(s2.ns.getClusterId()); waitFor(expectation1AfterCrashWhileLastRevRecoveryLocking, TEST_WAIT_TIMEOUT, "first should still see s2 as recovering"); // ok, meanwhile, the lastRevRecoveryAgent should have hit the ot waitFor(new Expectation() { @Override public String fulfilled() throws Exception { if (!waitBeforeUnlocking.hasQueuedThreads()) { return "no thread queued"; } return null; } }, TEST_WAIT_TIMEOUT, "lastRevRecoveryThread should want to release a lock"); // so then, we should still see the same state waitFor(expectation1AfterCrashWhileLastRevRecoveryLocking, TEST_WAIT_TIMEOUT, "first should still see s2 as recovering"); logger.info("Waiting 1,5sec"); Thread.sleep(1500); logger.info("Waiting done"); // first, lets check to see what the view looks like - should be // unchanged: waitFor(expectation1AfterCrashWhileLastRevRecoveryLocking, TEST_WAIT_TIMEOUT, "first should still see s2 as recovering"); // let waitBeforeUnlocking go logger.info("releasing waitBeforeUnlocking, state: " + s1.getClusterViewStr()); waitBeforeUnlocking.release(); logger.info("released waitBeforeUnlocking"); if (!withBacklog) { final ViewExpectation expectationWithoutBacklog = new ViewExpectation(s1); expectationWithoutBacklog.setActiveIds(s1.ns.getClusterId()); expectationWithoutBacklog.setInactiveIds(s2.ns.getClusterId()); waitFor(expectationWithoutBacklog, TEST_WAIT_TIMEOUT, "finally we should see s2 as completely inactive"); } else { // wait just 2 sec to see if the bgReadThread is really stopped logger.info("sleeping 2 sec"); Thread.sleep(2000); logger.info("sleeping 2 sec done, state: " + s1.getClusterViewStr()); // when that's the case, check the view - it should now be in a // special 'final=false' mode final ViewExpectation expectationBeforeBgRead = new ViewExpectation(s1); expectationBeforeBgRead.setActiveIds(s1.ns.getClusterId()); expectationBeforeBgRead.setDeactivatingIds(s2.ns.getClusterId()); expectationBeforeBgRead.setFinal(false); waitFor(expectationBeforeBgRead, TEST_WAIT_TIMEOUT, "first should only see itself after shutdown"); // ook, now we explicitly do a background read to get out of the // backlog situation s1.ns.runBackgroundReadOperations(); final ViewExpectation expectationAfterBgRead = new ViewExpectation(s1); expectationAfterBgRead.setActiveIds(s1.ns.getClusterId()); expectationAfterBgRead.setInactiveIds(s2.ns.getClusterId()); waitFor(expectationAfterBgRead, TEST_WAIT_TIMEOUT, "finally we should see s2 as completely inactive"); } } /** * This test creates a large number of documentnodestores which it starts, * runs, stops in a random fashion, always testing to make sure the * clusterView is correct */ @Test public void testLargeStartStopFiesta() throws Throwable { final List<SimplifiedInstance> instances = new LinkedList<SimplifiedInstance>(); final Map<Integer, String> inactiveIds = new HashMap<Integer, String>(); final Random random = new Random(); final int LOOP_CNT = 50; // with too many loops have also seen 
mongo // connections becoming starved thus test // failed final int CHECK_EVERY = 3; final int MAX_NUM_INSTANCES = 8; for (int i = 0; i < LOOP_CNT; i++) { if (i % CHECK_EVERY == 0) { checkFiestaState(instances, inactiveIds.keySet()); } final int nextInt = random.nextInt(5); // logger.info("testLargeStartStopFiesta: iteration "+i+" with case // "+nextInt); String workingDir = UUID.randomUUID().toString(); switch (nextInt) { case 0: { // increase likelihood of creating instances.. // but reuse an inactive one if possible if (inactiveIds.size() > 0) { logger.info("Case 0 - reactivating an instance..."); final int n = random.nextInt(inactiveIds.size()); final Integer cid = new LinkedList<Integer>(inactiveIds.keySet()).get(n); final String reactivatedWorkingDir = inactiveIds.remove(cid); if (reactivatedWorkingDir == null) { fail("reactivatedWorkingDir null for n=" + n + ", cid=" + cid + ", other inactives: " + inactiveIds); } assertNotNull(reactivatedWorkingDir); logger.info("Case 0 - reactivated instance " + cid + ", workingDir=" + reactivatedWorkingDir); workingDir = reactivatedWorkingDir; logger.info("Case 0: creating instance"); final SimplifiedInstance newInstance = createInstance(workingDir); newInstance.setLeastTimeout(5000); newInstance.startSimulatingWrites(500); logger.info("Case 0: created instance: " + newInstance.ns.getClusterId()); if (newInstance.ns.getClusterId() != cid) { logger.info( "Case 0: reactivated instance did not take over cid - probably a testing artifact. expected cid: {}, actual cid: {}", cid, newInstance.ns.getClusterId()); inactiveIds.put(cid, reactivatedWorkingDir); // remove the newly reactivated from the inactives - // although it shouldn't be there, it might! inactiveIds.remove(newInstance.ns.getClusterId()); } instances.add(newInstance); } break; } case 1: { // creates a new instance if (instances.size() < MAX_NUM_INSTANCES) { logger.info("Case 1: creating instance"); final SimplifiedInstance newInstance = createInstance(workingDir); newInstance.setLeastTimeout(5000); newInstance.startSimulatingWrites(500); logger.info("Case 1: created instance: " + newInstance.ns.getClusterId()); instances.add(newInstance); } break; } case 2: { // do nothing break; } case 3: { // shutdown instance if (instances.size() > 1) { // before shutting down: make sure we have a stable view // (we could otherwise not correctly startup too) checkFiestaState(instances, inactiveIds.keySet()); final SimplifiedInstance instance = instances.remove(random.nextInt(instances.size())); assertNotNull(instance.workingDir); logger.info("Case 3: Shutdown instance: " + instance.ns.getClusterId()); inactiveIds.put(instance.ns.getClusterId(), instance.workingDir); instance.shutdown(); } break; } case 4: { // crash instance if (instances.size() > 1) { // before crashing make sure we have a stable view (we // could otherwise not correctly startup too) checkFiestaState(instances, inactiveIds.keySet()); final SimplifiedInstance instance = instances.remove(random.nextInt(instances.size())); assertNotNull(instance.workingDir); logger.info("Case 4: Crashing instance: " + instance.ns.getClusterId()); inactiveIds.put(instance.ns.getClusterId(), instance.workingDir); instance.addNode("/" + instance.ns.getClusterId() + "/stuffForRecovery/" + random.nextInt(10000)); instance.crash(); } break; } } } } private void dumpChildren(DocumentNodeState root) { logger.info("testEmptyParentRecovery: root: " + root); Iterator<String> it = root.getChildNodeNames().iterator(); while (it.hasNext()) { String n = it.next(); 
logger.info("testEmptyParentRecovery: a child: '" + n + "'"); } } private void checkFiestaState(final List<SimplifiedInstance> instances, Set<Integer> inactiveIds) throws Exception { final List<Integer> activeIds = new LinkedList<Integer>(); for (Iterator<SimplifiedInstance> it = instances.iterator(); it.hasNext();) { SimplifiedInstance anInstance = it.next(); activeIds.add(anInstance.ns.getClusterId()); } for (Iterator<SimplifiedInstance> it = instances.iterator(); it.hasNext();) { SimplifiedInstance anInstance = it.next(); final ViewExpectation e = new ViewExpectation(anInstance); e.setActiveIds(activeIds.toArray(new Integer[activeIds.size()])); e.setInactiveIds(inactiveIds.toArray(new Integer[inactiveIds.size()])); waitFor(e, 20000, "checkFiestaState failed for " + anInstance + ", with instances: " + instances + ", and inactiveIds: " + inactiveIds); } } @Before @After public void clear() { for (SimplifiedInstance i : allInstances) { i.dispose(); } for (DocumentMK mk : mks) { mk.dispose(); } mks.clear(); if (MONGO_DB) { MongoConnection connection = MongoUtils.getConnection(); if (connection != null) { DB db = connection.getDB(); if (db != null) { MongoUtils.dropCollections(db); } } } } private DocumentMK createMK(int clusterId, int asyncDelay) { if (MONGO_DB) { DB db = MongoUtils.getConnection().getDB(); return register(new DocumentMK.Builder().setMongoDB(db).setLeaseCheck(false).setClusterId(clusterId) .setAsyncDelay(asyncDelay).open()); } else { if (ds == null) { ds = new MemoryDocumentStore(); } if (bs == null) { bs = new MemoryBlobStore(); } return createMK(clusterId, asyncDelay, ds, bs); } } private DocumentMK createMK(int clusterId, int asyncDelay, DocumentStore ds, BlobStore bs) { return register(new DocumentMK.Builder().setDocumentStore(ds).setBlobStore(bs).setClusterId(clusterId).setLeaseCheck(false) .setAsyncDelay(asyncDelay).open()); } private DocumentMK register(DocumentMK mk) { mks.add(mk); return mk; } }
OAK-2844 - now .. switch to memory store for jenkins tests git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1697404 13f79535-47bb-0310-9956-ffa450edef68
oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentDiscoveryLiteServiceTest.java
OAK-2844 - now .. switch to memory store for jenkins tests
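The DocumentDiscoveryLiteServiceTest in the record above coordinates all of its assertions through the Expectation/waitFor pairing: an expectation reports null when satisfied or a descriptive error message otherwise, and waitFor polls it every 100 ms until a timeout. Below is a minimal, self-contained sketch of that pattern; everything beyond the Expectation contract and the 100 ms poll interval shown in the test (the PollingAssert class name, the use of AssertionError instead of JUnit's fail) is illustrative.

interface Expectation {
    /** Return null when the expectation is met, otherwise a descriptive error message. */
    String fulfilled() throws Exception;
}

final class PollingAssert {
    /** Poll the expectation every 100 ms until it is fulfilled or the timeout elapses. */
    static void waitFor(Expectation expectation, long timeoutMillis, String msg) throws Exception {
        final long deadline = System.currentTimeMillis() + timeoutMillis;
        while (true) {
            final String error = expectation.fulfilled();
            if (error == null) {
                return; // fulfilled
            }
            if (System.currentTimeMillis() > deadline) {
                throw new AssertionError("expectation not fulfilled within " + timeoutMillis
                        + "ms: " + msg + ", fulfillment result: " + error);
            }
            Thread.sleep(100);
        }
    }
}

Having fulfilled() return an error string rather than a boolean keeps the eventual failure message specific to whichever condition was still unmet when the timeout fired.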
Java
apache-2.0
bfc375b66d1ec4f1bf69d24ff14aa6c2b1e460d4
0
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
/* * Copyright 2018 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.protean.arc.processor; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.ListIterator; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.AnnotationTarget; import org.jboss.jandex.FieldInfo; import org.jboss.jandex.MethodInfo; import org.jboss.jandex.Type; /** * Represents an injection point. * * @author Martin Kouba */ public class InjectionPointInfo { static InjectionPointInfo fromField(FieldInfo field, BeanDeployment beanDeployment) { Set<AnnotationInstance> qualifiers = new HashSet<>(); for (AnnotationInstance annotation : beanDeployment.getAnnotations(field)) { if (beanDeployment.getQualifier(annotation.name()) != null) { qualifiers.add(annotation); } } return new InjectionPointInfo(field.type(), qualifiers.isEmpty() ? Collections.emptySet() : qualifiers, field); } static InjectionPointInfo fromResourceField(FieldInfo field, BeanDeployment beanDeployment) { return new InjectionPointInfo(field.type(), new HashSet<>(field.annotations()), Kind.RESOURCE, field); } static List<InjectionPointInfo> fromMethod(MethodInfo method, BeanDeployment beanDeployment) { return fromMethod(method, beanDeployment, null); } static List<InjectionPointInfo> fromMethod(MethodInfo method, BeanDeployment beanDeployment, Predicate<Set<AnnotationInstance>> skipPredicate) { List<InjectionPointInfo> injectionPoints = new ArrayList<>(); for (ListIterator<Type> iterator = method.parameters().listIterator(); iterator.hasNext();) { Type paramType = iterator.next(); Set<AnnotationInstance> paramAnnotations = new HashSet<>(); for (AnnotationInstance annotation : beanDeployment.getAnnotations(method)) { if (org.jboss.jandex.AnnotationTarget.Kind.METHOD_PARAMETER.equals(annotation.target().kind()) && annotation.target().asMethodParameter().position() == iterator.previousIndex()) { paramAnnotations.add(annotation); } } if (skipPredicate != null && skipPredicate.test(paramAnnotations)) { // Skip parameter, e.g. 
@Disposes continue; } Set<AnnotationInstance> paramQualifiers = new HashSet<>(); for (AnnotationInstance paramAnnotation : paramAnnotations) { if (beanDeployment.getQualifier(paramAnnotation.name()) != null) { paramQualifiers.add(paramAnnotation); } } injectionPoints.add(new InjectionPointInfo(paramType, paramQualifiers, method)); } return injectionPoints; } private final TypeAndQualifiers typeAndQualifiers; private final AtomicReference<BeanInfo> resolvedBean; private final Kind kind; private final boolean hasDefaultedQualifier; private final AnnotationTarget target; InjectionPointInfo(Type requiredType, Set<AnnotationInstance> requiredQualifiers, AnnotationTarget target) { this(requiredType, requiredQualifiers, Kind.CDI, target); } InjectionPointInfo(Type requiredType, Set<AnnotationInstance> requiredQualifiers, Kind kind, AnnotationTarget target) { this.typeAndQualifiers = new TypeAndQualifiers(requiredType, requiredQualifiers.isEmpty() ? Collections.singleton(AnnotationInstance.create(DotNames.DEFAULT, null, Collections.emptyList())) : requiredQualifiers); this.resolvedBean = new AtomicReference<BeanInfo>(null); this.kind = kind; this.hasDefaultedQualifier = requiredQualifiers.isEmpty(); this.target = target; } void resolve(BeanInfo bean) { resolvedBean.set(bean); } BeanInfo getResolvedBean() { return resolvedBean.get(); } Kind getKind() { return kind; } public Type getRequiredType() { return typeAndQualifiers.type; } public Set<AnnotationInstance> getRequiredQualifiers() { return typeAndQualifiers.qualifiers; } public boolean hasDefaultedQualifier() { return hasDefaultedQualifier; } TypeAndQualifiers getTypeAndQualifiers() { return typeAndQualifiers; } /** * For injected params, this method returns the corresponding method and not the param itself. * * @return the annotation target */ public AnnotationTarget getTarget() { return target; } public String getTargetInfo() { switch (target.kind()) { case FIELD: return target.asField().declaringClass().name() + "#" + target.asField().name(); case METHOD: return target.asMethod().declaringClass().name() + "#" + target.asMethod().name() + "()"; default: return target.toString(); } } @Override public String toString() { return "InjectionPointInfo [requiredType=" + typeAndQualifiers.type + ", requiredQualifiers=" + typeAndQualifiers.qualifiers + "]"; } enum Kind { CDI, RESOURCE } static class TypeAndQualifiers { final Type type; final Set<AnnotationInstance> qualifiers; public TypeAndQualifiers(Type type, Set<AnnotationInstance> qualifiers) { this.type = type; this.qualifiers = qualifiers; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((qualifiers == null) ? 0 : qualifiers.hashCode()); result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } TypeAndQualifiers other = (TypeAndQualifiers) obj; if (qualifiers == null) { if (other.qualifiers != null) { return false; } } else if (!qualifiers.equals(other.qualifiers)) { return false; } if (type == null) { if (other.type != null) { return false; } } else if (!type.equals(other.type)) { return false; } return true; } } }
independent-projects/arc/processor/src/main/java/org/jboss/protean/arc/processor/InjectionPointInfo.java
/* * Copyright 2018 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.protean.arc.processor; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.ListIterator; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.AnnotationTarget; import org.jboss.jandex.FieldInfo; import org.jboss.jandex.MethodInfo; import org.jboss.jandex.Type; /** * Represents an injection point. * * @author Martin Kouba */ public class InjectionPointInfo { static InjectionPointInfo fromField(FieldInfo field, BeanDeployment beanDeployment) { Set<AnnotationInstance> qualifiers = new HashSet<>(); for (AnnotationInstance annotation : beanDeployment.getAnnotations(field)) { if (beanDeployment.getQualifier(annotation.name()) != null) { qualifiers.add(annotation); } } return new InjectionPointInfo(field.type(), qualifiers.isEmpty() ? Collections.emptySet() : qualifiers, field); } static InjectionPointInfo fromResourceField(FieldInfo field, BeanDeployment beanDeployment) { return new InjectionPointInfo(field.type(), new HashSet<>(field.annotations()), Kind.RESOURCE, field); } static List<InjectionPointInfo> fromMethod(MethodInfo method, BeanDeployment beanDeployment) { return fromMethod(method, beanDeployment, null); } static List<InjectionPointInfo> fromMethod(MethodInfo method, BeanDeployment beanDeployment, Predicate<Set<AnnotationInstance>> skipPredicate) { List<InjectionPointInfo> injectionPoints = new ArrayList<>(); for (ListIterator<Type> iterator = method.parameters().listIterator(); iterator.hasNext();) { Type paramType = iterator.next(); Set<AnnotationInstance> paramAnnotations = new HashSet<>(); for (AnnotationInstance annotation : beanDeployment.getAnnotations(method)) { if (org.jboss.jandex.AnnotationTarget.Kind.METHOD_PARAMETER.equals(annotation.target().kind()) && annotation.target().asMethodParameter().position() == iterator.previousIndex()) { paramAnnotations.add(annotation); } } if (skipPredicate != null && skipPredicate.test(paramAnnotations)) { // Skip parameter, e.g. 
@Disposes continue; } Set<AnnotationInstance> paramQualifiers = new HashSet<>(); for (AnnotationInstance paramAnnotation : paramAnnotations) { if (beanDeployment.getQualifier(paramAnnotation.name()) != null) { paramQualifiers.add(paramAnnotation); } } injectionPoints.add(new InjectionPointInfo(paramType, paramQualifiers, method)); } return injectionPoints; } private final TypeAndQualifiers typeAndQualifiers; private final AtomicReference<BeanInfo> resolvedBean; private final Kind kind; private final boolean hasDefaultedQualifier; private final AnnotationTarget target; InjectionPointInfo(Type requiredType, Set<AnnotationInstance> requiredQualifiers, AnnotationTarget target) { this(requiredType, requiredQualifiers, Kind.CDI, target); } InjectionPointInfo(Type requiredType, Set<AnnotationInstance> requiredQualifiers, Kind kind, AnnotationTarget target) { this.typeAndQualifiers = new TypeAndQualifiers(requiredType, requiredQualifiers.isEmpty() ? Collections.singleton(AnnotationInstance.create(DotNames.DEFAULT, null, Collections.emptyList())) : requiredQualifiers); this.resolvedBean = new AtomicReference<BeanInfo>(null); this.kind = kind; this.hasDefaultedQualifier = requiredQualifiers.isEmpty(); this.target = target; } void resolve(BeanInfo bean) { resolvedBean.set(bean); } BeanInfo getResolvedBean() { return resolvedBean.get(); } Kind getKind() { return kind; } public Type getRequiredType() { return typeAndQualifiers.type; } public Set<AnnotationInstance> getRequiredQualifiers() { return typeAndQualifiers.qualifiers; } public boolean hasDefaultedQualifier() { return hasDefaultedQualifier; } TypeAndQualifiers getTypeAndQualifiers() { return typeAndQualifiers; } /** * For injectected params, this method returns the corresponding method and not the param itself. * * @return the annotation target */ public AnnotationTarget getTarget() { return target; } public String getTargetInfo() { switch (target.kind()) { case FIELD: return target.asField().declaringClass().name() + "#" + target.asField().name(); case METHOD: return target.asMethod().declaringClass().name() + "#" + target.asMethod().name() + "()"; default: return target.toString(); } } @Override public String toString() { return "InjectionPointInfo [requiredType=" + typeAndQualifiers.type + ", requiredQualifiers=" + typeAndQualifiers.qualifiers + "]"; } enum Kind { CDI, RESOURCE } static class TypeAndQualifiers { final Type type; final Set<AnnotationInstance> qualifiers; public TypeAndQualifiers(Type type, Set<AnnotationInstance> qualifiers) { this.type = type; this.qualifiers = qualifiers; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((qualifiers == null) ? 0 : qualifiers.hashCode()); result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } TypeAndQualifiers other = (TypeAndQualifiers) obj; if (qualifiers == null) { if (other.qualifiers != null) { return false; } } else if (!qualifiers.equals(other.qualifiers)) { return false; } if (type == null) { if (other.type != null) { return false; } } else if (!type.equals(other.type)) { return false; } return true; } } }
Minor typo fix Co-Authored-By: stuartwdouglas <[email protected]>
independent-projects/arc/processor/src/main/java/org/jboss/protean/arc/processor/InjectionPointInfo.java
Minor typo fix
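The InjectionPointInfo constructor in the record above normalizes an empty qualifier set to a singleton @Default annotation and remembers that in hasDefaultedQualifier. The sketch below isolates that defaulting rule using the same Jandex calls visible in the class; the fully qualified @Default name is an assumption here, since the real code resolves it through the project's own DotNames constants.

import java.util.Collections;
import java.util.Set;

import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.DotName;

final class QualifierDefaults {
    // Assumed qualified name; InjectionPointInfo obtains this via DotNames.DEFAULT.
    private static final DotName DEFAULT = DotName.createSimple("javax.enterprise.inject.Default");

    /** An injection point with no explicit qualifiers behaves as if it carried @Default. */
    static Set<AnnotationInstance> withDefault(Set<AnnotationInstance> declared) {
        if (declared.isEmpty()) {
            return Collections.singleton(
                    AnnotationInstance.create(DEFAULT, null, Collections.emptyList()));
        }
        return declared;
    }
}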
Java
apache-2.0
9fc8da6f32b68006c222ccfd038719fc89ff8550
0
sankarh/hive,sankarh/hive,jcamachor/hive,lirui-apache/hive,sankarh/hive,nishantmonu51/hive,jcamachor/hive,jcamachor/hive,sankarh/hive,lirui-apache/hive,jcamachor/hive,nishantmonu51/hive,jcamachor/hive,lirui-apache/hive,jcamachor/hive,nishantmonu51/hive,jcamachor/hive,nishantmonu51/hive,sankarh/hive,nishantmonu51/hive,nishantmonu51/hive,sankarh/hive,nishantmonu51/hive,nishantmonu51/hive,lirui-apache/hive,sankarh/hive,lirui-apache/hive,sankarh/hive,lirui-apache/hive,lirui-apache/hive,nishantmonu51/hive,lirui-apache/hive,jcamachor/hive,jcamachor/hive,sankarh/hive,lirui-apache/hive
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreFilterHook; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.MetaStorePreEventListener; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent; import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent; import org.apache.hadoop.hive.metastore.events.PreDropTableEvent; import org.apache.hadoop.hive.metastore.events.PreEventContext; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; import org.apache.hadoop.hive.metastore.api.TableMeta; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider; import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events.*; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.DatabaseFilterContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.TableFilterContext; import org.apache.hadoop.security.UserGroupInformation; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * HiveMetaStoreAuthorizer : Do authorization checks on MetaStore Events in MetaStorePreEventListener */ public class 
HiveMetaStoreAuthorizer extends MetaStorePreEventListener implements MetaStoreFilterHook { private static final Log LOG = LogFactory.getLog(HiveMetaStoreAuthorizer.class); private static final ThreadLocal<Configuration> tConfig = new ThreadLocal<Configuration>() { @Override protected Configuration initialValue() { return new HiveConf(HiveMetaStoreAuthorizer.class); } }; private static final ThreadLocal<HiveMetastoreAuthenticationProvider> tAuthenticator = new ThreadLocal<HiveMetastoreAuthenticationProvider>() { @Override protected HiveMetastoreAuthenticationProvider initialValue() { try { return (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator(tConfig.get(), HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER); } catch (HiveException excp) { throw new IllegalStateException("Authentication provider instantiation failure", excp); } } }; public HiveMetaStoreAuthorizer(Configuration config) { super(config); } @Override public final void onEvent(PreEventContext preEventContext) throws MetaException, NoSuchObjectException, InvalidOperationException { LOG.debug("==> HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); try { HiveMetaStoreAuthzInfo authzContext = buildAuthzContext(preEventContext); if (!skipAuthorization(authzContext)) { HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); checkPrivileges(authzContext, hiveAuthorizer); } } catch (Exception e) { LOG.error("HiveMetaStoreAuthorizer.onEvent(): failed", e); throw new MetaException(e.getMessage()); } LOG.debug("<== HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); } @Override public final List<String> filterDatabases(List<String> list) throws MetaException { LOG.debug("HiveMetaStoreAuthorizer.filterDatabases()"); if (list == null) { return Collections.emptyList(); } DatabaseFilterContext databaseFilterContext = new DatabaseFilterContext(list); HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = databaseFilterContext.getAuthzContext(); List<String> filteredDatabases = filterDatabaseObjects(hiveMetaStoreAuthzInfo); if (CollectionUtils.isEmpty(filteredDatabases)) { filteredDatabases = Collections.emptyList(); } LOG.debug("HiveMetaStoreAuthorizer.filterDatabases() :" + filteredDatabases); return filteredDatabases; } @Override public final Database filterDatabase(Database database) throws MetaException, NoSuchObjectException { if (database != null) { String dbName = database.getName(); List<String> databases = filterDatabases(Collections.singletonList(dbName)); if (databases.isEmpty()) { throw new NoSuchObjectException(String.format("Database %s does not exist", dbName)); } } return database; } @Override public final List<String> filterTableNames(String s, String s1, List<String> list) throws MetaException { LOG.debug("==> HiveMetaStoreAuthorizer.filterTableNames()"); List<String> filteredTableNames = null; if (list != null) { String dbName = getDBName(s1); TableFilterContext tableFilterContext = new TableFilterContext(dbName, list); HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = tableFilterContext.getAuthzContext(); filteredTableNames = filterTableNames(hiveMetaStoreAuthzInfo, dbName, list); if (CollectionUtils.isEmpty(filteredTableNames)) { filteredTableNames = Collections.emptyList(); } } LOG.debug("<== HiveMetaStoreAuthorizer.filterTableNames() : " + filteredTableNames); return filteredTableNames; } @Override public final Table filterTable(Table table) throws MetaException, NoSuchObjectException { if (table != null) { List<Table> tables = 
filterTables(Collections.singletonList(table)); if (tables.isEmpty()) { throw new NoSuchObjectException(String.format("Database %s does not exist", table.getTableName())); } } return table; } @Override public final List<Table> filterTables(List<Table> list) throws MetaException { LOG.debug("==> HiveMetaStoreAuthorizer.filterTables()"); List<Table> filteredTables = null; if (list != null) { TableFilterContext tableFilterContext = new TableFilterContext(list); HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = tableFilterContext.getAuthzContext(); filteredTables = filterTableObjects(hiveMetaStoreAuthzInfo, list); if (CollectionUtils.isEmpty(filteredTables)) { filteredTables = Collections.emptyList(); } } LOG.debug("<== HiveMetaStoreAuthorizer.filterTables(): " + filteredTables); return filteredTables; } @Override public final Catalog filterCatalog(Catalog catalog) throws MetaException { return catalog; } @Override public final List<String> filterCatalogs(List<String> catalogs) throws MetaException { return catalogs; } @Override public final List<TableMeta> filterTableMetas(String catName, String dbName, List<TableMeta> tableMetas) throws MetaException { return tableMetas; } @Override public final List<Partition> filterPartitions(List<Partition> list) throws MetaException { return list; } @Override public final List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> list) throws MetaException { return list; } @Override public final Partition filterPartition(Partition partition) throws MetaException, NoSuchObjectException { return partition; } @Override public final List<String> filterPartitionNames(String s, String s1, String s2, List<String> list) throws MetaException { return list; } private List<String> filterDatabaseObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo) throws MetaException { List<String> ret = null; LOG.debug("==> HiveMetaStoreAuthorizer.filterDatabaseObjects()"); try { HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); List<HivePrivilegeObject> hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); List<HivePrivilegeObject> filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { ret = getFilteredDatabaseList(filteredHivePrivilegeObjects); } LOG.info(String.format("Filtered %d databases out of %d", filteredHivePrivilegeObjects.size(), hivePrivilegeObjects.size())); } catch (Exception e) { throw new MetaException("Error in HiveMetaStoreAuthorizer.filterDatabase()" + e.getMessage()); } LOG.debug("<== HiveMetaStoreAuthorizer.filterDatabaseObjects() :" + ret ); return ret; } private List<Table> filterTableObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo, List<Table> tableList) throws MetaException { List<Table> ret = null; try { HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); List<HivePrivilegeObject> hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); List<HivePrivilegeObject> filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { ret = getFilteredTableList(filteredHivePrivilegeObjects, tableList); } LOG.info(String.format("Filtered %d tables out of %d", filteredHivePrivilegeObjects.size(), hivePrivilegeObjects.size())); } catch 
(Exception e) { throw new MetaException("Error in HiveMetaStoreAuthorizer.filterTables()" + e.getMessage()); } return ret; } private List<String> getFilteredDatabaseList(List<HivePrivilegeObject> hivePrivilegeObjects) { List<String> ret = new ArrayList<>(); for(HivePrivilegeObject hivePrivilegeObject: hivePrivilegeObjects) { String dbName = hivePrivilegeObject.getDbname(); ret.add(dbName); } return ret; } private List<Table> getFilteredTableList(List<HivePrivilegeObject> hivePrivilegeObjects, List<Table> tableList) { List<Table> ret = new ArrayList<>(); for (HivePrivilegeObject hivePrivilegeObject : hivePrivilegeObjects) { String dbName = hivePrivilegeObject.getDbname(); String tblName = hivePrivilegeObject.getObjectName(); Table table = getFilteredTable(dbName, tblName, tableList); if (table != null) { ret.add(table); } } return ret; } private Table getFilteredTable(String dbName, String tblName, List<Table> tableList) { Table ret = null; for (Table table: tableList) { String databaseName = table.getDbName(); String tableName = table.getTableName(); if (dbName.equals(databaseName) && tblName.equals(tableName)) { ret = table; break; } } return ret; } private List<String> filterTableNames(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo, String dbName, List<String> tableNames) throws MetaException { List<String> ret = null; try { HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); List<HivePrivilegeObject> hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); List<HivePrivilegeObject> filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { ret = getFilteredTableNames(filteredHivePrivilegeObjects, dbName, tableNames); } LOG.info(String.format("Filtered %d table names out of %d", filteredHivePrivilegeObjects.size(), hivePrivilegeObjects.size())); } catch (Exception e) { throw new MetaException("Error in HiveMetaStoreAuthorizer.filterTables()" + e.getMessage()); } return ret; } private List<String> getFilteredTableNames(List<HivePrivilegeObject> hivePrivilegeObjects, String databaseName, List<String> tableNames) { List<String> ret = new ArrayList<>(); for (HivePrivilegeObject hivePrivilegeObject : hivePrivilegeObjects) { String dbName = hivePrivilegeObject.getDbname(); String tblName = hivePrivilegeObject.getObjectName(); String table = getFilteredTableNames(dbName, tblName, databaseName, tableNames); if (table != null) { ret.add(table); } } return ret; } private String getFilteredTableNames(String dbName, String tblName, String databaseName, List<String> tableNames) { String ret = null; for (String tableName : tableNames) { if (dbName.equals(databaseName) && tblName.equals(tableName)) { ret = tableName; break; } } return ret; } private String getDBName(String str) { return (str != null) ? 
str.substring(str.indexOf("#")+1) : null; } HiveMetaStoreAuthzInfo buildAuthzContext(PreEventContext preEventContext) throws MetaException { LOG.debug("==> HiveMetaStoreAuthorizer.buildAuthzContext(): EventType=" + preEventContext.getEventType()); HiveMetaStoreAuthorizableEvent authzEvent = null; if (preEventContext != null) { switch (preEventContext.getEventType()) { case CREATE_DATABASE: authzEvent = new CreateDatabaseEvent(preEventContext); break; case ALTER_DATABASE: authzEvent = new AlterDatabaseEvent(preEventContext); break; case DROP_DATABASE: authzEvent = new DropDatabaseEvent(preEventContext); break; case CREATE_TABLE: authzEvent = new CreateTableEvent(preEventContext); if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { throw new MetaException(getErrorMessage("CREATE_VIEW", getCurrentUser(authzEvent))); } break; case ALTER_TABLE: authzEvent = new AlterTableEvent(preEventContext); if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { throw new MetaException(getErrorMessage("ALTER_VIEW", getCurrentUser(authzEvent))); } break; case DROP_TABLE: authzEvent = new DropTableEvent(preEventContext); if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { throw new MetaException(getErrorMessage("DROP_VIEW", getCurrentUser(authzEvent))); } break; case ADD_PARTITION: authzEvent = new AddPartitionEvent(preEventContext); break; case ALTER_PARTITION: authzEvent = new AlterPartitionEvent(preEventContext); break; case LOAD_PARTITION_DONE: authzEvent = new LoadPartitionDoneEvent(preEventContext); break; case DROP_PARTITION: authzEvent = new DropPartitionEvent(preEventContext); break; case READ_TABLE: authzEvent = new ReadTableEvent(preEventContext); break; case READ_DATABASE: authzEvent = new ReadDatabaseEvent(preEventContext); break; case AUTHORIZATION_API_CALL: case READ_ISCHEMA: case CREATE_ISCHEMA: case DROP_ISCHEMA: case ALTER_ISCHEMA: case ADD_SCHEMA_VERSION: case ALTER_SCHEMA_VERSION: case DROP_SCHEMA_VERSION: case READ_SCHEMA_VERSION: case CREATE_CATALOG: case ALTER_CATALOG: case DROP_CATALOG: if (!isSuperUser(getCurrentUser())) { throw new MetaException(getErrorMessage(preEventContext, getCurrentUser())); } break; default: break; } } HiveMetaStoreAuthzInfo ret = authzEvent != null ? 
authzEvent.getAuthzContext() : null; LOG.debug("<== HiveMetaStoreAuthorizer.buildAuthzContext(): EventType=" + preEventContext.getEventType() + "; ret=" + ret); return ret; } HiveAuthorizer createHiveMetaStoreAuthorizer() throws Exception { HiveAuthorizer ret = null; HiveConf hiveConf = new HiveConf(super.getConf(), HiveConf.class); HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(hiveConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); if (authorizerFactory != null) { HiveMetastoreAuthenticationProvider authenticator = tAuthenticator.get(); authenticator.setConf(hiveConf); HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder(); authzContextBuilder.setClientType(HiveAuthzSessionContext.CLIENT_TYPE.HIVEMETASTORE); authzContextBuilder.setSessionString("HiveMetaStore"); HiveAuthzSessionContext authzSessionContext = authzContextBuilder.build(); ret = authorizerFactory .createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), hiveConf, authenticator, authzSessionContext); } return ret; } boolean isSuperUser(String userName) { Configuration conf = getConf(); String ipAddress = HiveMetaStore.HMSHandler.getIPAddress(); return (MetaStoreServerUtils.checkUserHasHostProxyPrivileges(userName, conf, ipAddress)); } boolean isViewOperation(PreEventContext preEventContext) { boolean ret = false; PreEventContext.PreEventType preEventType = preEventContext.getEventType(); switch (preEventType) { case CREATE_TABLE: PreCreateTableEvent preCreateTableEvent = (PreCreateTableEvent) preEventContext; Table table = preCreateTableEvent.getTable(); ret = isViewType(table); break; case ALTER_TABLE: PreAlterTableEvent preAlterTableEvent = (PreAlterTableEvent) preEventContext; Table inTable = preAlterTableEvent.getOldTable(); Table outTable = preAlterTableEvent.getNewTable(); ret = (isViewType(inTable) || isViewType(outTable)); break; case DROP_TABLE: PreDropTableEvent preDropTableEvent = (PreDropTableEvent) preEventContext; Table droppedTable = preDropTableEvent.getTable(); ret = isViewType(droppedTable); break; } return ret; } private void checkPrivileges(final HiveMetaStoreAuthzInfo authzContext, HiveAuthorizer authorizer) throws MetaException { LOG.debug("==> HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer); HiveOperationType hiveOpType = authzContext.getOperationType(); List<HivePrivilegeObject> inputHObjs = authzContext.getInputHObjs(); List<HivePrivilegeObject> outputHObjs = authzContext.getOutputHObjs(); HiveAuthzContext hiveAuthzContext = authzContext.getHiveAuthzContext(); try { authorizer.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, hiveAuthzContext); } catch (Exception e) { throw new MetaException(e.getMessage()); } LOG.debug("<== HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer); } private boolean skipAuthorization(HiveMetaStoreAuthzInfo authzContext) { LOG.debug("==> HiveMetaStoreAuthorizer.skipAuthorization()"); if(authzContext == null){ return false; } boolean ret = false; UserGroupInformation ugi = null; try { ugi = getUGI(); ret = isSuperUser(ugi.getShortUserName()); } catch (IOException e) { LOG.warn("Not able to obtain UserGroupInformation", e); } LOG.debug("<== HiveMetaStoreAuthorizer.skipAuthorization(): " + ret); return ret; } private boolean isViewType(Table table) { boolean ret = false; String tableType = table.getTableType(); if (TableType.MATERIALIZED_VIEW.name().equals(tableType) || 
TableType.VIRTUAL_VIEW.name().equals(tableType)) { ret = true; } return ret; } private String getErrorMessage(PreEventContext preEventContext, String user) { String err = "Operation type " + preEventContext.getEventType().name() + " not allowed for user:" + user; return err; } private String getErrorMessage(String eventType, String user) { String err = "Operation type " + eventType + " not allowed for user:" + user; return err; } private String getCurrentUser() { try { return UserGroupInformation.getCurrentUser().getShortUserName(); } catch (IOException excp) { } return null; } private String getCurrentUser(HiveMetaStoreAuthorizableEvent authorizableEvent) { return authorizableEvent.getAuthzContext().getUGI().getShortUserName(); } private UserGroupInformation getUGI() throws IOException { return UserGroupInformation.getCurrentUser(); } }
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreFilterHook; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.MetaStorePreEventListener; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent; import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent; import org.apache.hadoop.hive.metastore.events.PreDropTableEvent; import org.apache.hadoop.hive.metastore.events.PreEventContext; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; import org.apache.hadoop.hive.metastore.api.TableMeta; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider; import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events.*; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.DatabaseFilterContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.TableFilterContext; import org.apache.hadoop.security.UserGroupInformation; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * HiveMetaStoreAuthorizer : Do authorization checks on MetaStore Events in MetaStorePreEventListener */ public class 
HiveMetaStoreAuthorizer extends MetaStorePreEventListener implements MetaStoreFilterHook { private static final Log LOG = LogFactory.getLog(HiveMetaStoreAuthorizer.class); private static final ThreadLocal<Configuration> tConfig = new ThreadLocal<Configuration>() { @Override protected Configuration initialValue() { return new HiveConf(HiveMetaStoreAuthorizer.class); } }; private static final ThreadLocal<HiveMetastoreAuthenticationProvider> tAuthenticator = new ThreadLocal<HiveMetastoreAuthenticationProvider>() { @Override protected HiveMetastoreAuthenticationProvider initialValue() { try { return (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator(tConfig.get(), HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER); } catch (HiveException excp) { throw new IllegalStateException("Authentication provider instantiation failure", excp); } } }; public HiveMetaStoreAuthorizer(Configuration config) { super(config); } @Override public final void onEvent(PreEventContext preEventContext) throws MetaException, NoSuchObjectException, InvalidOperationException { LOG.debug("==> HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); try { HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); if (!skipAuthorization()) { HiveMetaStoreAuthzInfo authzContext = buildAuthzContext(preEventContext); checkPrivileges(authzContext, hiveAuthorizer); } } catch (Exception e) { LOG.error("HiveMetaStoreAuthorizer.onEvent(): failed", e); throw new MetaException(e.getMessage()); } LOG.debug("<== HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); } @Override public final List<String> filterDatabases(List<String> list) throws MetaException { LOG.debug("HiveMetaStoreAuthorizer.filterDatabases()"); if (list == null) { return Collections.emptyList(); } DatabaseFilterContext databaseFilterContext = new DatabaseFilterContext(list); HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = databaseFilterContext.getAuthzContext(); List<String> filteredDatabases = filterDatabaseObjects(hiveMetaStoreAuthzInfo); if (CollectionUtils.isEmpty(filteredDatabases)) { filteredDatabases = Collections.emptyList(); } LOG.debug("HiveMetaStoreAuthorizer.filterDatabases() :" + filteredDatabases); return filteredDatabases; } @Override public final Database filterDatabase(Database database) throws MetaException, NoSuchObjectException { if (database != null) { String dbName = database.getName(); List<String> databases = filterDatabases(Collections.singletonList(dbName)); if (databases.isEmpty()) { throw new NoSuchObjectException(String.format("Database %s does not exist", dbName)); } } return database; } @Override public final List<String> filterTableNames(String s, String s1, List<String> list) throws MetaException { LOG.debug("==> HiveMetaStoreAuthorizer.filterTableNames()"); List<String> filteredTableNames = null; if (list != null) { String dbName = getDBName(s1); TableFilterContext tableFilterContext = new TableFilterContext(dbName, list); HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = tableFilterContext.getAuthzContext(); filteredTableNames = filterTableNames(hiveMetaStoreAuthzInfo, dbName, list); if (CollectionUtils.isEmpty(filteredTableNames)) { filteredTableNames = Collections.emptyList(); } } LOG.debug("<== HiveMetaStoreAuthorizer.filterTableNames() : " + filteredTableNames); return filteredTableNames; } @Override public final Table filterTable(Table table) throws MetaException, NoSuchObjectException { if (table != null) { List<Table> tables = 
filterTables(Collections.singletonList(table)); if (tables.isEmpty()) { throw new NoSuchObjectException(String.format("Database %s does not exist", table.getTableName())); } } return table; } @Override public final List<Table> filterTables(List<Table> list) throws MetaException { LOG.debug("==> HiveMetaStoreAuthorizer.filterTables()"); List<Table> filteredTables = null; if (list != null) { TableFilterContext tableFilterContext = new TableFilterContext(list); HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = tableFilterContext.getAuthzContext(); filteredTables = filterTableObjects(hiveMetaStoreAuthzInfo, list); if (CollectionUtils.isEmpty(filteredTables)) { filteredTables = Collections.emptyList(); } } LOG.debug("<== HiveMetaStoreAuthorizer.filterTables(): " + filteredTables); return filteredTables; } @Override public final Catalog filterCatalog(Catalog catalog) throws MetaException { return catalog; } @Override public final List<String> filterCatalogs(List<String> catalogs) throws MetaException { return catalogs; } @Override public final List<TableMeta> filterTableMetas(String catName, String dbName, List<TableMeta> tableMetas) throws MetaException { return tableMetas; } @Override public final List<Partition> filterPartitions(List<Partition> list) throws MetaException { return list; } @Override public final List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> list) throws MetaException { return list; } @Override public final Partition filterPartition(Partition partition) throws MetaException, NoSuchObjectException { return partition; } @Override public final List<String> filterPartitionNames(String s, String s1, String s2, List<String> list) throws MetaException { return list; } private List<String> filterDatabaseObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo) throws MetaException { List<String> ret = null; LOG.debug("==> HiveMetaStoreAuthorizer.filterDatabaseObjects()"); try { HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); List<HivePrivilegeObject> hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); List<HivePrivilegeObject> filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { ret = getFilteredDatabaseList(filteredHivePrivilegeObjects); } LOG.info(String.format("Filtered %d databases out of %d", filteredHivePrivilegeObjects.size(), hivePrivilegeObjects.size())); } catch (Exception e) { throw new MetaException("Error in HiveMetaStoreAuthorizer.filterDatabase()" + e.getMessage()); } LOG.debug("<== HiveMetaStoreAuthorizer.filterDatabaseObjects() :" + ret ); return ret; } private List<Table> filterTableObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo, List<Table> tableList) throws MetaException { List<Table> ret = null; try { HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); List<HivePrivilegeObject> hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); List<HivePrivilegeObject> filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { ret = getFilteredTableList(filteredHivePrivilegeObjects, tableList); } LOG.info(String.format("Filtered %d tables out of %d", filteredHivePrivilegeObjects.size(), hivePrivilegeObjects.size())); } catch 
(Exception e) { throw new MetaException("Error in HiveMetaStoreAuthorizer.filterTables()" + e.getMessage()); } return ret; } private List<String> getFilteredDatabaseList(List<HivePrivilegeObject> hivePrivilegeObjects) { List<String> ret = new ArrayList<>(); for(HivePrivilegeObject hivePrivilegeObject: hivePrivilegeObjects) { String dbName = hivePrivilegeObject.getDbname(); ret.add(dbName); } return ret; } private List<Table> getFilteredTableList(List<HivePrivilegeObject> hivePrivilegeObjects, List<Table> tableList) { List<Table> ret = new ArrayList<>(); for (HivePrivilegeObject hivePrivilegeObject : hivePrivilegeObjects) { String dbName = hivePrivilegeObject.getDbname(); String tblName = hivePrivilegeObject.getObjectName(); Table table = getFilteredTable(dbName, tblName, tableList); if (table != null) { ret.add(table); } } return ret; } private Table getFilteredTable(String dbName, String tblName, List<Table> tableList) { Table ret = null; for (Table table: tableList) { String databaseName = table.getDbName(); String tableName = table.getTableName(); if (dbName.equals(databaseName) && tblName.equals(tableName)) { ret = table; break; } } return ret; } private List<String> filterTableNames(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo, String dbName, List<String> tableNames) throws MetaException { List<String> ret = null; try { HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); List<HivePrivilegeObject> hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); List<HivePrivilegeObject> filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { ret = getFilteredTableNames(filteredHivePrivilegeObjects, dbName, tableNames); } LOG.info(String.format("Filtered %d table names out of %d", filteredHivePrivilegeObjects.size(), hivePrivilegeObjects.size())); } catch (Exception e) { throw new MetaException("Error in HiveMetaStoreAuthorizer.filterTables()" + e.getMessage()); } return ret; } private List<String> getFilteredTableNames(List<HivePrivilegeObject> hivePrivilegeObjects, String databaseName, List<String> tableNames) { List<String> ret = new ArrayList<>(); for (HivePrivilegeObject hivePrivilegeObject : hivePrivilegeObjects) { String dbName = hivePrivilegeObject.getDbname(); String tblName = hivePrivilegeObject.getObjectName(); String table = getFilteredTableNames(dbName, tblName, databaseName, tableNames); if (table != null) { ret.add(table); } } return ret; } private String getFilteredTableNames(String dbName, String tblName, String databaseName, List<String> tableNames) { String ret = null; for (String tableName : tableNames) { if (dbName.equals(databaseName) && tblName.equals(tableName)) { ret = tableName; break; } } return ret; } private String getDBName(String str) { return (str != null) ? 
str.substring(str.indexOf("#")+1) : null; } HiveMetaStoreAuthzInfo buildAuthzContext(PreEventContext preEventContext) throws MetaException { LOG.debug("==> HiveMetaStoreAuthorizer.buildAuthzContext(): EventType=" + preEventContext.getEventType()); HiveMetaStoreAuthorizableEvent authzEvent = null; if (preEventContext != null) { switch (preEventContext.getEventType()) { case CREATE_DATABASE: authzEvent = new CreateDatabaseEvent(preEventContext); break; case ALTER_DATABASE: authzEvent = new AlterDatabaseEvent(preEventContext); break; case DROP_DATABASE: authzEvent = new DropDatabaseEvent(preEventContext); break; case CREATE_TABLE: authzEvent = new CreateTableEvent(preEventContext); if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { throw new MetaException(getErrorMessage("CREATE_VIEW", getCurrentUser(authzEvent))); } break; case ALTER_TABLE: authzEvent = new AlterTableEvent(preEventContext); if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { throw new MetaException(getErrorMessage("ALTER_VIEW", getCurrentUser(authzEvent))); } break; case DROP_TABLE: authzEvent = new DropTableEvent(preEventContext); if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { throw new MetaException(getErrorMessage("DROP_VIEW", getCurrentUser(authzEvent))); } break; case ADD_PARTITION: authzEvent = new AddPartitionEvent(preEventContext); break; case ALTER_PARTITION: authzEvent = new AlterPartitionEvent(preEventContext); break; case LOAD_PARTITION_DONE: authzEvent = new LoadPartitionDoneEvent(preEventContext); break; case DROP_PARTITION: authzEvent = new DropPartitionEvent(preEventContext); break; case READ_TABLE: authzEvent = new ReadTableEvent(preEventContext); break; case READ_DATABASE: authzEvent = new ReadDatabaseEvent(preEventContext); break; case AUTHORIZATION_API_CALL: case READ_ISCHEMA: case CREATE_ISCHEMA: case DROP_ISCHEMA: case ALTER_ISCHEMA: case ADD_SCHEMA_VERSION: case ALTER_SCHEMA_VERSION: case DROP_SCHEMA_VERSION: case READ_SCHEMA_VERSION: case CREATE_CATALOG: case ALTER_CATALOG: case DROP_CATALOG: if (!isSuperUser(getCurrentUser())) { throw new MetaException(getErrorMessage(preEventContext, getCurrentUser())); } break; default: break; } } HiveMetaStoreAuthzInfo ret = authzEvent != null ? 
authzEvent.getAuthzContext() : null; LOG.debug("<== HiveMetaStoreAuthorizer.buildAuthzContext(): EventType=" + preEventContext.getEventType() + "; ret=" + ret); return ret; } HiveAuthorizer createHiveMetaStoreAuthorizer() throws Exception { HiveAuthorizer ret = null; HiveConf hiveConf = new HiveConf(super.getConf(), HiveConf.class); HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(hiveConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); if (authorizerFactory != null) { HiveMetastoreAuthenticationProvider authenticator = tAuthenticator.get(); authenticator.setConf(hiveConf); HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder(); authzContextBuilder.setClientType(HiveAuthzSessionContext.CLIENT_TYPE.HIVEMETASTORE); authzContextBuilder.setSessionString("HiveMetaStore"); HiveAuthzSessionContext authzSessionContext = authzContextBuilder.build(); ret = authorizerFactory .createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), hiveConf, authenticator, authzSessionContext); } return ret; } boolean isSuperUser(String userName) { Configuration conf = getConf(); String ipAddress = HiveMetaStore.HMSHandler.getIPAddress(); return (MetaStoreServerUtils.checkUserHasHostProxyPrivileges(userName, conf, ipAddress)); } boolean isViewOperation(PreEventContext preEventContext) { boolean ret = false; PreEventContext.PreEventType preEventType = preEventContext.getEventType(); switch (preEventType) { case CREATE_TABLE: PreCreateTableEvent preCreateTableEvent = (PreCreateTableEvent) preEventContext; Table table = preCreateTableEvent.getTable(); ret = isViewType(table); break; case ALTER_TABLE: PreAlterTableEvent preAlterTableEvent = (PreAlterTableEvent) preEventContext; Table inTable = preAlterTableEvent.getOldTable(); Table outTable = preAlterTableEvent.getNewTable(); ret = (isViewType(inTable) || isViewType(outTable)); break; case DROP_TABLE: PreDropTableEvent preDropTableEvent = (PreDropTableEvent) preEventContext; Table droppedTable = preDropTableEvent.getTable(); ret = isViewType(droppedTable); break; } return ret; } private void checkPrivileges(final HiveMetaStoreAuthzInfo authzContext, HiveAuthorizer authorizer) throws MetaException { LOG.debug("==> HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer); HiveOperationType hiveOpType = authzContext.getOperationType(); List<HivePrivilegeObject> inputHObjs = authzContext.getInputHObjs(); List<HivePrivilegeObject> outputHObjs = authzContext.getOutputHObjs(); HiveAuthzContext hiveAuthzContext = authzContext.getHiveAuthzContext(); try { authorizer.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, hiveAuthzContext); } catch (Exception e) { throw new MetaException(e.getMessage()); } LOG.debug("<== HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer); } private boolean skipAuthorization() { LOG.debug("==> HiveMetaStoreAuthorizer.skipAuthorization()"); boolean ret = false; UserGroupInformation ugi = null; try { ugi = getUGI(); ret = isSuperUser(ugi.getShortUserName()); } catch (IOException e) { LOG.warn("Not able to obtain UserGroupInformation", e); } LOG.debug("<== HiveMetaStoreAuthorizer.skipAuthorization(): " + ret); return ret; } private boolean isViewType(Table table) { boolean ret = false; String tableType = table.getTableType(); if (TableType.MATERIALIZED_VIEW.name().equals(tableType) || TableType.VIRTUAL_VIEW.name().equals(tableType)) { ret = true; } return ret; } private String 
getErrorMessage(PreEventContext preEventContext, String user) { String err = "Operation type " + preEventContext.getEventType().name() + " not allowed for user:" + user; return err; } private String getErrorMessage(String eventType, String user) { String err = "Operation type " + eventType + " not allowed for user:" + user; return err; } private String getCurrentUser() { try { return UserGroupInformation.getCurrentUser().getShortUserName(); } catch (IOException excp) { } return null; } private String getCurrentUser(HiveMetaStoreAuthorizableEvent authorizableEvent) { return authorizableEvent.getAuthzContext().getUGI().getShortUserName(); } private UserGroupInformation getUGI() throws IOException { return UserGroupInformation.getCurrentUser(); } }
HIVE-24004: Improve performance for filter hook for superuser path (Sam An, reviewed by Naveen Gangam)
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java
HIVE-24004: Improve performance for filter hook for superuser path (Sam An, reviewed by Naveen Gangam)
Java
apache-2.0
2fffa660ab2a7436c0a2fe9944b1664134d4e335
0
volodymyr-babak/thingsboard,thingsboard/thingsboard,volodymyr-babak/thingsboard,thingsboard/thingsboard,thingsboard/thingsboard,thingsboard/thingsboard,volodymyr-babak/thingsboard,volodymyr-babak/thingsboard,thingsboard/thingsboard,volodymyr-babak/thingsboard,volodymyr-babak/thingsboard,thingsboard/thingsboard
/** * Copyright © 2016-2020 The Thingsboard Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.thingsboard.rule.engine.edge; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import lombok.extern.slf4j.Slf4j; import org.thingsboard.rule.engine.api.EmptyNodeConfiguration; import org.thingsboard.rule.engine.api.RuleNode; import org.thingsboard.rule.engine.api.TbContext; import org.thingsboard.rule.engine.api.TbNode; import org.thingsboard.rule.engine.api.TbNodeConfiguration; import org.thingsboard.rule.engine.api.TbNodeException; import org.thingsboard.rule.engine.api.util.TbNodeUtils; import org.thingsboard.server.common.data.DataConstants; import org.thingsboard.server.common.data.EdgeUtils; import org.thingsboard.server.common.data.EntityType; import org.thingsboard.server.common.data.audit.ActionType; import org.thingsboard.server.common.data.edge.EdgeEvent; import org.thingsboard.server.common.data.edge.EdgeEventType; import org.thingsboard.server.common.data.id.EdgeId; import org.thingsboard.server.common.data.id.EntityId; import org.thingsboard.server.common.data.id.TenantId; import org.thingsboard.server.common.data.plugin.ComponentType; import org.thingsboard.server.common.data.relation.EntityRelation; import org.thingsboard.server.common.data.relation.RelationTypeGroup; import org.thingsboard.server.common.data.rule.RuleChainType; import org.thingsboard.server.common.msg.TbMsg; import org.thingsboard.server.common.msg.session.SessionMsgType; import javax.annotation.Nullable; import java.util.List; import java.util.UUID; import static org.thingsboard.rule.engine.api.TbRelationTypes.SUCCESS; @Slf4j @RuleNode( type = ComponentType.ACTION, name = "push to edge", configClazz = EmptyNodeConfiguration.class, nodeDescription = "Pushes messages to edge", nodeDetails = "Pushes messages to edge, if Message Originator assigned to particular edge or is EDGE entity. This node is used only on Cloud instances to push messages from Cloud to Edge. 
Supports only DEVICE, ENTITY_VIEW, ASSET and EDGE Message Originator(s).", uiResources = {"static/rulenode/rulenode-core-config.js", "static/rulenode/rulenode-core-config.css"}, configDirective = "tbNodeEmptyConfig", icon = "cloud_download", ruleChainTypes = RuleChainType.CORE ) public class TbMsgPushToEdgeNode implements TbNode { private EmptyNodeConfiguration config; private static final ObjectMapper json = new ObjectMapper(); @Override public void init(TbContext ctx, TbNodeConfiguration configuration) throws TbNodeException { this.config = TbNodeUtils.convert(configuration, EmptyNodeConfiguration.class); } @Override public void onMsg(TbContext ctx, TbMsg msg) { if (DataConstants.EDGE_MSG_SOURCE.equalsIgnoreCase(msg.getMetaData().getValue(DataConstants.MSG_SOURCE_KEY))) { log.debug("Ignoring msg from the cloud, msg [{}]", msg); return; } if (isSupportedOriginator(msg.getOriginator().getEntityType())) { if (isSupportedMsgType(msg.getType())) { ListenableFuture<EdgeId> getEdgeIdFuture = getEdgeIdByOriginatorId(ctx, ctx.getTenantId(), msg.getOriginator()); Futures.addCallback(getEdgeIdFuture, new FutureCallback<EdgeId>() { @Override public void onSuccess(@Nullable EdgeId edgeId) { try { EdgeEvent edgeEvent = buildEdgeEvent(msg, ctx); if (edgeEvent == null) { log.debug("Edge event type is null. Entity Type {}", msg.getOriginator().getEntityType()); ctx.tellFailure(msg, new RuntimeException("Edge event type is null. Entity Type '" + msg.getOriginator().getEntityType() + "'")); } else { edgeEvent.setEdgeId(edgeId); ListenableFuture<EdgeEvent> saveFuture = ctx.getEdgeEventService().saveAsync(edgeEvent); Futures.addCallback(saveFuture, new FutureCallback<EdgeEvent>() { @Override public void onSuccess(@Nullable EdgeEvent event) { ctx.tellNext(msg, SUCCESS); } @Override public void onFailure(Throwable th) { log.error("Could not save edge event", th); ctx.tellFailure(msg, th); } }, ctx.getDbCallbackExecutor()); } } catch (JsonProcessingException e) { log.error("Failed to build edge event", e); ctx.tellFailure(msg, e); } } @Override public void onFailure(Throwable t) { ctx.tellFailure(msg, t); } }, ctx.getDbCallbackExecutor()); } else { log.debug("Unsupported msg type {}", msg.getType()); ctx.tellFailure(msg, new RuntimeException("Unsupported msg type '" + msg.getType() + "'")); } } else { log.debug("Unsupported originator type {}", msg.getOriginator().getEntityType()); ctx.tellFailure(msg, new RuntimeException("Unsupported originator type '" + msg.getOriginator().getEntityType() + "'")); } } private EdgeEvent buildEdgeEvent(TbMsg msg, TbContext ctx) throws JsonProcessingException { if (DataConstants.ALARM.equals(msg.getType())) { return buildEdgeEvent(ctx.getTenantId(), ActionType.ADDED, getUUIDFromMsgData(msg), EdgeEventType.ALARM, null); } else { EdgeEventType edgeEventTypeByEntityType = EdgeUtils.getEdgeEventTypeByEntityType(msg.getOriginator().getEntityType()); if (edgeEventTypeByEntityType == null) { return null; } return buildEdgeEvent(ctx.getTenantId(), getActionTypeByMsgType(msg.getType()), msg.getOriginator().getId(), edgeEventTypeByEntityType, json.readTree(msg.getData())); } } private EdgeEvent buildEdgeEvent(TenantId tenantId, ActionType edgeEventAction, UUID entityId, EdgeEventType edgeEventType, JsonNode entityBody) { EdgeEvent edgeEvent = new EdgeEvent(); edgeEvent.setTenantId(tenantId); edgeEvent.setEdgeEventAction(edgeEventAction.name()); edgeEvent.setEntityId(entityId); edgeEvent.setEdgeEventType(edgeEventType); edgeEvent.setEntityBody(entityBody); return edgeEvent; } private UUID 
getUUIDFromMsgData(TbMsg msg) throws JsonProcessingException { JsonNode data = json.readTree(msg.getData()).get("id"); String id = json.treeToValue(data.get("id"), String.class); return UUID.fromString(id); } private ActionType getActionTypeByMsgType(String msgType) { ActionType actionType; if (SessionMsgType.POST_TELEMETRY_REQUEST.name().equals(msgType)) { actionType = ActionType.TIMESERIES_UPDATED; } else if (SessionMsgType.POST_ATTRIBUTES_REQUEST.name().equals(msgType) || DataConstants.ATTRIBUTES_UPDATED.equals(msgType)) { actionType = ActionType.ATTRIBUTES_UPDATED; } else { actionType = ActionType.ATTRIBUTES_DELETED; } return actionType; } private boolean isSupportedOriginator(EntityType entityType) { switch (entityType) { case DEVICE: case ASSET: case ENTITY_VIEW: case DASHBOARD: return true; default: return false; } } private boolean isSupportedMsgType(String msgType) { return SessionMsgType.POST_TELEMETRY_REQUEST.name().equals(msgType) || SessionMsgType.POST_ATTRIBUTES_REQUEST.name().equals(msgType) || DataConstants.ATTRIBUTES_UPDATED.equals(msgType) || DataConstants.ATTRIBUTES_DELETED.equals(msgType) || DataConstants.ALARM.equals(msgType); } private ListenableFuture<EdgeId> getEdgeIdByOriginatorId(TbContext ctx, TenantId tenantId, EntityId originatorId) { ListenableFuture<List<EntityRelation>> future = ctx.getRelationService().findByToAndTypeAsync(tenantId, originatorId, EntityRelation.CONTAINS_TYPE, RelationTypeGroup.EDGE); return Futures.transform(future, relations -> { if (relations != null && relations.size() > 0) { return new EdgeId(relations.get(0).getFrom().getId()); } else { return null; } }, ctx.getDbCallbackExecutor()); } @Override public void destroy() { } }
rule-engine/rule-engine-components/src/main/java/org/thingsboard/rule/engine/edge/TbMsgPushToEdgeNode.java
/** * Copyright © 2016-2020 The Thingsboard Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.thingsboard.rule.engine.edge; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import lombok.extern.slf4j.Slf4j; import org.thingsboard.rule.engine.api.EmptyNodeConfiguration; import org.thingsboard.rule.engine.api.RuleNode; import org.thingsboard.rule.engine.api.TbContext; import org.thingsboard.rule.engine.api.TbNode; import org.thingsboard.rule.engine.api.TbNodeConfiguration; import org.thingsboard.rule.engine.api.TbNodeException; import org.thingsboard.rule.engine.api.util.TbNodeUtils; import org.thingsboard.server.common.data.DataConstants; import org.thingsboard.server.common.data.EdgeUtils; import org.thingsboard.server.common.data.EntityType; import org.thingsboard.server.common.data.audit.ActionType; import org.thingsboard.server.common.data.edge.EdgeEvent; import org.thingsboard.server.common.data.edge.EdgeEventType; import org.thingsboard.server.common.data.id.EdgeId; import org.thingsboard.server.common.data.id.EntityId; import org.thingsboard.server.common.data.id.TenantId; import org.thingsboard.server.common.data.plugin.ComponentType; import org.thingsboard.server.common.data.relation.EntityRelation; import org.thingsboard.server.common.data.relation.RelationTypeGroup; import org.thingsboard.server.common.data.rule.RuleChainType; import org.thingsboard.server.common.msg.TbMsg; import org.thingsboard.server.common.msg.session.SessionMsgType; import javax.annotation.Nullable; import java.util.List; import java.util.UUID; import static org.thingsboard.rule.engine.api.TbRelationTypes.SUCCESS; @Slf4j @RuleNode( type = ComponentType.ACTION, name = "push to edge", configClazz = EmptyNodeConfiguration.class, nodeDescription = "Pushes messages to edge", nodeDetails = "Pushes messages to edge, if Message Originator assigned to particular edge or is EDGE entity. This node is used only on Cloud instances to push messages from Cloud to Edge. 
Supports only DEVICE, ENTITY_VIEW, ASSET and EDGE Message Originator(s).", uiResources = {"static/rulenode/rulenode-core-config.js", "static/rulenode/rulenode-core-config.css"}, configDirective = "tbNodeEmptyConfig", icon = "cloud_download", ruleChainTypes = RuleChainType.CORE ) public class TbMsgPushToEdgeNode implements TbNode { private EmptyNodeConfiguration config; private static final ObjectMapper json = new ObjectMapper(); @Override public void init(TbContext ctx, TbNodeConfiguration configuration) throws TbNodeException { this.config = TbNodeUtils.convert(configuration, EmptyNodeConfiguration.class); } @Override public void onMsg(TbContext ctx, TbMsg msg) { if (DataConstants.EDGE_MSG_SOURCE.equalsIgnoreCase(msg.getMetaData().getValue(DataConstants.MSG_SOURCE_KEY))) { log.debug("Ignoring msg from the cloud, msg [{}]", msg); return; } if (isSupportedOriginator(msg.getOriginator().getEntityType())) { if (isSupportedMsgType(msg.getType())) { ListenableFuture<EdgeId> getEdgeIdFuture = getEdgeIdByOriginatorId(ctx, ctx.getTenantId(), msg.getOriginator()); Futures.addCallback(getEdgeIdFuture, new FutureCallback<EdgeId>() { @Override public void onSuccess(@Nullable EdgeId edgeId) { EdgeEvent edgeEvent = null; try { edgeEvent = buildEdgeEvent(msg, ctx); edgeEvent.setEdgeId(edgeId); } catch (JsonProcessingException e) { log.error("Failed to build edge event", e); } ListenableFuture<EdgeEvent> saveFuture = ctx.getEdgeEventService().saveAsync(edgeEvent); Futures.addCallback(saveFuture, new FutureCallback<EdgeEvent>() { @Override public void onSuccess(@Nullable EdgeEvent event) { ctx.tellNext(msg, SUCCESS); } @Override public void onFailure(Throwable th) { log.error("Could not save edge event", th); ctx.tellFailure(msg, th); } }, ctx.getDbCallbackExecutor()); } @Override public void onFailure(Throwable t) { ctx.tellFailure(msg, t); } }, ctx.getDbCallbackExecutor()); } else { log.debug("Unsupported msg type {}", msg.getType()); ctx.tellFailure(msg, new RuntimeException("Unsupported msg type '" + msg.getType() + "'")); } } else { log.debug("Unsupported originator type {}", msg.getOriginator().getEntityType()); ctx.tellFailure(msg, new RuntimeException("Unsupported originator type '" + msg.getOriginator().getEntityType() + "'")); } } private EdgeEvent buildEdgeEvent(TbMsg msg, TbContext ctx) throws JsonProcessingException { if (DataConstants.ALARM.equals(msg.getType())) { return buildEdgeEvent(ctx.getTenantId(), ActionType.ADDED, getUUIDFromMsgData(msg), EdgeEventType.ALARM, null); } else { EdgeEventType edgeEventTypeByEntityType = EdgeUtils.getEdgeEventTypeByEntityType(msg.getOriginator().getEntityType()); if (edgeEventTypeByEntityType == null) { log.debug("Edge event type is null. Entity Type {}", msg.getOriginator().getEntityType()); ctx.tellFailure(msg, new RuntimeException("Edge event type is null. 
Entity Type '" + msg.getOriginator().getEntityType() + "'")); } return buildEdgeEvent(ctx.getTenantId(), getActionTypeByMsgType(msg.getType()), msg.getOriginator().getId(), edgeEventTypeByEntityType, json.readTree(msg.getData())); } } private EdgeEvent buildEdgeEvent(TenantId tenantId, ActionType edgeEventAction, UUID entityId, EdgeEventType edgeEventType, JsonNode entityBody) { EdgeEvent edgeEvent = new EdgeEvent(); edgeEvent.setTenantId(tenantId); edgeEvent.setEdgeEventAction(edgeEventAction.name()); edgeEvent.setEntityId(entityId); edgeEvent.setEdgeEventType(edgeEventType); edgeEvent.setEntityBody(entityBody); return edgeEvent; } private UUID getUUIDFromMsgData(TbMsg msg) throws JsonProcessingException { JsonNode data = json.readTree(msg.getData()).get("id"); String id = json.treeToValue(data.get("id"), String.class); return UUID.fromString(id); } private ActionType getActionTypeByMsgType(String msgType) { ActionType actionType; if (SessionMsgType.POST_TELEMETRY_REQUEST.name().equals(msgType)) { actionType = ActionType.TIMESERIES_UPDATED; } else if (SessionMsgType.POST_ATTRIBUTES_REQUEST.name().equals(msgType) || DataConstants.ATTRIBUTES_UPDATED.equals(msgType)) { actionType = ActionType.ATTRIBUTES_UPDATED; } else { actionType = ActionType.ATTRIBUTES_DELETED; } return actionType; } private boolean isSupportedOriginator(EntityType entityType) { switch (entityType) { case DEVICE: case ASSET: case ENTITY_VIEW: case DASHBOARD: return true; default: return false; } } private boolean isSupportedMsgType(String msgType) { return SessionMsgType.POST_TELEMETRY_REQUEST.name().equals(msgType) || SessionMsgType.POST_ATTRIBUTES_REQUEST.name().equals(msgType) || DataConstants.ATTRIBUTES_UPDATED.equals(msgType) || DataConstants.ATTRIBUTES_DELETED.equals(msgType) || DataConstants.ALARM.equals(msgType); } private ListenableFuture<EdgeId> getEdgeIdByOriginatorId(TbContext ctx, TenantId tenantId, EntityId originatorId) { ListenableFuture<List<EntityRelation>> future = ctx.getRelationService().findByToAndTypeAsync(tenantId, originatorId, EntityRelation.CONTAINS_TYPE, RelationTypeGroup.EDGE); return Futures.transform(future, relations -> { if (relations != null && relations.size() > 0) { return new EdgeId(relations.get(0).getFrom().getId()); } else { return null; } }, ctx.getDbCallbackExecutor()); } @Override public void destroy() { } }
Refactoring to handle null cases
rule-engine/rule-engine-components/src/main/java/org/thingsboard/rule/engine/edge/TbMsgPushToEdgeNode.java
Refactoring to handle null cases
Java
apache-2.0
bc37713ce691903642d5644a04f772357ceb062c
0
isharac/carbon-apimgt,wso2/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,pubudu538/carbon-apimgt,wso2/carbon-apimgt,fazlan-nazeem/carbon-apimgt,prasa7/carbon-apimgt,isharac/carbon-apimgt,isharac/carbon-apimgt,chamindias/carbon-apimgt,chamindias/carbon-apimgt,isharac/carbon-apimgt,harsha89/carbon-apimgt,tharindu1st/carbon-apimgt,tharikaGitHub/carbon-apimgt,nuwand/carbon-apimgt,wso2/carbon-apimgt,chamilaadhi/carbon-apimgt,fazlan-nazeem/carbon-apimgt,Rajith90/carbon-apimgt,tharikaGitHub/carbon-apimgt,Rajith90/carbon-apimgt,chamindias/carbon-apimgt,fazlan-nazeem/carbon-apimgt,jaadds/carbon-apimgt,Rajith90/carbon-apimgt,harsha89/carbon-apimgt,tharindu1st/carbon-apimgt,malinthaprasan/carbon-apimgt,fazlan-nazeem/carbon-apimgt,praminda/carbon-apimgt,bhathiya/carbon-apimgt,nuwand/carbon-apimgt,chamindias/carbon-apimgt,tharikaGitHub/carbon-apimgt,chamilaadhi/carbon-apimgt,praminda/carbon-apimgt,pubudu538/carbon-apimgt,malinthaprasan/carbon-apimgt,nuwand/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,prasa7/carbon-apimgt,prasa7/carbon-apimgt,jaadds/carbon-apimgt,ruks/carbon-apimgt,malinthaprasan/carbon-apimgt,jaadds/carbon-apimgt,ruks/carbon-apimgt,bhathiya/carbon-apimgt,tharindu1st/carbon-apimgt,ruks/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,chamilaadhi/carbon-apimgt,malinthaprasan/carbon-apimgt,uvindra/carbon-apimgt,pubudu538/carbon-apimgt,chamilaadhi/carbon-apimgt,tharindu1st/carbon-apimgt,harsha89/carbon-apimgt,uvindra/carbon-apimgt,tharikaGitHub/carbon-apimgt,praminda/carbon-apimgt,uvindra/carbon-apimgt,pubudu538/carbon-apimgt,bhathiya/carbon-apimgt,prasa7/carbon-apimgt,jaadds/carbon-apimgt,bhathiya/carbon-apimgt,ruks/carbon-apimgt,harsha89/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,wso2/carbon-apimgt,uvindra/carbon-apimgt,nuwand/carbon-apimgt,Rajith90/carbon-apimgt
/* * Copyright WSO2 Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.apimgt.impl; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.AxisFault; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.dto.ClientCertificateDTO; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.APIProductIdentifier; import org.wso2.carbon.apimgt.api.model.APIProductResource; import org.wso2.carbon.apimgt.gateway.dto.stub.APIData; import org.wso2.carbon.apimgt.gateway.dto.stub.ResourceData; import org.wso2.carbon.apimgt.impl.certificatemgt.CertificateManagerImpl; import org.wso2.carbon.apimgt.impl.certificatemgt.exceptions.CertificateManagementException; import org.wso2.carbon.apimgt.impl.dao.CertificateMgtDAO; import org.wso2.carbon.apimgt.impl.dto.Environment; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.template.APITemplateBuilder; import org.wso2.carbon.apimgt.impl.utils.APIGatewayAdminClient; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.namespace.QName; import javax.xml.stream.XMLStreamException; public class APIGatewayManager { private static final Log log = LogFactory.getLog(APIGatewayManager.class); private static APIGatewayManager instance; private Map<String, Environment> environments; private boolean debugEnabled = log.isDebugEnabled(); private final String ENDPOINT_PRODUCTION = "_PRODUCTION_"; private final String ENDPOINT_SANDBOX = "_SANDBOX_"; private static final String PRODUCT_PREFIX = "prod"; private static final String PRODUCT_VERSION = "1.0.0"; private APIGatewayManager() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService() .getAPIManagerConfiguration(); environments = config.getApiGatewayEnvironments(); } public synchronized static APIGatewayManager getInstance() { if (instance == null) { instance = new APIGatewayManager(); } return instance; } /** * Publishes an API to all configured Gateways. 
* * @param api * - The API to be published * @param builder * - The template builder * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> publishToGateway(API api, APITemplateBuilder builder, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() == null) { return failedEnvironmentsMap; } long startTime; long endTime; if (debugEnabled) { log.debug("API to be published: " + api.getId()); log.debug("Number of environments to be published to: " + api.getEnvironments().size()); } for (String environmentName : api.getEnvironments()) { long startTimePublishToGateway = System.currentTimeMillis(); Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without publishing if (environment == null) { continue; } APIGatewayAdminClient client; try { client = new APIGatewayAdminClient(environment); String operation; long apiGetStartTime = System.currentTimeMillis(); APIData apiData = client.getApi(tenantDomain, api.getId()); endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Time taken to fetch API Data: " + (endTime - apiGetStartTime) / 1000 + " seconds"); } // If the API exists in the Gateway if (apiData != null) { startTime = System.currentTimeMillis(); // If the Gateway type is 'production' and the production url // has been removed // Or if the Gateway type is 'sandbox' and the sandbox url has // been removed. if ((APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType()) && !APIUtil.isProductionEndpointsExists(api.getEndpointConfig())) || (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType()) && !APIUtil.isSandboxEndpointsExists(api.getEndpointConfig()))) { if (debugEnabled) { log.debug("Removing API " + api.getId().getApiName() + " from Environment " + environment.getName() + " since its relevant URL has been removed."); } client.deleteApi(tenantDomain, api.getId()); if (api.isPublishedDefaultVersion()) { if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.deleteDefaultApi(tenantDomain, api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); undeployCustomSequences(client, api, tenantDomain, environment); unDeployClientCertificates(client, api, tenantDomain); } else { if (debugEnabled) { log.debug("API exists, updating existing API " + api.getId().getApiName() + " in environment " + environment.getName()); } //Deploy the fault sequence first since it has to be available by the time the API is deployed. deployAPIFaultSequence(client, api, tenantDomain, environment); operation = "update"; //Update the API if (api.getImplementation().equalsIgnoreCase(APIConstants.IMPLEMENTATION_TYPE_INLINE)) { client.updateApiForInlineScript(builder, tenantDomain, api.getId()); } else if (api.getImplementation().equalsIgnoreCase(APIConstants.IMPLEMENTATION_TYPE_ENDPOINT)) { client.updateApi(builder, tenantDomain, api.getId()); client.saveEndpoint(api, builder, tenantDomain); } if (api.isDefaultVersion() || api.isPublishedDefaultVersion()) {//api.isPublishedDefaultVersion() check is used to detect and update when context etc. 
is changed in the api which is not the default version but has a published default api if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.updateDefaultApi(builder, tenantDomain, api.getId().getVersion(), api.getId()); } else { client.addDefaultAPI(builder, tenantDomain, api.getId().getVersion(), api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); long customSeqStartTime = System.currentTimeMillis(); //Update the custom sequences of the API updateCustomSequences(client, api, tenantDomain, environment); endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Time taken to deploy custom Sequences: " + (endTime - customSeqStartTime) / 1000 + " seconds"); } updateClientCertificates(client, api, tenantDomain); } endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing API (if the API exists in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } else { // If the Gateway type is 'production' and a production url has // not been specified // Or if the Gateway type is 'sandbox' and a sandbox url has not // been specified startTime = System.currentTimeMillis(); if ((APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType()) && !APIUtil.isProductionEndpointsExists(api.getEndpointConfig())) || (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType()) && !APIUtil.isSandboxEndpointsExists(api.getEndpointConfig()))) { if (debugEnabled) { log.debug("Not adding API to environment " + environment.getName() + " since its endpoint URL " + "cannot be found"); } } else { if (debugEnabled) { log.debug("API does not exist, adding new API " + api.getId().getApiName() + " in environment " + environment.getName()); } //Deploy the fault sequence first since it has to be available by the time the API is deployed. deployAPIFaultSequence(client, api, tenantDomain, environment); deployClientCertificates(client, api, tenantDomain); if (!APIConstants.APITransportType.WS.toString().equals(api.getType())) { //Add the API if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equalsIgnoreCase(api.getImplementation())) { client.addPrototypeApiScriptImpl(builder, tenantDomain, api.getId()); } else if (APIConstants.IMPLEMENTATION_TYPE_ENDPOINT .equalsIgnoreCase(api.getImplementation())) { client.addApi(builder, tenantDomain, api.getId()); client.addEndpoint(api, builder, tenantDomain); } if (api.isDefaultVersion()) { if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.updateDefaultApi(builder, tenantDomain, api.getId().getVersion(), api.getId()); } else { client.addDefaultAPI(builder, tenantDomain, api.getId().getVersion(), api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); //Deploy the custom sequences of the API. 
deployCustomSequences(client, api, tenantDomain, environment); } else { deployWebsocketAPI(api, client); } } endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing API (if the API does not exist in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); log.error("Error occurred when publish to gateway " + environmentName, axisFault); } catch (APIManagementException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (JSONException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (EndpointAdminException ex) { log.error("Error occurred when endpoint add/update operation" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (CertificateManagementException ex) { log.error("Error occurred while adding/updating client certificate in " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } long endTimePublishToGateway = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing to gateway : " + environmentName + " total time taken : " + (endTimePublishToGateway - startTimePublishToGateway) / 1000 + " seconds"); } } updateRemovedClientCertificates(api, tenantDomain); return failedEnvironmentsMap; } /** * Publishes an API Product to all configured Gateways. 
* * @param apiProduct * - The API Product to be published * @param builder * - The template builder * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> publishToGateway(APIProduct apiProduct, APITemplateBuilder builder, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (apiProduct.getEnvironments() == null) { return failedEnvironmentsMap; } long startTime = 0; long startTimePublishToGateway = 0; long apiGetStartTime = 0; APIProductIdentifier apiProductId = apiProduct.getId(); APIIdentifier id = new APIIdentifier(PRODUCT_PREFIX, apiProductId.getName(), PRODUCT_VERSION); if (debugEnabled) { log.debug("API to be published: " + id); log.debug("Number of environments to be published to: " + apiProduct.getEnvironments().size()); } for (String environmentName : apiProduct.getEnvironments()) { if (debugEnabled) { startTimePublishToGateway = System.currentTimeMillis(); } Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without publishing if (environment == null) { continue; } APIGatewayAdminClient client; try { client = new APIGatewayAdminClient(environment); if (debugEnabled) { apiGetStartTime = System.currentTimeMillis(); } APIData apiData = client.getApi(tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Time taken to fetch API Data: " + (endTime - apiGetStartTime) / 1000 + " seconds"); } // If the API exists in the Gateway if (apiData != null) { if (debugEnabled) { startTime = System.currentTimeMillis(); } if (debugEnabled) { log.debug("API exists, updating existing API " + id.getApiName() + " in environment " + environment.getName()); } //Update the API client.updateApi(builder, tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Publishing API (if the API exists in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } else { // If the Gateway type is 'production' and a production url has // not been specified // Or if the Gateway type is 'sandbox' and a sandbox url has not // been specified if (debugEnabled) { startTime = System.currentTimeMillis(); } if (debugEnabled) { log.debug("API does not exist, adding new API " + id.getApiName() + " in environment " + environment.getName()); } //Add the API client.addApi(builder, tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Publishing API (if the API does not exist in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); log.error("Error occurred when publish to gateway " + environmentName, axisFault); } if (debugEnabled) { long endTimePublishToGateway = System.currentTimeMillis(); log.debug("Publishing to gateway : " + environmentName + " total time taken : " + (endTimePublishToGateway - startTimePublishToGateway) / 1000 + " seconds"); } } return failedEnvironmentsMap; } /** * Removed an API from the configured Gateways * * @param api * - The API to be removed * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> removeFromGateway(API api, String 
tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() != null) { for (String environmentName : api.getEnvironments()) { try { Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without removing if (environment == null) { continue; } APIGatewayAdminClient client = new APIGatewayAdminClient(environment); unDeployClientCertificates(client, api, tenantDomain); if(!APIConstants.APITransportType.WS.toString().equals(api.getType())) { APIIdentifier id = api.getId(); if (client.getApi(tenantDomain, id) != null) { if (debugEnabled) { log.debug("Removing API " + api.getId().getApiName() + " From environment " + environment.getName()); } if ("INLINE".equals(api.getImplementation()) || "MARKDOWN".equals(api.getImplementation())) { client.deleteApi(tenantDomain, api.getId()); undeployCustomSequences(client, api, tenantDomain, environment); } else { client.deleteEndpoint(api, tenantDomain); client.deleteApi(tenantDomain, api.getId()); undeployCustomSequences(client, api, tenantDomain, environment); } } } else { String fileName = api.getContext().replace('/', '-'); String[] fileNames = new String[2]; fileNames[0] = ENDPOINT_PRODUCTION + fileName; fileNames[1] = ENDPOINT_SANDBOX + fileName; if (client.isExistingSequence(fileNames[0], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileNames[0], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } if (client.isExistingSequence(fileNames[1], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileNames[1], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } if (api.isPublishedDefaultVersion()) { APIIdentifier id = api.getId(); if (client.getDefaultApi(tenantDomain, id) != null) { client.deleteDefaultApi(tenantDomain, api.getId()); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error occurred when removing from gateway " + environmentName, axisFault); failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); } catch (EndpointAdminException ex) { log.error("Error occurred when deleting endpoint from gateway" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (CertificateManagementException ex) { log.error("Error occurred when deleting certificate from gateway" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } } updateRemovedClientCertificates(api, tenantDomain); } return failedEnvironmentsMap; } /** * add websoocket api to the gateway * * @param api * @param client * @throws APIManagementException */ public void deployWebsocketAPI(API api, APIGatewayAdminClient client) throws APIManagementException, JSONException { try { String production_endpoint = null; String sandbox_endpoint = null; JSONObject obj = new JSONObject(api.getEndpointConfig()); if (obj.has(APIConstants.API_DATA_PRODUCTION_ENDPOINTS)) { production_endpoint = obj.getJSONObject(APIConstants.API_DATA_PRODUCTION_ENDPOINTS).getString("url"); } if (obj.has(APIConstants.API_DATA_SANDBOX_ENDPOINTS)) { sandbox_endpoint = obj.getJSONObject(APIConstants.API_DATA_SANDBOX_ENDPOINTS).getString("url"); } OMElement element; try { if (production_endpoint != null) { String content = 
createSeqString(api, production_endpoint, ENDPOINT_PRODUCTION); element = AXIOMUtil.stringToOM(content); String fileName = element.getAttributeValue(new QName("name")); if (client.isExistingSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } client.addSequence(element, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } if (sandbox_endpoint != null) { String content = createSeqString(api, sandbox_endpoint, ENDPOINT_SANDBOX); element = AXIOMUtil.stringToOM(content); String fileName = element.getAttributeValue(new QName("name")); if (client.isExistingSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } client.addSequence(element, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } catch (AxisFault e) { String msg = "Error while parsing the policy to get the eligibility query: "; log.error(msg, e); throw new APIManagementException(msg); } } catch (XMLStreamException e) { String msg = "Error while parsing the policy to get the eligibility query: "; log.error(msg, e); throw new APIManagementException(msg); } } /** * add new api version at the API Gateway * * @param artifact * @param api */ public void createNewWebsocketApiVersion(GenericArtifact artifact, API api) { try { APIGatewayManager gatewayManager = APIGatewayManager.getInstance(); APIGatewayAdminClient client; Set<String> environments = APIUtil.extractEnvironmentsForAPI( artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS)); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); for (String environmentName : environments) { Environment environment = this.environments.get(environmentName); client = new APIGatewayAdminClient(environment); try { gatewayManager.deployWebsocketAPI(api, client); } catch (JSONException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); } } } catch (APIManagementException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } catch (AxisFault ex) { log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } catch (GovernanceException ex) { log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } } /** * create body of sequence * * @param api * @param url * @return */ public String createSeqString(API api, String url, String urltype) throws JSONException { String context = api.getContext(); context = urltype + context; String[] endpointConfig = websocketEndpointConfig(api, urltype); String timeout = endpointConfig[0]; String suspendOnFailure = endpointConfig[1]; String markForSuspension = endpointConfig[2]; String endpointConf = "<default>\n" + "\t<timeout>\n" + timeout + "\t</timeout>\n" + "\t<suspendOnFailure>\n" + suspendOnFailure + "\n" + "\t</suspendOnFailure>\n" + "\t<markForSuspension>\n" + markForSuspension + 
"\t</markForSuspension>\n" + "</default>"; String seq = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<sequence xmlns=\"http://ws.apache.org/ns/synapse\" name=\"" + context.replace('/', '-') + "\">\n" + " <property name=\"OUT_ONLY\" value=\"true\"/>\n" + " <script language=\"js\">var sub_path = mc.getProperty(\"websocket.subscriber.path\");\t \n" + " \tvar queryParamString = sub_path.split(\"\\\\?\")[1];\n" + " if(queryParamString != undefined) {\t \n" + "\t\tmc.setProperty('queryparams', \"?\" + queryParamString);\n" + "\t\t}\t\t\n" + " </script>\n" + " <property xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns=\"http://org.apache.synapse/xsd\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"queryparams\"\n" + " expression=\"$ctx:queryparams\"/>\n" + " <property name=\"urlVal\" value=\""+ url + "\"/>\n" + " <property xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"fullUrl\"\n" + " expression=\"fn:concat(get-property('urlVal'), get-property('queryparams'))\"\n" + " type=\"STRING\"/>\n" + " <header xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"To\"\n" + " expression=\"$ctx:fullUrl\"/>\n" + " <send>\n" + " <endpoint>\n" + endpointConf + "\n" + " </endpoint>\n" + " </send>\n" + "</sequence>"; return seq; } public Map<String, String> removeDefaultAPIFromGateway(API api, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() != null) { for (String environmentName : api.getEnvironments()) { try { Environment environment = environments.get(environmentName); APIGatewayAdminClient client = new APIGatewayAdminClient(environment); APIIdentifier id = api.getId(); if (client.getDefaultApi(tenantDomain, id) != null) { if (debugEnabled) { log.debug("Removing Default API " + api.getId().getApiName() + " From environment " + environment.getName()); } client.deleteDefaultApi(tenantDomain, api.getId()); } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error occurred when removing default api from gateway " + environmentName, axisFault); failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); } } } return failedEnvironmentsMap; } /** * Checks whether the API has been published. * * @param api * - The API to be cheked. * @param tenantDomain * - Tenant Domain of the publisher * @return True if the API is available in at least one Gateway. False if * available in none. */ public boolean isAPIPublished(API api, String tenantDomain)throws APIManagementException { for (Environment environment : environments.values()) { try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); // If the API exists in at least one environment, consider as // published and return true. 
APIIdentifier id = api.getId(); if (client.getApi(tenantDomain, id) != null) { return true; } } catch (AxisFault axisFault) { /* didn't throw this exception to check api available in all the environments therefore we didn't throw exception to avoid if gateway unreachable affect */ if (!APIConstants.CREATED.equals(api.getStatus())) { log.error("Error occurred when check api is published on gateway" + environment.getName(), axisFault); } } } return false; } /** * Get the endpoint Security type of the published API * * @param api - The API to be checked. * @param tenantDomain - Tenant Domain of the publisher * @return Endpoint security type; Basic or Digest */ public String getAPIEndpointSecurityType(API api, String tenantDomain) throws APIManagementException { for (Environment environment : environments.values()) { try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); APIIdentifier id = api.getId(); APIData apiData = client.getApi(tenantDomain, id); if (apiData != null) { ResourceData[] resourceData = apiData.getResources(); for (ResourceData resource : resourceData) { if (resource != null && resource.getInSeqXml() != null && resource.getInSeqXml().contains("DigestAuthMediator")) { return APIConstants.APIEndpointSecurityConstants.DIGEST_AUTH; } } } } catch (AxisFault axisFault) { // didn't throw this exception to check api available in all the environments // therefore we didn't throw exception to avoid if gateway unreachable affect if (!APIConstants.CREATED.equals(api.getStatus())) { log.error("Error occurred when check api endpoint security type on gateway" + environment.getName(), axisFault); } } } return APIConstants.APIEndpointSecurityConstants.BASIC_AUTH; } public void setProductResourceSequences(APIProviderImpl apiProvider, APIProduct apiProduct, String tenantDomain) throws APIManagementException { for (APIProductResource resource : apiProduct.getProductResources()) { APIIdentifier apiIdentifier = resource.getApiIdentifier(); API api = apiProvider.getAPI(apiIdentifier); for (String environmentName : api.getEnvironments()) { Environment environment = environments.get(environmentName); try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); String inSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; if (client.isExistingSequence(inSequenceKey, tenantDomain)) { resource.setInSequenceName(inSequenceKey); } String outSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; if (client.isExistingSequence(outSequenceKey, tenantDomain)) { resource.setOutSequenceName(outSequenceKey); } String faultSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; if (client.isExistingSequence(faultSequenceKey, tenantDomain)) { resource.setFaultSequenceName(faultSequenceKey); } } catch (AxisFault axisFault) { throw new APIManagementException("Error occurred while checking if product resources " + "have custom sequences", axisFault); } } } } /** * To deploy client certificate in given API environment. * * @param client API GatewayAdminClient . * @param api Relevant API. * @param tenantDomain Tenant domain. * @throws CertificateManagementException Certificate Management Exception. * @throws AxisFault AxisFault. 
*/ private void deployClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.addClientCertificate(clientCertificateDTO.getCertificate(), clientCertificateDTO.getAlias() + "_" + tenantId); } } } /** * To update client certificates in the relevant API gateway environment. * * @param client API Gateway admin client. * @param api Relevant API. * @param tenantDomain Tenant domain. */ private void updateClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<String> aliasList = CertificateMgtDAO.getInstance() .getDeletedClientCertificateAlias(api.getId(), tenantId); for (String alias : aliasList) { client.deleteClientCertificate(alias + "_" + tenantId); } List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.addClientCertificate(clientCertificateDTO.getCertificate(), clientCertificateDTO.getAlias() + "_" + tenantId); } } } /** * To update the database instance with the client certificates that were successfully removed from the gateway. * * @param api Relevant API related to the removed certificate. * @param tenantDomain Tenant domain of the API. */ private void updateRemovedClientCertificates(API api, String tenantDomain) { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } try { CertificateMgtDAO.getInstance().updateRemovedCertificatesFromGateways(api.getId(), APIUtil.getTenantIdFromTenantDomain(tenantDomain)); /* The flow does not need to be blocked, as this failure is not related to updating client certificates in the gateway, only to updating the database. There is no harm in the database having outdated certificate information.*/ } catch (CertificateManagementException e) { log.error("Certificate Management Exception while trying to update the removed certificates from gateways " + "for the api " + api.getId() + " for the tenant domain " + tenantDomain, e); } } /** * To undeploy the client certificates from the gateway environment. * * @param client APIGatewayAdmin Client. * @param api Relevant API the particular certificate is related to. * @param tenantDomain Tenant domain of the API. * @throws CertificateManagementException Certificate Management Exception. * @throws AxisFault AxisFault.
*/ private void unDeployClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.deleteClientCertificate(clientCertificateDTO.getAlias() + "_" + tenantId); } } List<String> aliasList = CertificateMgtDAO.getInstance() .getDeletedClientCertificateAlias(api.getId(), tenantId); for (String alias : aliasList) { client.deleteClientCertificate(alias + "_" + tenantId); } } /** * Get the specified in/out sequences from api object * * @param api -API object * @param tenantDomain * @param environment * @throws APIManagementException * @throws AxisFault */ private void deployCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (APIUtil.isSequenceDefined(api.getInSequence())) { deployInSequence(client, api, tenantId, tenantDomain, environment); } if (APIUtil.isSequenceDefined(api.getOutSequence())) { deployOutSequence(client, api, tenantId, tenantDomain, environment); } } catch (Exception e) { String msg = "Error in deploying the sequence to gateway"; log.error(msg, e); throw new APIManagementException(msg); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } private void deployInSequence(APIGatewayAdminClient sequenceAdminServiceClient, API api, int tenantId, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { String inSequenceName = api.getInSequence(); OMElement inSequence = APIUtil.getCustomSequence(inSequenceName, tenantId, "in", api.getId()); if (inSequence != null) { String inSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; if (inSequence.getAttribute(new QName("name")) != null) { inSequence.getAttribute(new QName("name")).setAttributeValue(inSeqExt); } sequenceAdminServiceClient.addSequence(inSequence, tenantDomain); } } private void deployOutSequence(APIGatewayAdminClient client, API api, int tenantId, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { String outSequenceName = api.getOutSequence(); OMElement outSequence = APIUtil.getCustomSequence(outSequenceName, tenantId, "out", api.getId()); if (outSequence != null) { String outSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; if (outSequence.getAttribute(new QName("name")) != null) { outSequence.getAttribute(new QName("name")).setAttributeValue(outSeqExt); } client.addSequence(outSequence, tenantDomain); } } /** * Undeploy the sequences deployed in synapse * * @param api * @param 
tenantDomain * @param environment * @throws APIManagementException */ private void undeployCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else{ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } if (APIUtil.isSequenceDefined(api.getInSequence())) { String inSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; client.deleteSequence(inSequence, tenantDomain); } if (APIUtil.isSequenceDefined(api.getOutSequence())) { String outSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; client.deleteSequence(outSequence, tenantDomain); } if (APIUtil.isSequenceDefined(api.getFaultSequence())) { String faultSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; if(client.isExistingSequence(faultSequence, tenantDomain)) { client.deleteSequence(faultSequence, tenantDomain); } } } catch (Exception e) { String msg = "Error in deleting the sequence from gateway"; log.error(msg, e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Update the custom sequences in gateway * @param api * @param tenantDomain * @param environment * @throws APIManagementException */ private void updateCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException { //If sequences have been added, updated or removed. if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getOldInSequence()) || APIUtil.isSequenceDefined(api.getOldOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else{ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); //If an inSequence has been added, updated or removed. if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOldInSequence())) { String inSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; //If sequence already exists if (client.isExistingSequence(inSequenceKey, tenantDomain)) { //Delete existing sequence client.deleteSequence(inSequenceKey, tenantDomain); } //If an inSequence has been added or updated. if(APIUtil.isSequenceDefined(api.getInSequence())){ //Deploy the inSequence deployInSequence(client, api, tenantId, tenantDomain, environment); } } //If an outSequence has been added, updated or removed. if (APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getOldOutSequence())) { String outSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; //If the outSequence exists. if (client.isExistingSequence(outSequence, tenantDomain)) { //Delete existing outSequence client.deleteSequence(outSequence, tenantDomain); } //If an outSequence has been added or updated. 
if (APIUtil.isSequenceDefined(api.getOutSequence())){ //Deploy outSequence deployOutSequence(client, api, tenantId, tenantDomain, environment); } } } catch (Exception e) { String msg = "Error in updating the sequence at the Gateway"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } private void deployAPIFaultSequence(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException { String faultSequenceName = api.getFaultSequence(); String faultSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; boolean isTenantFlowStarted = false; try { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; if (!StringUtils.isEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } //If a fault sequence has be defined. if (APIUtil.isSequenceDefined(faultSequenceName)) { int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); //If the sequence already exists if (client.isExistingSequence(faultSeqExt, tenantDomain)) { //Delete the sequence. We need to redeploy afterwards since the sequence may have been updated. client.deleteSequence(faultSeqExt, tenantDomain); } //Get the fault sequence xml OMElement faultSequence = APIUtil.getCustomSequence(faultSequenceName, tenantId, APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT, api.getId()); if (faultSequence != null) { if (APIUtil.isPerAPISequence(faultSequenceName, tenantId, api.getId(), APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT)) { if (faultSequence.getAttribute(new QName("name")) != null) { faultSequence.getAttribute(new QName("name")).setAttributeValue(faultSeqExt); } } else { //If the previous sequence was a per API fault sequence delete it if (client.isExistingSequence(faultSequenceName, tenantDomain)) { client.deleteSequence(faultSequenceName, tenantDomain); } } //Deploy the fault sequence client.addSequence(faultSequence, tenantDomain); } } else { if (client.isExistingSequence(faultSeqExt, tenantDomain)) { client.deleteSequence(faultSeqExt, tenantDomain); } } } catch (AxisFault e) { String msg = "Error while updating the fault sequence at the Gateway"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Store the secured endpoint username password to registry * @param api * @param tenantDomain * @param environment * @throws APIManagementException */ private void setSecureVaultProperty(APIGatewayAdminClient securityAdminClient, API api, String tenantDomain, Environment environment) throws APIManagementException { boolean isSecureVaultEnabled = Boolean.parseBoolean(ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService(). 
getAPIManagerConfiguration().getFirstProperty(APIConstants.API_SECUREVAULT_ENABLE)); if (api.isEndpointSecured() && isSecureVaultEnabled) { try { securityAdminClient.setSecureVaultProperty(api, tenantDomain); } catch (Exception e) { String msg = "Error in setting secured password."; log.error(msg + ' ' + e.getLocalizedMessage(), e); throw new APIManagementException(msg); } } } /** * Construct the timeout, suspendOnFailure, markForSuspension to add suspend * configuration to the websocket endpoint (Simply assign config values according to the endpoint-template) * * @param api * * @param urlType * - Whether production or sandbox * @return timeout, suspendOnFailure, markForSuspension which will use to construct the endpoint configuration * */ private String[] websocketEndpointConfig(API api, String urlType) throws JSONException { JSONObject obj = new JSONObject(api.getEndpointConfig()); JSONObject endpointObj = null; if (ENDPOINT_PRODUCTION.equalsIgnoreCase(urlType)) { endpointObj = obj.getJSONObject(APIConstants.API_DATA_PRODUCTION_ENDPOINTS).getJSONObject("config"); } else if (ENDPOINT_SANDBOX.equalsIgnoreCase(urlType)) { endpointObj = obj.getJSONObject(APIConstants.API_DATA_SANDBOX_ENDPOINTS).getJSONObject("config"); } String duration = (endpointObj.has("actionDuration")) ? "\t\t<duration>" + endpointObj.get("actionDuration") + "</duration>\n" : ""; String responseAction = (endpointObj.has("actionSelect")) ? "\t\t<responseAction>" + endpointObj.get("actionSelect") + "</responseAction>\n" : ""; String timeout = duration + "\n" + responseAction; String retryErrorCode; String suspendErrorCode ; if (endpointObj.has("suspendDuration")) { //Avoid suspending the endpoint when suspend duration is zero if (Integer.parseInt(endpointObj.get("suspendDuration").toString()) == 0) { String suspendOnFailure = "\t\t<errorCodes>-1</errorCodes>\n" + "\t\t<initialDuration>0</initialDuration>\n" + "\t\t<progressionFactor>1.0</progressionFactor>\n" + "\t\t<maximumDuration>0</maximumDuration>"; String markForSuspension = "\t\t<errorCodes>-1</errorCodes>"; return new String[]{timeout, suspendOnFailure, markForSuspension}; } } if (endpointObj.has("suspendErrorCode")) { //When there are/is multiple/single suspend error codes if (endpointObj.get("suspendErrorCode") instanceof JSONArray) { String suspendCodeList = ""; for (int i = 0; i < endpointObj.getJSONArray("suspendErrorCode").length(); i++) { suspendCodeList = suspendCodeList + endpointObj.getJSONArray("suspendErrorCode").get(i).toString() + ","; } suspendErrorCode = "\t\t<errorCodes>" + suspendCodeList.substring(0, suspendCodeList.length() - 1) + "</errorCodes>"; } else { suspendErrorCode = "\t\t<errorCodes>" + endpointObj.get("suspendErrorCode") + "</errorCodes>"; } } else { suspendErrorCode = ""; } String suspendDuration = (endpointObj.has("suspendDuration")) ? "\t\t<initialDuration>" + endpointObj.get("suspendDuration").toString() + "</initialDuration>" : ""; String suspendMaxDuration = (endpointObj.has("suspendMaxDuration")) ? "\t\t<maximumDuration>" + endpointObj.get("suspendMaxDuration") + "</maximumDuration>" : ""; String factor = (endpointObj.has("factor")) ? 
"\t\t<progressionFactor>" + endpointObj.get("factor") + "</progressionFactor>" : ""; String suspendOnFailure = suspendErrorCode + "\n" + suspendDuration + "\n" + suspendMaxDuration + "\n" + factor; if (endpointObj.has("retryErroCode")) { //When there are/is multiple/single retry error codes if (endpointObj.get("retryErroCode") instanceof JSONArray) { String retryCodeList = ""; for (int i = 0; i < endpointObj.getJSONArray("retryErroCode").length(); i++) { retryCodeList = retryCodeList + endpointObj.getJSONArray("retryErroCode").get(i).toString() + ","; } retryErrorCode = "\t\t<errorCodes>" + retryCodeList.substring(0, retryCodeList.length() - 1) + "</errorCodes>"; } else { retryErrorCode = "\t\t<errorCodes>" + endpointObj.get("retryErroCode") + "</errorCodes>"; } } else { retryErrorCode = ""; } String retryTimeOut = (endpointObj.has("retryTimeOut")) ? "\t\t<retriesBeforeSuspension>" + endpointObj.get("retryTimeOut") + "</retriesBeforeSuspension>" : ""; String retryDelay = (endpointObj.has("retryDelay")) ? "\t\t<retryDelay>" + endpointObj.get("retryDelay") + "</retryDelay>" : ""; String markForSuspension = retryErrorCode + "\n" + retryTimeOut + "\n" + retryDelay; return new String[]{timeout, suspendOnFailure, markForSuspension}; } }
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIGatewayManager.java
/* * Copyright WSO2 Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.apimgt.impl; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.AxisFault; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.json.JSONException; import org.json.JSONObject; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.dto.ClientCertificateDTO; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.APIProductIdentifier; import org.wso2.carbon.apimgt.api.model.APIProductResource; import org.wso2.carbon.apimgt.gateway.dto.stub.APIData; import org.wso2.carbon.apimgt.gateway.dto.stub.ResourceData; import org.wso2.carbon.apimgt.impl.certificatemgt.CertificateManagerImpl; import org.wso2.carbon.apimgt.impl.certificatemgt.exceptions.CertificateManagementException; import org.wso2.carbon.apimgt.impl.dao.CertificateMgtDAO; import org.wso2.carbon.apimgt.impl.dto.Environment; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.template.APITemplateBuilder; import org.wso2.carbon.apimgt.impl.utils.APIGatewayAdminClient; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.namespace.QName; import javax.xml.stream.XMLStreamException; public class APIGatewayManager { private static final Log log = LogFactory.getLog(APIGatewayManager.class); private static APIGatewayManager instance; private Map<String, Environment> environments; private boolean debugEnabled = log.isDebugEnabled(); private final String ENDPOINT_PRODUCTION = "_PRODUCTION_"; private final String ENDPOINT_SANDBOX = "_SANDBOX_"; private static final String PRODUCT_PREFIX = "prod"; private static final String PRODUCT_VERSION = "1.0.0"; private APIGatewayManager() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService() .getAPIManagerConfiguration(); environments = config.getApiGatewayEnvironments(); } public synchronized static APIGatewayManager getInstance() { if (instance == null) { instance = new APIGatewayManager(); } return instance; } /** * Publishes an API to all configured Gateways. 
* * @param api * - The API to be published * @param builder * - The template builder * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> publishToGateway(API api, APITemplateBuilder builder, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() == null) { return failedEnvironmentsMap; } long startTime; long endTime; if (debugEnabled) { log.debug("API to be published: " + api.getId()); log.debug("Number of environments to be published to: " + api.getEnvironments().size()); } for (String environmentName : api.getEnvironments()) { long startTimePublishToGateway = System.currentTimeMillis(); Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without publishing if (environment == null) { continue; } APIGatewayAdminClient client; try { client = new APIGatewayAdminClient(environment); String operation; long apiGetStartTime = System.currentTimeMillis(); APIData apiData = client.getApi(tenantDomain, api.getId()); endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Time taken to fetch API Data: " + (endTime - apiGetStartTime) / 1000 + " seconds"); } // If the API exists in the Gateway if (apiData != null) { startTime = System.currentTimeMillis(); // If the Gateway type is 'production' and the production url // has been removed // Or if the Gateway type is 'sandbox' and the sandbox url has // been removed. if ((APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType()) && !APIUtil.isProductionEndpointsExists(api.getEndpointConfig())) || (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType()) && !APIUtil.isSandboxEndpointsExists(api.getEndpointConfig()))) { if (debugEnabled) { log.debug("Removing API " + api.getId().getApiName() + " from Environment " + environment.getName() + " since its relevant URL has been removed."); } client.deleteApi(tenantDomain, api.getId()); if (api.isPublishedDefaultVersion()) { if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.deleteDefaultApi(tenantDomain, api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); undeployCustomSequences(client, api, tenantDomain, environment); unDeployClientCertificates(client, api, tenantDomain); } else { if (debugEnabled) { log.debug("API exists, updating existing API " + api.getId().getApiName() + " in environment " + environment.getName()); } //Deploy the fault sequence first since it has to be available by the time the API is deployed. deployAPIFaultSequence(client, api, tenantDomain, environment); operation = "update"; //Update the API if (api.getImplementation().equalsIgnoreCase(APIConstants.IMPLEMENTATION_TYPE_INLINE)) { client.updateApiForInlineScript(builder, tenantDomain, api.getId()); } else if (api.getImplementation().equalsIgnoreCase(APIConstants.IMPLEMENTATION_TYPE_ENDPOINT)) { client.updateApi(builder, tenantDomain, api.getId()); client.saveEndpoint(api, builder, tenantDomain); } if (api.isDefaultVersion() || api.isPublishedDefaultVersion()) {//api.isPublishedDefaultVersion() check is used to detect and update when context etc. 
is changed in the api which is not the default version but has a published default api if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.updateDefaultApi(builder, tenantDomain, api.getId().getVersion(), api.getId()); } else { client.addDefaultAPI(builder, tenantDomain, api.getId().getVersion(), api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); long customSeqStartTime = System.currentTimeMillis(); //Update the custom sequences of the API updateCustomSequences(client, api, tenantDomain, environment); endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Time taken to deploy custom Sequences: " + (endTime - customSeqStartTime) / 1000 + " seconds"); } updateClientCertificates(client, api, tenantDomain); } endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing API (if the API exists in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } else { // If the Gateway type is 'production' and a production url has // not been specified // Or if the Gateway type is 'sandbox' and a sandbox url has not // been specified startTime = System.currentTimeMillis(); if ((APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType()) && !APIUtil.isProductionEndpointsExists(api.getEndpointConfig())) || (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType()) && !APIUtil.isSandboxEndpointsExists(api.getEndpointConfig()))) { if (debugEnabled) { log.debug("Not adding API to environment " + environment.getName() + " since its endpoint URL " + "cannot be found"); } } else { if (debugEnabled) { log.debug("API does not exist, adding new API " + api.getId().getApiName() + " in environment " + environment.getName()); } //Deploy the fault sequence first since it has to be available by the time the API is deployed. deployAPIFaultSequence(client, api, tenantDomain, environment); deployClientCertificates(client, api, tenantDomain); if (!APIConstants.APITransportType.WS.toString().equals(api.getType())) { //Add the API if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equalsIgnoreCase(api.getImplementation())) { client.addPrototypeApiScriptImpl(builder, tenantDomain, api.getId()); } else if (APIConstants.IMPLEMENTATION_TYPE_ENDPOINT .equalsIgnoreCase(api.getImplementation())) { client.addApi(builder, tenantDomain, api.getId()); client.addEndpoint(api, builder, tenantDomain); } if (api.isDefaultVersion()) { if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.updateDefaultApi(builder, tenantDomain, api.getId().getVersion(), api.getId()); } else { client.addDefaultAPI(builder, tenantDomain, api.getId().getVersion(), api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); //Deploy the custom sequences of the API. 
deployCustomSequences(client, api, tenantDomain, environment); } else { deployWebsocketAPI(api, client); } } endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing API (if the API does not exist in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); log.error("Error occurred when publish to gateway " + environmentName, axisFault); } catch (APIManagementException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (JSONException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (EndpointAdminException ex) { log.error("Error occurred when endpoint add/update operation" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (CertificateManagementException ex) { log.error("Error occurred while adding/updating client certificate in " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } long endTimePublishToGateway = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing to gateway : " + environmentName + " total time taken : " + (endTimePublishToGateway - startTimePublishToGateway) / 1000 + " seconds"); } } updateRemovedClientCertificates(api, tenantDomain); return failedEnvironmentsMap; } /** * Publishes an API Product to all configured Gateways. 
* * @param apiProduct * - The API Product to be published * @param builder * - The template builder * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> publishToGateway(APIProduct apiProduct, APITemplateBuilder builder, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (apiProduct.getEnvironments() == null) { return failedEnvironmentsMap; } long startTime = 0; long startTimePublishToGateway = 0; long apiGetStartTime = 0; APIProductIdentifier apiProductId = apiProduct.getId(); APIIdentifier id = new APIIdentifier(PRODUCT_PREFIX, apiProductId.getName(), PRODUCT_VERSION); if (debugEnabled) { log.debug("API to be published: " + id); log.debug("Number of environments to be published to: " + apiProduct.getEnvironments().size()); } for (String environmentName : apiProduct.getEnvironments()) { if (debugEnabled) { startTimePublishToGateway = System.currentTimeMillis(); } Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without publishing if (environment == null) { continue; } APIGatewayAdminClient client; try { client = new APIGatewayAdminClient(environment); if (debugEnabled) { apiGetStartTime = System.currentTimeMillis(); } APIData apiData = client.getApi(tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Time taken to fetch API Data: " + (endTime - apiGetStartTime) / 1000 + " seconds"); } // If the API exists in the Gateway if (apiData != null) { if (debugEnabled) { startTime = System.currentTimeMillis(); } if (debugEnabled) { log.debug("API exists, updating existing API " + id.getApiName() + " in environment " + environment.getName()); } //Update the API client.updateApi(builder, tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Publishing API (if the API exists in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } else { // If the Gateway type is 'production' and a production url has // not been specified // Or if the Gateway type is 'sandbox' and a sandbox url has not // been specified if (debugEnabled) { startTime = System.currentTimeMillis(); } if (debugEnabled) { log.debug("API does not exist, adding new API " + id.getApiName() + " in environment " + environment.getName()); } //Add the API client.addApi(builder, tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Publishing API (if the API does not exist in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); log.error("Error occurred when publish to gateway " + environmentName, axisFault); } if (debugEnabled) { long endTimePublishToGateway = System.currentTimeMillis(); log.debug("Publishing to gateway : " + environmentName + " total time taken : " + (endTimePublishToGateway - startTimePublishToGateway) / 1000 + " seconds"); } } return failedEnvironmentsMap; } /** * Removed an API from the configured Gateways * * @param api * - The API to be removed * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> removeFromGateway(API api, String 
tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() != null) { for (String environmentName : api.getEnvironments()) { try { Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without removing if (environment == null) { continue; } APIGatewayAdminClient client = new APIGatewayAdminClient(environment); unDeployClientCertificates(client, api, tenantDomain); if(!APIConstants.APITransportType.WS.toString().equals(api.getType())) { APIIdentifier id = api.getId(); if (client.getApi(tenantDomain, id) != null) { if (debugEnabled) { log.debug("Removing API " + api.getId().getApiName() + " From environment " + environment.getName()); } if ("INLINE".equals(api.getImplementation()) || "MARKDOWN".equals(api.getImplementation())) { client.deleteApi(tenantDomain, api.getId()); undeployCustomSequences(client, api, tenantDomain, environment); } else { client.deleteEndpoint(api, tenantDomain); client.deleteApi(tenantDomain, api.getId()); undeployCustomSequences(client, api, tenantDomain, environment); } } } else { String fileName = api.getContext().replace('/', '-'); String[] fileNames = new String[2]; fileNames[0] = ENDPOINT_PRODUCTION + fileName; fileNames[1] = ENDPOINT_SANDBOX + fileName; if (client.isExistingSequence(fileNames[0], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileNames[0], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } if (client.isExistingSequence(fileNames[1], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileNames[1], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } if (api.isPublishedDefaultVersion()) { APIIdentifier id = api.getId(); if (client.getDefaultApi(tenantDomain, id) != null) { client.deleteDefaultApi(tenantDomain, api.getId()); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error occurred when removing from gateway " + environmentName, axisFault); failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); } catch (EndpointAdminException ex) { log.error("Error occurred when deleting endpoint from gateway" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (CertificateManagementException ex) { log.error("Error occurred when deleting certificate from gateway" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } } updateRemovedClientCertificates(api, tenantDomain); } return failedEnvironmentsMap; } /** * add websoocket api to the gateway * * @param api * @param client * @throws APIManagementException */ public void deployWebsocketAPI(API api, APIGatewayAdminClient client) throws APIManagementException, JSONException { try { String production_endpoint = null; String sandbox_endpoint = null; JSONObject obj = new JSONObject(api.getEndpointConfig()); if (obj.has(APIConstants.API_DATA_PRODUCTION_ENDPOINTS)) { production_endpoint = obj.getJSONObject(APIConstants.API_DATA_PRODUCTION_ENDPOINTS).getString("url"); } if (obj.has(APIConstants.API_DATA_SANDBOX_ENDPOINTS)) { sandbox_endpoint = obj.getJSONObject(APIConstants.API_DATA_SANDBOX_ENDPOINTS).getString("url"); } OMElement element; try { if (production_endpoint != null) { String content = 
createSeqString(api, production_endpoint, ENDPOINT_PRODUCTION); element = AXIOMUtil.stringToOM(content); String fileName = element.getAttributeValue(new QName("name")); if (client.isExistingSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } client.addSequence(element, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } if (sandbox_endpoint != null) { String content = createSeqString(api, sandbox_endpoint, ENDPOINT_SANDBOX); element = AXIOMUtil.stringToOM(content); String fileName = element.getAttributeValue(new QName("name")); if (client.isExistingSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } client.addSequence(element, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } catch (AxisFault e) { String msg = "Error while parsing the policy to get the eligibility query: "; log.error(msg, e); throw new APIManagementException(msg); } } catch (XMLStreamException e) { String msg = "Error while parsing the policy to get the eligibility query: "; log.error(msg, e); throw new APIManagementException(msg); } } /** * add new api version at the API Gateway * * @param artifact * @param api */ public void createNewWebsocketApiVersion(GenericArtifact artifact, API api) { try { APIGatewayManager gatewayManager = APIGatewayManager.getInstance(); APIGatewayAdminClient client; Set<String> environments = APIUtil.extractEnvironmentsForAPI( artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS)); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); for (String environmentName : environments) { Environment environment = this.environments.get(environmentName); client = new APIGatewayAdminClient(environment); try { gatewayManager.deployWebsocketAPI(api, client); } catch (JSONException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); } } } catch (APIManagementException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } catch (AxisFault ex) { log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } catch (GovernanceException ex) { log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } } /** * create body of sequence * * @param api * @param url * @return */ public String createSeqString(API api, String url, String urltype) { String context = api.getContext(); context = urltype + context; String seq = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<sequence xmlns=\"http://ws.apache.org/ns/synapse\" name=\"" + context.replace('/', '-') + "\">\n" + " <property name=\"OUT_ONLY\" value=\"true\"/>\n" + " <script language=\"js\">var sub_path = mc.getProperty(\"websocket.subscriber.path\");\t \n" + " \tvar queryParamString = sub_path.split(\"\\\\?\")[1];\n" + " if(queryParamString != undefined) {\t \n" + 
"\t\tmc.setProperty('queryparams', \"?\" + queryParamString);\n" + "\t\t}\t\t\n" + " </script>\n" + " <property xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns=\"http://org.apache.synapse/xsd\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"queryparams\"\n" + " expression=\"$ctx:queryparams\"/>\n" + " <property name=\"urlVal\" value=\""+ url + "\"/>\n" + " <property xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"fullUrl\"\n" + " expression=\"fn:concat(get-property('urlVal'), get-property('queryparams'))\"\n" + " type=\"STRING\"/>\n" + " <header xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"To\"\n" + " expression=\"$ctx:fullUrl\"/>\n" + " <send>\n" + " <endpoint>\n" + " <default/>\n" + " </endpoint>\n" + " </send>\n" + "</sequence>"; return seq; } public Map<String, String> removeDefaultAPIFromGateway(API api, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() != null) { for (String environmentName : api.getEnvironments()) { try { Environment environment = environments.get(environmentName); APIGatewayAdminClient client = new APIGatewayAdminClient(environment); APIIdentifier id = api.getId(); if (client.getDefaultApi(tenantDomain, id) != null) { if (debugEnabled) { log.debug("Removing Default API " + api.getId().getApiName() + " From environment " + environment.getName()); } client.deleteDefaultApi(tenantDomain, api.getId()); } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error occurred when removing default api from gateway " + environmentName, axisFault); failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); } } } return failedEnvironmentsMap; } /** * Checks whether the API has been published. * * @param api * - The API to be cheked. * @param tenantDomain * - Tenant Domain of the publisher * @return True if the API is available in at least one Gateway. False if * available in none. */ public boolean isAPIPublished(API api, String tenantDomain)throws APIManagementException { for (Environment environment : environments.values()) { try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); // If the API exists in at least one environment, consider as // published and return true. APIIdentifier id = api.getId(); if (client.getApi(tenantDomain, id) != null) { return true; } } catch (AxisFault axisFault) { /* didn't throw this exception to check api available in all the environments therefore we didn't throw exception to avoid if gateway unreachable affect */ if (!APIConstants.CREATED.equals(api.getStatus())) { log.error("Error occurred when check api is published on gateway" + environment.getName(), axisFault); } } } return false; } /** * Get the endpoint Security type of the published API * * @param api - The API to be checked. 
* @param tenantDomain - Tenant Domain of the publisher * @return Endpoint security type; Basic or Digest */ public String getAPIEndpointSecurityType(API api, String tenantDomain) throws APIManagementException { for (Environment environment : environments.values()) { try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); APIIdentifier id = api.getId(); APIData apiData = client.getApi(tenantDomain, id); if (apiData != null) { ResourceData[] resourceData = apiData.getResources(); for (ResourceData resource : resourceData) { if (resource != null && resource.getInSeqXml() != null && resource.getInSeqXml().contains("DigestAuthMediator")) { return APIConstants.APIEndpointSecurityConstants.DIGEST_AUTH; } } } } catch (AxisFault axisFault) { // didn't throw this exception to check api available in all the environments // therefore we didn't throw exception to avoid if gateway unreachable affect if (!APIConstants.CREATED.equals(api.getStatus())) { log.error("Error occurred when check api endpoint security type on gateway" + environment.getName(), axisFault); } } } return APIConstants.APIEndpointSecurityConstants.BASIC_AUTH; } public void setProductResourceSequences(APIProviderImpl apiProvider, APIProduct apiProduct, String tenantDomain) throws APIManagementException { for (APIProductResource resource : apiProduct.getProductResources()) { APIIdentifier apiIdentifier = resource.getApiIdentifier(); API api = apiProvider.getAPI(apiIdentifier); for (String environmentName : api.getEnvironments()) { Environment environment = environments.get(environmentName); try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); String inSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; if (client.isExistingSequence(inSequenceKey, tenantDomain)) { resource.setInSequenceName(inSequenceKey); } String outSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; if (client.isExistingSequence(outSequenceKey, tenantDomain)) { resource.setOutSequenceName(outSequenceKey); } String faultSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; if (client.isExistingSequence(faultSequenceKey, tenantDomain)) { resource.setFaultSequenceName(faultSequenceKey); } } catch (AxisFault axisFault) { throw new APIManagementException("Error occurred while checking if product resources " + "have custom sequences", axisFault); } } } } /** * To deploy client certificate in given API environment. * * @param client API GatewayAdminClient . * @param api Relevant API. * @param tenantDomain Tenant domain. * @throws CertificateManagementException Certificate Management Exception. * @throws AxisFault AxisFault. */ private void deployClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.addClientCertificate(clientCertificateDTO.getCertificate(), clientCertificateDTO.getAlias() + "_" + tenantId); } } } /** * To update client certificate in relevant API gateway environment. 
* * @param client API Gateway admi client. * @param api Relevant API. * @param tenantDomain Tenant domain. */ private void updateClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<String> aliasList = CertificateMgtDAO.getInstance() .getDeletedClientCertificateAlias(api.getId(), tenantId); for (String alias : aliasList) { client.deleteClientCertificate(alias + "_" + tenantId); } List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.addClientCertificate(clientCertificateDTO.getCertificate(), clientCertificateDTO.getAlias() + "_" + tenantId); } } } /** * To update the database instance with the successfully removed client certificates from teh gateway. * * @param api Relevant API related with teh removed certificate. * @param tenantDomain Tenant domain of the API. */ private void updateRemovedClientCertificates(API api, String tenantDomain) { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } try { CertificateMgtDAO.getInstance().updateRemovedCertificatesFromGateways(api.getId(), APIUtil.getTenantIdFromTenantDomain(tenantDomain)); /* The flow does not need to be blocked, as this failure do not related with updating client certificates in gateway, rather updating in database. There is no harm in database having outdated certificate information.*/ } catch (CertificateManagementException e) { log.error("Certificate Management Exception while trying to update the remove certificate from gateways " + "for the api " + api.getId() + " for the tenant domain " + tenantDomain, e); } } /** * To undeploy the client certificates from the gateway environment. * * @param client APIGatewayAdmin Client. * @param api Relevant API particular certificate is related with. * @param tenantDomain Tenant domain of the API. * @throws CertificateManagementException Certificate Management Exception. * @throws AxisFault AxisFault. 
*/ private void unDeployClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.deleteClientCertificate(clientCertificateDTO.getAlias() + "_" + tenantId); } } List<String> aliasList = CertificateMgtDAO.getInstance() .getDeletedClientCertificateAlias(api.getId(), tenantId); for (String alias : aliasList) { client.deleteClientCertificate(alias + "_" + tenantId); } } /** * Get the specified in/out sequences from api object * * @param api -API object * @param tenantDomain * @param environment * @throws APIManagementException * @throws AxisFault */ private void deployCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (APIUtil.isSequenceDefined(api.getInSequence())) { deployInSequence(client, api, tenantId, tenantDomain, environment); } if (APIUtil.isSequenceDefined(api.getOutSequence())) { deployOutSequence(client, api, tenantId, tenantDomain, environment); } } catch (Exception e) { String msg = "Error in deploying the sequence to gateway"; log.error(msg, e); throw new APIManagementException(msg); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } private void deployInSequence(APIGatewayAdminClient sequenceAdminServiceClient, API api, int tenantId, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { String inSequenceName = api.getInSequence(); OMElement inSequence = APIUtil.getCustomSequence(inSequenceName, tenantId, "in", api.getId()); if (inSequence != null) { String inSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; if (inSequence.getAttribute(new QName("name")) != null) { inSequence.getAttribute(new QName("name")).setAttributeValue(inSeqExt); } sequenceAdminServiceClient.addSequence(inSequence, tenantDomain); } } private void deployOutSequence(APIGatewayAdminClient client, API api, int tenantId, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { String outSequenceName = api.getOutSequence(); OMElement outSequence = APIUtil.getCustomSequence(outSequenceName, tenantId, "out", api.getId()); if (outSequence != null) { String outSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; if (outSequence.getAttribute(new QName("name")) != null) { outSequence.getAttribute(new QName("name")).setAttributeValue(outSeqExt); } client.addSequence(outSequence, tenantDomain); } } /** * Undeploy the sequences deployed in synapse * * @param api * @param 
tenantDomain * @param environment * @throws APIManagementException */ private void undeployCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else{ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } if (APIUtil.isSequenceDefined(api.getInSequence())) { String inSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; client.deleteSequence(inSequence, tenantDomain); } if (APIUtil.isSequenceDefined(api.getOutSequence())) { String outSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; client.deleteSequence(outSequence, tenantDomain); } if (APIUtil.isSequenceDefined(api.getFaultSequence())) { String faultSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; if(client.isExistingSequence(faultSequence, tenantDomain)) { client.deleteSequence(faultSequence, tenantDomain); } } } catch (Exception e) { String msg = "Error in deleting the sequence from gateway"; log.error(msg, e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Update the custom sequences in gateway * @param api * @param tenantDomain * @param environment * @throws APIManagementException */ private void updateCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException { //If sequences have been added, updated or removed. if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getOldInSequence()) || APIUtil.isSequenceDefined(api.getOldOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else{ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); //If an inSequence has been added, updated or removed. if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOldInSequence())) { String inSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; //If sequence already exists if (client.isExistingSequence(inSequenceKey, tenantDomain)) { //Delete existing sequence client.deleteSequence(inSequenceKey, tenantDomain); } //If an inSequence has been added or updated. if(APIUtil.isSequenceDefined(api.getInSequence())){ //Deploy the inSequence deployInSequence(client, api, tenantId, tenantDomain, environment); } } //If an outSequence has been added, updated or removed. if (APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getOldOutSequence())) { String outSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; //If the outSequence exists. if (client.isExistingSequence(outSequence, tenantDomain)) { //Delete existing outSequence client.deleteSequence(outSequence, tenantDomain); } //If an outSequence has been added or updated. 
if (APIUtil.isSequenceDefined(api.getOutSequence())){ //Deploy outSequence deployOutSequence(client, api, tenantId, tenantDomain, environment); } } } catch (Exception e) { String msg = "Error in updating the sequence at the Gateway"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } private void deployAPIFaultSequence(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException { String faultSequenceName = api.getFaultSequence(); String faultSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; boolean isTenantFlowStarted = false; try { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; if (!StringUtils.isEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } //If a fault sequence has be defined. if (APIUtil.isSequenceDefined(faultSequenceName)) { int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); //If the sequence already exists if (client.isExistingSequence(faultSeqExt, tenantDomain)) { //Delete the sequence. We need to redeploy afterwards since the sequence may have been updated. client.deleteSequence(faultSeqExt, tenantDomain); } //Get the fault sequence xml OMElement faultSequence = APIUtil.getCustomSequence(faultSequenceName, tenantId, APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT, api.getId()); if (faultSequence != null) { if (APIUtil.isPerAPISequence(faultSequenceName, tenantId, api.getId(), APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT)) { if (faultSequence.getAttribute(new QName("name")) != null) { faultSequence.getAttribute(new QName("name")).setAttributeValue(faultSeqExt); } } else { //If the previous sequence was a per API fault sequence delete it if (client.isExistingSequence(faultSequenceName, tenantDomain)) { client.deleteSequence(faultSequenceName, tenantDomain); } } //Deploy the fault sequence client.addSequence(faultSequence, tenantDomain); } } else { if (client.isExistingSequence(faultSeqExt, tenantDomain)) { client.deleteSequence(faultSeqExt, tenantDomain); } } } catch (AxisFault e) { String msg = "Error while updating the fault sequence at the Gateway"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Store the secured endpoint username password to registry * @param api * @param tenantDomain * @param environment * @throws APIManagementException */ private void setSecureVaultProperty(APIGatewayAdminClient securityAdminClient, API api, String tenantDomain, Environment environment) throws APIManagementException { boolean isSecureVaultEnabled = Boolean.parseBoolean(ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService(). getAPIManagerConfiguration().getFirstProperty(APIConstants.API_SECUREVAULT_ENABLE)); if (api.isEndpointSecured() && isSecureVaultEnabled) { try { securityAdminClient.setSecureVaultProperty(api, tenantDomain); } catch (Exception e) { String msg = "Error in setting secured password."; log.error(msg + ' ' + e.getLocalizedMessage(), e); throw new APIManagementException(msg); } } } }
Fixing Websocket API Endpoint settings do not replicate to synapse artifact
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIGatewayManager.java
Fixing Websocket API Endpoint settings do not replicate to synapse artifact
Java
apache-2.0
afea3f1a5047eaeda6213c10868b2e001cfe9c4a
0
ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma
/* * The Gemma project. * * Copyright (c) 2006-2007 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.web.visualization; import java.lang.Exception; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import javax.annotation.PostConstruct; import net.sf.ehcache.Cache; import net.sf.ehcache.CacheManager; import net.sf.ehcache.Element; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.util.StopWatch; import ubic.gemma.analysis.expression.diff.LinearModelAnalyzer; import ubic.gemma.analysis.util.ExperimentalDesignUtils; import ubic.gemma.expression.experiment.service.ExpressionExperimentService; import ubic.gemma.model.analysis.expression.diff.ContrastResult; import ubic.gemma.model.analysis.expression.diff.DiffExprGeneSearchResult; import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysis; import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisResult; import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisService; import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultService; import ubic.gemma.model.analysis.expression.diff.ExpressionAnalysisResultSet; import ubic.gemma.model.expression.experiment.BioAssaySet; import ubic.gemma.model.expression.experiment.ExperimentalFactor; import ubic.gemma.model.expression.experiment.ExpressionExperiment; import ubic.gemma.model.expression.experiment.ExpressionExperimentSubSet; import ubic.gemma.model.expression.experiment.FactorValue; import ubic.gemma.model.genome.Gene; import ubic.gemma.util.EntityUtils; import ubic.gemma.web.visualization.DifferentialExpressionGenesConditionsValueObject.Condition; /** * TODO Document Me * * @author anton * @version $Id$ */ @Component public class DifferentialExpressionGeneConditionSearchServiceImpl implements DifferentialExpressionGeneConditionSearchService { public static class TaskProgress { private double progressPercent; private String currentStage; private DifferentialExpressionGenesConditionsValueObject taskResult = null; public TaskProgress( String stage, double percent, DifferentialExpressionGenesConditionsValueObject result ) { this.currentStage = stage; this.progressPercent = percent; this.taskResult = result; } public String getCurrentStage() { return this.currentStage; } public double getProgressPercent() { return this.progressPercent; } public DifferentialExpressionGenesConditionsValueObject getTaskResult() { return taskResult; } public void setTaskResult( 
DifferentialExpressionGenesConditionsValueObject taskResult ) { this.taskResult = taskResult; } } /** * Encapsulates the search for differential expression results, for a set of genes and experiments (which can be * grouped) */ private class DifferentialExpressionSearchTask implements Callable<DifferentialExpressionGenesConditionsValueObject> { /** * Pvalues smaller than this (e.g., 0) are set to this value instead. */ private static final double TINY_PVALUE = 1e-16; private static final double TINY_QVALUE = 1e-10; private List<List<Gene>> geneGroups; private List<Collection<ExpressionExperiment>> experimentGroups; private List<String> geneGroupNames; private List<String> experimentGroupNames; private String taskProgressStage = "Query submitted..."; private double taskProgressPercent = 0.0; private DifferentialExpressionGenesConditionsValueObject taskResult = null; /** * @param geneGroups - the sets of genes to query * @param experimentGroups - the sets of experiments to query * @param geneGroupNames - metadata * @param experimentGroupNames */ public DifferentialExpressionSearchTask( List<List<Gene>> geneGroups, List<Collection<ExpressionExperiment>> experimentGroups, List<String> geneGroupNames, List<String> experimentGroupNames ) { assert !geneGroups.isEmpty() && !geneGroups.get( 0 ).isEmpty(); assert !experimentGroups.isEmpty() && !experimentGroups.get( 0 ).isEmpty(); assert geneGroups.size() == geneGroupNames.size(); assert experimentGroups.size() == experimentGroupNames.size(); this.geneGroups = geneGroups; this.experimentGroups = experimentGroups; this.geneGroupNames = geneGroupNames; this.experimentGroupNames = experimentGroupNames; } /* * Does all the actual work of the query. (non-Javadoc) * * @see java.util.concurrent.Callable#call() */ @Override public DifferentialExpressionGenesConditionsValueObject call() { log.info( "Call..." ); DifferentialExpressionGenesConditionsValueObject searchResult = new DifferentialExpressionGenesConditionsValueObject(); addGenesToSearchResultValueObject( searchResult ); List<ExpressionAnalysisResultSet> resultSets = addConditionsToSearchResultValueObject( searchResult ); fillHeatmapCells( resultSets, getGeneIds( searchResult.getGenes() ), searchResult ); this.taskResult = searchResult; log.info("Finished DiffExpSearchTask."); return searchResult; } /** * @return */ public synchronized TaskProgress getTaskProgress() { // I think this is safe only because String is immutable // and double is copied by value. FIXME ???? return new TaskProgress( this.taskProgressStage, this.taskProgressPercent, this.taskResult ); } /** * Get information on the conditions to be searched. This is not part of the query for the results themselves, * but uses the database to get metadata/summaries about the analyses that will be used. Initializes the * searchResult value object. * * @param searchResult to be initialized * @return list of the resultSets that should be queried. 
*/ private List<ExpressionAnalysisResultSet> addConditionsToSearchResultValueObject( DifferentialExpressionGenesConditionsValueObject searchResult ) { StopWatch watch = new StopWatch( "addConditionsToSearchResultValueObject" ); watch.start( "Add conditions to search result value object" ); List<ExpressionAnalysisResultSet> usedResultSets = new LinkedList<ExpressionAnalysisResultSet>(); int experimentGroupIndex = 0; int i = 0; for ( Collection<ExpressionExperiment> experimentGroup : experimentGroups ) { String stage = "Loading " + experimentGroupNames.get( experimentGroupIndex ) + " experiments..."; double progress = 0.0; double progressStep = 100.0 / experimentGroup.size(); this.setTaskProgress( stage, progress ); // database hit: important that this be fast. Map<BioAssaySet, Collection<DifferentialExpressionAnalysis>> analyses = differentialExpressionAnalysisService .getAnalyses( experimentGroup ); experiment: for ( BioAssaySet bas : analyses.keySet() ) { if ( !( bas instanceof ExpressionExperiment ) ) { log.warn( "Subsets not supported yet (" + bas + "), skipping" ); continue; } ExpressionExperiment experiment = ( ExpressionExperiment ) bas; Collection<DifferentialExpressionAnalysis> analysesForExperiment = filterAnalyses( analyses .get( experiment ) ); if ( analysesForExperiment.isEmpty() ) { continue; } for ( DifferentialExpressionAnalysis analysis : analysesForExperiment ) { List<ExpressionAnalysisResultSet> resultSets = filterResultSets( analysis.getResultSets() ); usedResultSets.addAll( resultSets ); if ( resultSets.isEmpty() ) { log.info( "No resultSets usable for " + experiment.getShortName() ); } for ( ExpressionAnalysisResultSet resultSet : resultSets ) { // this is taken care of by the filterResultSets assert resultSet.getHitListSizes() != null; assert resultSet.getExperimentalFactors().size() == 1; ExperimentalFactor factor = resultSet.getExperimentalFactors().iterator().next(); Collection<FactorValue> factorValues = filterFactorValues( factor.getFactorValues(), resultSet.getBaselineGroup().getId() ); for ( FactorValue factorValue : factorValues ) { Condition condition = searchResult.new Condition( experiment, analysis, resultSet, factorValue ); condition.setExperimentGroupName( experimentGroupNames.get( experimentGroupIndex ) ); condition.setExperimentGroupIndex( experimentGroupIndex ); /* * SANITY CHECKS these fields should be filled in. If not, we are going to skip the * results. */ if ( condition.getNumberDiffExpressedProbes() == -1 ) { // Sorry, this is too slow and the hitlists should be filled in. // total = differentialExpressionAnalysisService.countProbesMeetingThreshold( // resultSet, 0.5 ); log.warn( bas + ": Error: No hit list sizes for resultSet with ID=" + resultSet.getId() ); continue; } if ( condition.getNumberOfProbesOnArray() == null || condition.getNumberDiffExpressedProbes() == null ) { log.error( bas + ": Error: Null counts for # diff ex probe or # probes on array, Skipping" ); continue experiment; } else if ( condition.getNumberOfProbesOnArray() < condition .getNumberDiffExpressedProbes() ) { log.error( bas + ": Error: More diff expressed probes than probes on array. Skipping." ); continue experiment; } searchResult.addCondition( condition ); i++; } } } progress += progressStep; this.setTaskProgress( stage, progress ); } experimentGroupIndex++; } watch.stop(); if ( watch.getTotalTimeMillis() > 100 ) { // This does not include getting the actual diff ex results. 
log.info( "Get information on conditions/analyses for " + i + " items: " + watch.getTotalTimeMillis() + "ms" ); } return usedResultSets; } /** * No database calls here, just organization. * * @param searchResult */ private void addGenesToSearchResultValueObject( DifferentialExpressionGenesConditionsValueObject searchResult ) { int geneGroupIndex = 0; for ( List<Gene> geneGroup : geneGroups ) { String geneGroupName = geneGroupNames.get( geneGroupIndex ); String stage = "Loading " + geneGroupName + " genes..."; double progress = 0.0; double progressStep = 100 / geneGroup.size(); this.setTaskProgress( stage, progress ); for ( Gene gene : geneGroup ) { DifferentialExpressionGenesConditionsValueObject.Gene g = searchResult.new Gene( gene.getId(), gene.getOfficialSymbol(), gene.getOfficialName() ); g.setGroupIndex( geneGroupIndex ); g.setGroupName( geneGroupName ); searchResult.addGene( g ); progress += progressStep; this.setTaskProgress( stage, progress ); } geneGroupIndex++; } } /** * Gets all the diff ex results, flattening out the relation with resultset and gene (the objs still have this * information in them) * * @param resultSetToGeneResults * @return */ private Collection<DiffExprGeneSearchResult> aggregateAcrossResultSets( Map<Long, Map<Long, DiffExprGeneSearchResult>> resultSetToGeneResults ) { int i = 0; Collection<DiffExprGeneSearchResult> aggregatedResults = new HashSet<DiffExprGeneSearchResult>(); for ( Entry<Long, Map<Long, DiffExprGeneSearchResult>> resultSetEntry : resultSetToGeneResults.entrySet() ) { Collection<DiffExprGeneSearchResult> values = resultSetEntry.getValue().values(); i += resultSetEntry.getValue().size(); for ( DiffExprGeneSearchResult v : values ) { if ( aggregatedResults.contains( v ) ) { log.warn( "Already have : " + v ); } } aggregatedResults.addAll( values ); } assert i == aggregatedResults.size(); return aggregatedResults; } /** * The actual business of fetching the differential expression results. * * @param resultSets * @param geneIds * @param searchResult holds the results */ private void fillBatchOfHeatmapCells( Map<ExpressionAnalysisResultSet, Collection<Long>> resultSetIdsToArrayDesignsUsed, List<Long> geneIds, DifferentialExpressionGenesConditionsValueObject searchResult ) { StopWatch watch = new StopWatch( "Fill diff ex heatmap cells" ); watch.start( "DB query for diff ex results" ); // Main query for results. Map<Long, Map<Long, DiffExprGeneSearchResult>> resultSetToGeneResults = differentialExpressionResultService .findDifferentialExpressionAnalysisResultIdsInResultSet( resultSetIdsToArrayDesignsUsed, geneIds ); watch.stop(); Map<Long, ExpressionAnalysisResultSet> resultSetMap = EntityUtils.getIdMap( resultSetIdsToArrayDesignsUsed .keySet() ); Collection<DiffExprGeneSearchResult> aggregatedResults = aggregateAcrossResultSets( resultSetToGeneResults ); watch.start( "Processing " + aggregatedResults.size() + " results from DB query" ); Map<Long, DifferentialExpressionAnalysisResult> detailedResults = getDetailsForContrasts( aggregatedResults ); processHits( searchResult, resultSetToGeneResults, resultSetMap, detailedResults ); watch.stop(); log.info( "Diff ex search finished:\n" + watch.prettyPrint() ); } /** * Staging for getting the diff ex results. 
* * @param resultSets to be searched * @param geneIds to be searched * @param searchResult holds the results */ private void fillHeatmapCells( List<ExpressionAnalysisResultSet> resultSets, List<Long> geneIds, DifferentialExpressionGenesConditionsValueObject searchResult ) { this.setTaskProgress( "Starting analysis ...", this.getTaskProgress().getProgressPercent() + 10 ); Map<ExpressionAnalysisResultSet, Collection<Long>> resultSetIdsToArrayDesignsUsed = new HashMap<ExpressionAnalysisResultSet, Collection<Long>>(); StopWatch timer = new StopWatch(); timer.start(); // DATABASE CALL HERE, but should be quite fast. for ( ExpressionAnalysisResultSet rs : resultSets ) { resultSetIdsToArrayDesignsUsed .put( rs, EntityUtils.getIds( eeService.getArrayDesignsUsed( rs.getAnalysis() .getExperimentAnalyzed() ) ) ); } timer.stop(); if ( timer.getTotalTimeMillis() > 100 ) { log.info( "Fetch array designs used: " + timer.getTotalTimeMillis() + "ms" ); } fillBatchOfHeatmapCells( resultSetIdsToArrayDesignsUsed, geneIds, searchResult ); } /** * If there are multiple analyses, pick the ones that "don't overlap" (see implementation for details, evolving) * * @param collection * @return a collection with either 0 or a small number of non-conflicting analyses. */ private Collection<DifferentialExpressionAnalysis> filterAnalyses( Collection<DifferentialExpressionAnalysis> analyses ) { // easy case. if ( analyses.size() == 1 ) return analyses; Collection<DifferentialExpressionAnalysis> filtered = new HashSet<DifferentialExpressionAnalysis>(); ExperimentalFactor subsetFactor = null; Map<DifferentialExpressionAnalysis, Collection<ExperimentalFactor>> analysisFactorsUsed = new HashMap<DifferentialExpressionAnalysis, Collection<ExperimentalFactor>>(); for ( DifferentialExpressionAnalysis analysis : analyses ) { // take the first subsetted analysis we see. if ( analysis.getExperimentAnalyzed() instanceof ExpressionExperimentSubSet ) { differentialExpressionAnalysisService.thaw( analysis ); // NOTE necessary, but possibly slows things // down if ( subsetFactor != null && subsetFactor.equals( analysis.getSubsetFactorValue().getExperimentalFactor() ) ) { filtered.add( analysis ); } else { filtered.add( analysis ); } subsetFactor = analysis.getSubsetFactorValue().getExperimentalFactor(); } else { List<ExpressionAnalysisResultSet> resultSets = filterResultSets( analysis.getResultSets() ); Collection<ExperimentalFactor> factorsUsed = new HashSet<ExperimentalFactor>(); for ( ExpressionAnalysisResultSet rs : resultSets ) { if ( isBatch( rs ) ) continue; Collection<ExperimentalFactor> facts = rs.getExperimentalFactors(); for ( ExperimentalFactor f : facts ) { if ( ExperimentalDesignUtils.isBatch( f ) ) continue; factorsUsed.add( f ); } } if ( factorsUsed.isEmpty() ) continue; analysisFactorsUsed.put( analysis, factorsUsed ); } } /* * If we got a subset analysis, just use it. */ if ( !filtered.isEmpty() ) { log.info( "Using subsetted analyses for " + analyses.iterator().next().getExperimentAnalyzed() ); return filtered; } if ( analysisFactorsUsed.isEmpty() ) { log.info( "No analyses were usable for " + analyses.iterator().next().getExperimentAnalyzed() ); return filtered; } /* * Look for the analysis that has the most factors. We might change this to pick more than one if they use * different factors, but this would be pretty rare. 
*/ assert !analysisFactorsUsed.isEmpty(); DifferentialExpressionAnalysis best = null; for ( DifferentialExpressionAnalysis candidate : analysisFactorsUsed.keySet() ) { if ( best == null || analysisFactorsUsed.get( best ).size() < analysisFactorsUsed.get( candidate ).size() ) { best = candidate; } } return filtered; } /** * @param factorValues * @param baselineFactorValueId * @return */ private List<FactorValue> filterFactorValues( Collection<FactorValue> factorValues, long baselineFactorValueId ) { List<FactorValue> filteredFactorValues = new LinkedList<FactorValue>(); for ( FactorValue factorValue : factorValues ) { if ( factorValue.getId().equals( baselineFactorValueId ) ) continue; // Skip baseline filteredFactorValues.add( factorValue ); } return filteredFactorValues; } /** * Remove resultSets that are not usable for one reason or another (e.g., intearctions, batch effects) * * @param resultSets * @return */ private List<ExpressionAnalysisResultSet> filterResultSets( Collection<ExpressionAnalysisResultSet> resultSets ) { List<ExpressionAnalysisResultSet> filteredResultSets = new LinkedList<ExpressionAnalysisResultSet>(); for ( ExpressionAnalysisResultSet resultSet : resultSets ) { // Skip interactions. if ( resultSet.getExperimentalFactors().size() != 1 ) continue; // Skip batch effect ones. if ( isBatch( resultSet ) ) continue; // Skip if baseline is not specified. if ( resultSet.getBaselineGroup() == null ) { log.error( "Possible Data Issue: resultSet.getBaselineGroup() returned null for result set with ID=" + resultSet.getId() ); continue; } // must have hitlists populated if ( resultSet.getHitListSizes() == null ) { log.warn( "Possible data issue: resultSet.getHitListSizes() returned null for result set with ID=" + resultSet.getId() ); continue; } filteredResultSets.add( resultSet ); } return filteredResultSets; } /** * Retrieve the details (contrasts) for results which meet the criterion. (PVALUE_CONTRAST_SELECT_THRESHOLD) * * @param geneToProbeResult * @return */ private Map<Long, DifferentialExpressionAnalysisResult> getDetailsForContrasts( Collection<DiffExprGeneSearchResult> diffExResults ) { StopWatch timer = new StopWatch(); timer.start(); List<Long> resultsWithContrasts = new ArrayList<Long>(); for ( DiffExprGeneSearchResult r : diffExResults ) { if ( r.getResultId() == null ) { // it is a dummy result. It means there is no result for this gene in this resultset. continue; } // Here I am trying to avoid fetching them when there is no hope. if ( r.getCorrectedPvalue() == null || r.getCorrectedPvalue() > LinearModelAnalyzer.PVALUE_CONTRAST_SELECT_THRESHOLD ) { // Then it won't have contrasts; no need to fetch. 
continue; } resultsWithContrasts.add( r.getResultId() ); } Map<Long, DifferentialExpressionAnalysisResult> detailedResults = new HashMap<Long, DifferentialExpressionAnalysisResult>(); if ( !resultsWithContrasts.isEmpty() ) { detailedResults = EntityUtils .getIdMap( differentialExpressionResultService.load( resultsWithContrasts ) ); } timer.stop(); if ( timer.getTotalTimeMillis() > 1 ) { log.info( "Fetch contrasts for " + resultsWithContrasts.size() + " results: " + timer.getTotalTimeMillis() + "ms" ); } return detailedResults; } /** * @param g * @return */ private List<Long> getGeneIds( Collection<DifferentialExpressionGenesConditionsValueObject.Gene> g ) { List<Long> ids = new LinkedList<Long>(); for ( DifferentialExpressionGenesConditionsValueObject.Gene gene : g ) { ids.add( gene.getId() ); } return ids; } /** * @param resultSet * @return */ private boolean isBatch( ExpressionAnalysisResultSet resultSet ) { for ( ExperimentalFactor factor : resultSet.getExperimentalFactors() ) { if ( ExperimentalDesignUtils.isBatch( factor ) ) { return true; } } return false; } /** * @param resultSet * @param geneId * @param searchResult * @param correctedPvalue should not be null. * @param pValue should not be null. * @param numProbes * @param numProbesDiffExpressed */ private void markCellsBlack( ExpressionAnalysisResultSet resultSet, Long geneId, DifferentialExpressionGenesConditionsValueObject searchResult, Double correctedPvalue, Double pValue, int numProbes, int numProbesDiffExpressed ) { /* * Note that if the resultSet has more than one experimental factor, it is an interaction term. */ assert resultSet.getExperimentalFactors().size() == 1 : "Should not have been passed an interaction term"; ExperimentalFactor experimentalFactor = resultSet.getExperimentalFactors().iterator().next(); Collection<FactorValue> factorValues = experimentalFactor.getFactorValues(); for ( FactorValue factorValue : factorValues ) { String conditionId = DifferentialExpressionGenesConditionsValueObject.constructConditionId( resultSet.getId(), factorValue.getId() ); searchResult.addBlackCell( geneId, conditionId, correctedPvalue, pValue, numProbes, numProbesDiffExpressed ); } } /** * @param searchResult * @param resultSetToGeneResults * @param resultSetMap * @param detailedResults */ private void processHits( DifferentialExpressionGenesConditionsValueObject searchResult, Map<Long, Map<Long, DiffExprGeneSearchResult>> resultSetToGeneResults, Map<Long, ExpressionAnalysisResultSet> resultSetMap, Map<Long, DifferentialExpressionAnalysisResult> detailedResults ) { int i = 0; for ( Entry<Long, Map<Long, DiffExprGeneSearchResult>> resultSetEntry : resultSetToGeneResults.entrySet() ) { Map<Long, DiffExprGeneSearchResult> geneToProbeResult = resultSetEntry.getValue(); ExpressionAnalysisResultSet resultSet = resultSetMap.get( resultSetEntry.getKey() ); assert resultSet != null; processHitsForResultSet( searchResult, detailedResults, geneToProbeResult, resultSet ); if ( ++i % 2000 == 0 ) { log.info( "Processed " + i + "/" + resultSetToGeneResults.size() + " hits." ); } } } /** * @param searchResult * @param detailedResults * @param geneToProbeResult * @param resultSet */ private void processHitsForResultSet( DifferentialExpressionGenesConditionsValueObject searchResult, Map<Long, DifferentialExpressionAnalysisResult> detailedResults, Map<Long, DiffExprGeneSearchResult> geneToProbeResult, ExpressionAnalysisResultSet resultSet ) { // No database calls. 
log.info("Start processing hits for result sets."); try { for ( Long geneId : geneToProbeResult.keySet() ) { DiffExprGeneSearchResult diffExprGeneSearchResult = geneToProbeResult.get( geneId ); Double correctedPvalue = diffExprGeneSearchResult.getCorrectedPvalue(); Double uncorrectedPvalue = diffExprGeneSearchResult.getPvalue(); // this means we got a 'dummy' value, indicating missing data. if ( correctedPvalue == null ) continue; assert uncorrectedPvalue != null; // arbitrary fixing (meant to deal with zeros). Remember these are usually FDRs. if ( correctedPvalue < TINY_QVALUE ) { correctedPvalue = TINY_QVALUE; } if ( uncorrectedPvalue < TINY_PVALUE ) { uncorrectedPvalue = TINY_PVALUE; } int numberOfProbes = diffExprGeneSearchResult.getNumberOfProbes(); int numberOfProbesDiffExpressed = diffExprGeneSearchResult.getNumberOfProbesDiffExpressed(); markCellsBlack( resultSet, geneId, searchResult, correctedPvalue, uncorrectedPvalue, numberOfProbes, numberOfProbesDiffExpressed ); Long probeResultId = diffExprGeneSearchResult.getResultId(); if ( !detailedResults.containsKey( probeResultId ) ) { continue; } DifferentialExpressionAnalysisResult deaResult = detailedResults.get( probeResultId ); for ( ContrastResult cr : deaResult.getContrasts() ) { FactorValue factorValue = cr.getFactorValue(); assert factorValue != null : "Null factor value for contrast with id=" + cr.getId(); String conditionId = DifferentialExpressionGenesConditionsValueObject.constructConditionId( resultSet.getId(), factorValue.getId() ); searchResult.addCell( geneId, conditionId, correctedPvalue, cr.getLogFoldChange(), numberOfProbes, numberOfProbesDiffExpressed, uncorrectedPvalue ); } } } catch (Exception e) { log.error(e.getMessage()); throw e; } log.info("Done processing hits for result sets."); } /** * @param stage * @param percent */ private synchronized void setTaskProgress( String stage, double percent ) { this.taskProgressStage = stage; this.taskProgressPercent = percent; } } protected static Log log = LogFactory.getLog( DifferentialExpressionGeneConditionSearchServiceImpl.class.getName() ); @Autowired private DifferentialExpressionResultService differentialExpressionResultService; @Autowired private DifferentialExpressionAnalysisService differentialExpressionAnalysisService; @Autowired private ExpressionExperimentService eeService; @Autowired private CacheManager cacheManager; private Cache diffExpSearchTasksCache; /* * (non-Javadoc) * * @see * ubic.gemma.web.visualization.DifferentialExpressionGeneConditionSearchService#getDiffExpSearchTaskProgress(java * .lang.String) */ @Override public TaskProgress getDiffExpSearchTaskProgress( String taskId ) { if ( this.diffExpSearchTasksCache.isKeyInCache( taskId ) ) { DifferentialExpressionSearchTask diffExpSearchTask = ( DifferentialExpressionSearchTask ) this.diffExpSearchTasksCache .get( taskId ).getObjectValue(); TaskProgress taskProgress = diffExpSearchTask.getTaskProgress(); DifferentialExpressionGenesConditionsValueObject result = taskProgress.getTaskResult(); if ( result != null ) { this.diffExpSearchTasksCache.remove( taskId ); } return taskProgress; } return new TaskProgress( "Removed", 0.0, null ); } /* * (non-Javadoc) * * @see * ubic.gemma.web.visualization.DifferentialExpressionGeneConditionSearchService#scheduleDiffExpSearchTask(java. 
* util.List, java.util.List, java.util.List, java.util.List) */ @Override public String scheduleDiffExpSearchTask( List<List<Gene>> genes, List<Collection<ExpressionExperiment>> experiments, List<String> geneGroupNames, List<String> experimentGroupNames ) { log.info( "Got request to schedule search involving " + genes.size() + " gene groups and " + experiments.size() + " experiments groups" ); DifferentialExpressionSearchTask diffExpSearchTask = new DifferentialExpressionSearchTask( genes, experiments, geneGroupNames, experimentGroupNames ); String taskId = UUID.randomUUID().toString(); this.diffExpSearchTasksCache.put( new Element( taskId, diffExpSearchTask ) ); ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor(); Future<DifferentialExpressionGenesConditionsValueObject> submit = singleThreadExecutor .submit( diffExpSearchTask ); singleThreadExecutor.shutdown(); return taskId; } @PostConstruct protected void init() { this.diffExpSearchTasksCache = new Cache( "DifferentialExpressionVisSearchTasks", 300, false, false, 3600, 3600 ); this.cacheManager.addCache( this.diffExpSearchTasksCache ); } }
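The service above is deliberately asynchronous: scheduleDiffExpSearchTask caches the task under a random UUID and returns that id immediately, while getDiffExpSearchTaskProgress is polled until TaskProgress.getTaskResult() is non-null, at which point the cache entry is removed. The following is a minimal caller-side polling sketch, not part of the class itself; the injected searchService and the prepared gene/experiment group lists are assumptions supplied by the calling code.

// Illustrative only: 'searchService', 'geneGroups', 'experiments', 'geneGroupNames' and
// 'experimentGroupNames' are assumed to exist in the calling code.
String taskId = searchService.scheduleDiffExpSearchTask( geneGroups, experiments, geneGroupNames,
        experimentGroupNames );
DifferentialExpressionGeneConditionSearchServiceImpl.TaskProgress progress;
do {
    Thread.sleep( 1000 ); // poll roughly once per second; real code must handle InterruptedException
    progress = searchService.getDiffExpSearchTaskProgress( taskId );
    log.info( progress.getCurrentStage() + " (" + progress.getProgressPercent() + "%)" );
    if ( "Removed".equals( progress.getCurrentStage() ) ) {
        break; // the task was evicted from the cache before a result could be fetched
    }
} while ( progress.getTaskResult() == null );
DifferentialExpressionGenesConditionsValueObject heatmap = progress.getTaskResult();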
gemma-web/src/main/java/ubic/gemma/web/visualization/DifferentialExpressionGeneConditionSearchServiceImpl.java
/* * The Gemma project. * * Copyright (c) 2006-2007 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.web.visualization; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import javax.annotation.PostConstruct; import net.sf.ehcache.Cache; import net.sf.ehcache.CacheManager; import net.sf.ehcache.Element; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.util.StopWatch; import ubic.gemma.analysis.expression.diff.LinearModelAnalyzer; import ubic.gemma.analysis.util.ExperimentalDesignUtils; import ubic.gemma.expression.experiment.service.ExpressionExperimentService; import ubic.gemma.model.analysis.expression.diff.ContrastResult; import ubic.gemma.model.analysis.expression.diff.DiffExprGeneSearchResult; import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysis; import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisResult; import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisService; import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultService; import ubic.gemma.model.analysis.expression.diff.ExpressionAnalysisResultSet; import ubic.gemma.model.expression.experiment.BioAssaySet; import ubic.gemma.model.expression.experiment.ExperimentalFactor; import ubic.gemma.model.expression.experiment.ExpressionExperiment; import ubic.gemma.model.expression.experiment.ExpressionExperimentSubSet; import ubic.gemma.model.expression.experiment.FactorValue; import ubic.gemma.model.genome.Gene; import ubic.gemma.util.EntityUtils; import ubic.gemma.web.visualization.DifferentialExpressionGenesConditionsValueObject.Condition; /** * TODO Document Me * * @author anton * @version $Id$ */ @Component public class DifferentialExpressionGeneConditionSearchServiceImpl implements DifferentialExpressionGeneConditionSearchService { public static class TaskProgress { private double progressPercent; private String currentStage; private DifferentialExpressionGenesConditionsValueObject taskResult = null; public TaskProgress( String stage, double percent, DifferentialExpressionGenesConditionsValueObject result ) { this.currentStage = stage; this.progressPercent = percent; this.taskResult = result; } public String getCurrentStage() { return this.currentStage; } public double getProgressPercent() { return this.progressPercent; } public DifferentialExpressionGenesConditionsValueObject getTaskResult() { return taskResult; } public void setTaskResult( 
DifferentialExpressionGenesConditionsValueObject taskResult ) { this.taskResult = taskResult; } } /** * Encapulsates the search for differential expression results, for a set of genes and experiments (which can be * grouped) */ private class DifferentialExpressionSearchTask implements Callable<DifferentialExpressionGenesConditionsValueObject> { /** * Pvalues smaller than this (e.g., 0 are set to this value instead. */ private static final double TINY_PVALUE = 1e-16; private static final double TINY_QVALUE = 1e-10; private List<List<Gene>> geneGroups; private List<Collection<ExpressionExperiment>> experimentGroups; private List<String> geneGroupNames; private List<String> experimentGroupNames; private String taskProgressStage = "Query submitted..."; private double taskProgressPercent = 0.0; private DifferentialExpressionGenesConditionsValueObject taskResult = null; /** * @param geneGroups - the sets of genes to query * @param experimentGroups - the sets of experiments to query * @param geneGroupNames - metadata * @param experimentGroupNames */ public DifferentialExpressionSearchTask( List<List<Gene>> geneGroups, List<Collection<ExpressionExperiment>> experimentGroups, List<String> geneGroupNames, List<String> experimentGroupNames ) { assert !geneGroups.isEmpty() && !geneGroups.get( 0 ).isEmpty(); assert !experimentGroups.isEmpty() && !experimentGroups.get( 0 ).isEmpty(); assert geneGroups.size() == geneGroupNames.size(); assert experimentGroups.size() == experimentGroupNames.size(); this.geneGroups = geneGroups; this.experimentGroups = experimentGroups; this.geneGroupNames = geneGroupNames; this.experimentGroupNames = experimentGroupNames; } /* * Does all the actual work of the query. (non-Javadoc) * * @see java.util.concurrent.Callable#call() */ @Override public DifferentialExpressionGenesConditionsValueObject call() { log.info( "Call..." ); DifferentialExpressionGenesConditionsValueObject searchResult = new DifferentialExpressionGenesConditionsValueObject(); addGenesToSearchResultValueObject( searchResult ); List<ExpressionAnalysisResultSet> resultSets = addConditionsToSearchResultValueObject( searchResult ); fillHeatmapCells( resultSets, getGeneIds( searchResult.getGenes() ), searchResult ); this.taskResult = searchResult; log.info("Finished DiffExpSearchTask."); return searchResult; } /** * @return */ public synchronized TaskProgress getTaskProgress() { // I think this is safe only because String is immutable // and double is copied by value. FIXME ???? return new TaskProgress( this.taskProgressStage, this.taskProgressPercent, this.taskResult ); } /** * Get information on the conditions to be searched. This is not part of the query for the results themselves, * but uses the database to get metadata/summaries about the analyses that will be used. Initializes the * searchResult value object. * * @param searchResult to be initialized * @return lsit of the resultSets that should be queried. 
*/ private List<ExpressionAnalysisResultSet> addConditionsToSearchResultValueObject( DifferentialExpressionGenesConditionsValueObject searchResult ) { StopWatch watch = new StopWatch( "addConditionsToSearchResultValueObject" ); watch.start( "Add conditions to search result value object" ); List<ExpressionAnalysisResultSet> usedResultSets = new LinkedList<ExpressionAnalysisResultSet>(); int experimentGroupIndex = 0; int i = 0; for ( Collection<ExpressionExperiment> experimentGroup : experimentGroups ) { String stage = "Loading " + experimentGroupNames.get( experimentGroupIndex ) + " experiments..."; double progress = 0.0; double progressStep = 100.0 / experimentGroup.size(); this.setTaskProgress( stage, progress ); // database hit: important that this be fast. Map<BioAssaySet, Collection<DifferentialExpressionAnalysis>> analyses = differentialExpressionAnalysisService .getAnalyses( experimentGroup ); experiment: for ( BioAssaySet bas : analyses.keySet() ) { if ( !( bas instanceof ExpressionExperiment ) ) { log.warn( "Subsets not supported yet (" + bas + "), skipping" ); continue; } ExpressionExperiment experiment = ( ExpressionExperiment ) bas; Collection<DifferentialExpressionAnalysis> analysesForExperiment = filterAnalyses( analyses .get( experiment ) ); if ( analysesForExperiment.isEmpty() ) { continue; } for ( DifferentialExpressionAnalysis analysis : analysesForExperiment ) { List<ExpressionAnalysisResultSet> resultSets = filterResultSets( analysis.getResultSets() ); usedResultSets.addAll( resultSets ); if ( resultSets.isEmpty() ) { log.info( "No resultSets usable for " + experiment.getShortName() ); } for ( ExpressionAnalysisResultSet resultSet : resultSets ) { // this is taken care of by the filterResultSets assert resultSet.getHitListSizes() != null; assert resultSet.getExperimentalFactors().size() == 1; ExperimentalFactor factor = resultSet.getExperimentalFactors().iterator().next(); Collection<FactorValue> factorValues = filterFactorValues( factor.getFactorValues(), resultSet.getBaselineGroup().getId() ); for ( FactorValue factorValue : factorValues ) { Condition condition = searchResult.new Condition( experiment, analysis, resultSet, factorValue ); condition.setExperimentGroupName( experimentGroupNames.get( experimentGroupIndex ) ); condition.setExperimentGroupIndex( experimentGroupIndex ); /* * SANITY CHECKS these fields should be filled in. If not, we are going to skip the * results. */ if ( condition.getNumberDiffExpressedProbes() == -1 ) { // Sorry, this is too slow and the hitlists should be filled in. // total = differentialExpressionAnalysisService.countProbesMeetingThreshold( // resultSet, 0.5 ); log.warn( bas + ": Error: No hit list sizes for resultSet with ID=" + resultSet.getId() ); continue; } if ( condition.getNumberOfProbesOnArray() == null || condition.getNumberDiffExpressedProbes() == null ) { log.error( bas + ": Error: Null counts for # diff ex probe or # probes on array, Skipping" ); continue experiment; } else if ( condition.getNumberOfProbesOnArray() < condition .getNumberDiffExpressedProbes() ) { log.error( bas + ": Error: More diff expressed probes than probes on array. Skipping." ); continue experiment; } searchResult.addCondition( condition ); i++; } } } progress += progressStep; this.setTaskProgress( stage, progress ); } experimentGroupIndex++; } watch.stop(); if ( watch.getTotalTimeMillis() > 100 ) { // This does not include getting the actual diff ex results. 
log.info( "Get information on conditions/analyses for " + i + " items: " + watch.getTotalTimeMillis() + "ms" ); } return usedResultSets; } /** * No database calls here, just organization. * * @param searchResult */ private void addGenesToSearchResultValueObject( DifferentialExpressionGenesConditionsValueObject searchResult ) { int geneGroupIndex = 0; for ( List<Gene> geneGroup : geneGroups ) { String geneGroupName = geneGroupNames.get( geneGroupIndex ); String stage = "Loading " + geneGroupName + " genes..."; double progress = 0.0; double progressStep = 100 / geneGroup.size(); this.setTaskProgress( stage, progress ); for ( Gene gene : geneGroup ) { DifferentialExpressionGenesConditionsValueObject.Gene g = searchResult.new Gene( gene.getId(), gene.getOfficialSymbol(), gene.getOfficialName() ); g.setGroupIndex( geneGroupIndex ); g.setGroupName( geneGroupName ); searchResult.addGene( g ); progress += progressStep; this.setTaskProgress( stage, progress ); } geneGroupIndex++; } } /** * Gets all the diff ex results, flattening out the relation with resultset and gene (the objs still have this * information in them) * * @param resultSetToGeneResults * @return */ private Collection<DiffExprGeneSearchResult> aggregateAcrossResultSets( Map<Long, Map<Long, DiffExprGeneSearchResult>> resultSetToGeneResults ) { int i = 0; Collection<DiffExprGeneSearchResult> aggregatedResults = new HashSet<DiffExprGeneSearchResult>(); for ( Entry<Long, Map<Long, DiffExprGeneSearchResult>> resultSetEntry : resultSetToGeneResults.entrySet() ) { Collection<DiffExprGeneSearchResult> values = resultSetEntry.getValue().values(); i += resultSetEntry.getValue().size(); for ( DiffExprGeneSearchResult v : values ) { if ( aggregatedResults.contains( v ) ) { log.warn( "Already have : " + v ); } } aggregatedResults.addAll( values ); } assert i == aggregatedResults.size(); return aggregatedResults; } /** * The actual business of fetching the differential expression results. * * @param resultSets * @param geneIds * @param searchResult holds the results */ private void fillBatchOfHeatmapCells( Map<ExpressionAnalysisResultSet, Collection<Long>> resultSetIdsToArrayDesignsUsed, List<Long> geneIds, DifferentialExpressionGenesConditionsValueObject searchResult ) { StopWatch watch = new StopWatch( "Fill diff ex heatmap cells" ); watch.start( "DB query for diff ex results" ); // Main query for results. Map<Long, Map<Long, DiffExprGeneSearchResult>> resultSetToGeneResults = differentialExpressionResultService .findDifferentialExpressionAnalysisResultIdsInResultSet( resultSetIdsToArrayDesignsUsed, geneIds ); watch.stop(); Map<Long, ExpressionAnalysisResultSet> resultSetMap = EntityUtils.getIdMap( resultSetIdsToArrayDesignsUsed .keySet() ); Collection<DiffExprGeneSearchResult> aggregatedResults = aggregateAcrossResultSets( resultSetToGeneResults ); watch.start( "Processing " + aggregatedResults.size() + " results from DB query" ); Map<Long, DifferentialExpressionAnalysisResult> detailedResults = getDetailsForContrasts( aggregatedResults ); processHits( searchResult, resultSetToGeneResults, resultSetMap, detailedResults ); watch.stop(); log.info( "Diff ex search finished:\n" + watch.prettyPrint() ); } /** * Staging for getting the diff ex results. 
* * @param resultSets to be searched * @param geneIds to be searched * @param searchResult holds the results */ private void fillHeatmapCells( List<ExpressionAnalysisResultSet> resultSets, List<Long> geneIds, DifferentialExpressionGenesConditionsValueObject searchResult ) { this.setTaskProgress( "Starting analysis ...", this.getTaskProgress().getProgressPercent() + 10 ); Map<ExpressionAnalysisResultSet, Collection<Long>> resultSetIdsToArrayDesignsUsed = new HashMap<ExpressionAnalysisResultSet, Collection<Long>>(); StopWatch timer = new StopWatch(); timer.start(); // DATABASE CALL HERE, but should be quite fast. for ( ExpressionAnalysisResultSet rs : resultSets ) { resultSetIdsToArrayDesignsUsed .put( rs, EntityUtils.getIds( eeService.getArrayDesignsUsed( rs.getAnalysis() .getExperimentAnalyzed() ) ) ); } timer.stop(); if ( timer.getTotalTimeMillis() > 100 ) { log.info( "Fetch array designs used: " + timer.getTotalTimeMillis() + "ms" ); } fillBatchOfHeatmapCells( resultSetIdsToArrayDesignsUsed, geneIds, searchResult ); } /** * If there are multiple analyses, pick the ones that "don't overlap" (see implementation for details, evolving) * * @param collection * @return a collection with either 0 or a small number of non-conflicting analyses. */ private Collection<DifferentialExpressionAnalysis> filterAnalyses( Collection<DifferentialExpressionAnalysis> analyses ) { // easy case. if ( analyses.size() == 1 ) return analyses; Collection<DifferentialExpressionAnalysis> filtered = new HashSet<DifferentialExpressionAnalysis>(); ExperimentalFactor subsetFactor = null; Map<DifferentialExpressionAnalysis, Collection<ExperimentalFactor>> analysisFactorsUsed = new HashMap<DifferentialExpressionAnalysis, Collection<ExperimentalFactor>>(); for ( DifferentialExpressionAnalysis analysis : analyses ) { // take the first subsetted analysis we see. if ( analysis.getExperimentAnalyzed() instanceof ExpressionExperimentSubSet ) { differentialExpressionAnalysisService.thaw( analysis ); // NOTE necessary, but possibly slows things // down if ( subsetFactor != null && subsetFactor.equals( analysis.getSubsetFactorValue().getExperimentalFactor() ) ) { filtered.add( analysis ); } else { filtered.add( analysis ); } subsetFactor = analysis.getSubsetFactorValue().getExperimentalFactor(); } else { List<ExpressionAnalysisResultSet> resultSets = filterResultSets( analysis.getResultSets() ); Collection<ExperimentalFactor> factorsUsed = new HashSet<ExperimentalFactor>(); for ( ExpressionAnalysisResultSet rs : resultSets ) { if ( isBatch( rs ) ) continue; Collection<ExperimentalFactor> facts = rs.getExperimentalFactors(); for ( ExperimentalFactor f : facts ) { if ( ExperimentalDesignUtils.isBatch( f ) ) continue; factorsUsed.add( f ); } } if ( factorsUsed.isEmpty() ) continue; analysisFactorsUsed.put( analysis, factorsUsed ); } } /* * If we got a subset analysis, just use it. */ if ( !filtered.isEmpty() ) { log.info( "Using subsetted analyses for " + analyses.iterator().next().getExperimentAnalyzed() ); return filtered; } if ( analysisFactorsUsed.isEmpty() ) { log.info( "No analyses were usable for " + analyses.iterator().next().getExperimentAnalyzed() ); return filtered; } /* * Look for the analysis that has the most factors. We might change this to pick more than one if they use * different factors, but this would be pretty rare. 
*/ assert !analysisFactorsUsed.isEmpty(); DifferentialExpressionAnalysis best = null; for ( DifferentialExpressionAnalysis candidate : analysisFactorsUsed.keySet() ) { if ( best == null || analysisFactorsUsed.get( best ).size() < analysisFactorsUsed.get( candidate ).size() ) { best = candidate; } } return filtered; } /** * @param factorValues * @param baselineFactorValueId * @return */ private List<FactorValue> filterFactorValues( Collection<FactorValue> factorValues, long baselineFactorValueId ) { List<FactorValue> filteredFactorValues = new LinkedList<FactorValue>(); for ( FactorValue factorValue : factorValues ) { if ( factorValue.getId().equals( baselineFactorValueId ) ) continue; // Skip baseline filteredFactorValues.add( factorValue ); } return filteredFactorValues; } /** * Remove resultSets that are not usable for one reason or another (e.g., intearctions, batch effects) * * @param resultSets * @return */ private List<ExpressionAnalysisResultSet> filterResultSets( Collection<ExpressionAnalysisResultSet> resultSets ) { List<ExpressionAnalysisResultSet> filteredResultSets = new LinkedList<ExpressionAnalysisResultSet>(); for ( ExpressionAnalysisResultSet resultSet : resultSets ) { // Skip interactions. if ( resultSet.getExperimentalFactors().size() != 1 ) continue; // Skip batch effect ones. if ( isBatch( resultSet ) ) continue; // Skip if baseline is not specified. if ( resultSet.getBaselineGroup() == null ) { log.error( "Possible Data Issue: resultSet.getBaselineGroup() returned null for result set with ID=" + resultSet.getId() ); continue; } // must have hitlists populated if ( resultSet.getHitListSizes() == null ) { log.warn( "Possible data issue: resultSet.getHitListSizes() returned null for result set with ID=" + resultSet.getId() ); continue; } filteredResultSets.add( resultSet ); } return filteredResultSets; } /** * Retrieve the details (contrasts) for results which meet the criterion. (PVALUE_CONTRAST_SELECT_THRESHOLD) * * @param geneToProbeResult * @return */ private Map<Long, DifferentialExpressionAnalysisResult> getDetailsForContrasts( Collection<DiffExprGeneSearchResult> diffExResults ) { StopWatch timer = new StopWatch(); timer.start(); List<Long> resultsWithContrasts = new ArrayList<Long>(); for ( DiffExprGeneSearchResult r : diffExResults ) { if ( r.getResultId() == null ) { // it is a dummy result. It means there is no result for this gene in this resultset. continue; } // Here I am trying to avoid fetching them when there is no hope. if ( r.getCorrectedPvalue() == null || r.getCorrectedPvalue() > LinearModelAnalyzer.PVALUE_CONTRAST_SELECT_THRESHOLD ) { // Then it won't have contrasts; no need to fetch. 
continue; } resultsWithContrasts.add( r.getResultId() ); } Map<Long, DifferentialExpressionAnalysisResult> detailedResults = new HashMap<Long, DifferentialExpressionAnalysisResult>(); if ( !resultsWithContrasts.isEmpty() ) { detailedResults = EntityUtils .getIdMap( differentialExpressionResultService.load( resultsWithContrasts ) ); } timer.stop(); if ( timer.getTotalTimeMillis() > 1 ) { log.info( "Fetch contrasts for " + resultsWithContrasts.size() + " results: " + timer.getTotalTimeMillis() + "ms" ); } return detailedResults; } /** * @param g * @return */ private List<Long> getGeneIds( Collection<DifferentialExpressionGenesConditionsValueObject.Gene> g ) { List<Long> ids = new LinkedList<Long>(); for ( DifferentialExpressionGenesConditionsValueObject.Gene gene : g ) { ids.add( gene.getId() ); } return ids; } /** * @param resultSet * @return */ private boolean isBatch( ExpressionAnalysisResultSet resultSet ) { for ( ExperimentalFactor factor : resultSet.getExperimentalFactors() ) { if ( ExperimentalDesignUtils.isBatch( factor ) ) { return true; } } return false; } /** * @param resultSet * @param geneId * @param searchResult * @param correctedPvalue should not be null. * @param pValue should not be null. * @param numProbes * @param numProbesDiffExpressed */ private void markCellsBlack( ExpressionAnalysisResultSet resultSet, Long geneId, DifferentialExpressionGenesConditionsValueObject searchResult, Double correctedPvalue, Double pValue, int numProbes, int numProbesDiffExpressed ) { /* * Note that if the resultSet has more than one experimental factor, it is an interaction term. */ assert resultSet.getExperimentalFactors().size() == 1 : "Should not have been passed an interaction term"; ExperimentalFactor experimentalFactor = resultSet.getExperimentalFactors().iterator().next(); Collection<FactorValue> factorValues = experimentalFactor.getFactorValues(); for ( FactorValue factorValue : factorValues ) { String conditionId = DifferentialExpressionGenesConditionsValueObject.constructConditionId( resultSet.getId(), factorValue.getId() ); searchResult.addBlackCell( geneId, conditionId, correctedPvalue, pValue, numProbes, numProbesDiffExpressed ); } } /** * @param searchResult * @param resultSetToGeneResults * @param resultSetMap * @param detailedResults */ private void processHits( DifferentialExpressionGenesConditionsValueObject searchResult, Map<Long, Map<Long, DiffExprGeneSearchResult>> resultSetToGeneResults, Map<Long, ExpressionAnalysisResultSet> resultSetMap, Map<Long, DifferentialExpressionAnalysisResult> detailedResults ) { int i = 0; for ( Entry<Long, Map<Long, DiffExprGeneSearchResult>> resultSetEntry : resultSetToGeneResults.entrySet() ) { Map<Long, DiffExprGeneSearchResult> geneToProbeResult = resultSetEntry.getValue(); ExpressionAnalysisResultSet resultSet = resultSetMap.get( resultSetEntry.getKey() ); assert resultSet != null; processHitsForResultSet( searchResult, detailedResults, geneToProbeResult, resultSet ); if ( ++i % 2000 == 0 ) { log.info( "Processed " + i + "/" + resultSetToGeneResults.size() + " hits." ); } } } /** * @param searchResult * @param detailedResults * @param geneToProbeResult * @param resultSet */ private void processHitsForResultSet( DifferentialExpressionGenesConditionsValueObject searchResult, Map<Long, DifferentialExpressionAnalysisResult> detailedResults, Map<Long, DiffExprGeneSearchResult> geneToProbeResult, ExpressionAnalysisResultSet resultSet ) { // No database calls. 
log.info("Start processing hits for result sets."); try { for ( Long geneId : geneToProbeResult.keySet() ) { DiffExprGeneSearchResult diffExprGeneSearchResult = geneToProbeResult.get( geneId ); Double correctedPvalue = diffExprGeneSearchResult.getCorrectedPvalue(); Double uncorrectedPvalue = diffExprGeneSearchResult.getPvalue(); // this means we got a 'dummy' value, indicating missing data. if ( correctedPvalue == null ) continue; assert uncorrectedPvalue != null; // arbitrary fixing (meant to deal with zeros). Remember these are usually FDRs. if ( correctedPvalue < TINY_QVALUE ) { correctedPvalue = TINY_QVALUE; } if ( uncorrectedPvalue < TINY_PVALUE ) { uncorrectedPvalue = TINY_PVALUE; } int numberOfProbes = diffExprGeneSearchResult.getNumberOfProbes(); int numberOfProbesDiffExpressed = diffExprGeneSearchResult.getNumberOfProbesDiffExpressed(); markCellsBlack( resultSet, geneId, searchResult, correctedPvalue, uncorrectedPvalue, numberOfProbes, numberOfProbesDiffExpressed ); Long probeResultId = diffExprGeneSearchResult.getResultId(); if ( !detailedResults.containsKey( probeResultId ) ) { continue; } DifferentialExpressionAnalysisResult deaResult = detailedResults.get( probeResultId ); for ( ContrastResult cr : deaResult.getContrasts() ) { FactorValue factorValue = cr.getFactorValue(); assert factorValue != null : "Null factor value for contrast with id=" + cr.getId(); String conditionId = DifferentialExpressionGenesConditionsValueObject.constructConditionId( resultSet.getId(), factorValue.getId() ); searchResult.addCell( geneId, conditionId, correctedPvalue, cr.getLogFoldChange(), numberOfProbes, numberOfProbesDiffExpressed, uncorrectedPvalue ); } } } catch (Exception e) { log.error(e.getMessage()); throw e; } log.info("Done processing hits for result sets."); } /** * @param stage * @param percent */ private synchronized void setTaskProgress( String stage, double percent ) { this.taskProgressStage = stage; this.taskProgressPercent = percent; } } protected static Log log = LogFactory.getLog( DifferentialExpressionGeneConditionSearchServiceImpl.class.getName() ); @Autowired private DifferentialExpressionResultService differentialExpressionResultService; @Autowired private DifferentialExpressionAnalysisService differentialExpressionAnalysisService; @Autowired private ExpressionExperimentService eeService; @Autowired private CacheManager cacheManager; private Cache diffExpSearchTasksCache; /* * (non-Javadoc) * * @see * ubic.gemma.web.visualization.DifferentialExpressionGeneConditionSearchService#getDiffExpSearchTaskProgress(java * .lang.String) */ @Override public TaskProgress getDiffExpSearchTaskProgress( String taskId ) { if ( this.diffExpSearchTasksCache.isKeyInCache( taskId ) ) { DifferentialExpressionSearchTask diffExpSearchTask = ( DifferentialExpressionSearchTask ) this.diffExpSearchTasksCache .get( taskId ).getObjectValue(); TaskProgress taskProgress = diffExpSearchTask.getTaskProgress(); DifferentialExpressionGenesConditionsValueObject result = taskProgress.getTaskResult(); if ( result != null ) { this.diffExpSearchTasksCache.remove( taskId ); } return taskProgress; } return new TaskProgress( "Removed", 0.0, null ); } /* * (non-Javadoc) * * @see * ubic.gemma.web.visualization.DifferentialExpressionGeneConditionSearchService#scheduleDiffExpSearchTask(java. 
* util.List, java.util.List, java.util.List, java.util.List) */ @Override public String scheduleDiffExpSearchTask( List<List<Gene>> genes, List<Collection<ExpressionExperiment>> experiments, List<String> geneGroupNames, List<String> experimentGroupNames ) { log.info( "Got request to schedule search involving " + genes.size() + " gene groups and " + experiments.size() + " experiments groups" ); DifferentialExpressionSearchTask diffExpSearchTask = new DifferentialExpressionSearchTask( genes, experiments, geneGroupNames, experimentGroupNames ); String taskId = UUID.randomUUID().toString(); this.diffExpSearchTasksCache.put( new Element( taskId, diffExpSearchTask ) ); ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor(); Future<DifferentialExpressionGenesConditionsValueObject> submit = singleThreadExecutor .submit( diffExpSearchTask ); singleThreadExecutor.shutdown(); return taskId; } @PostConstruct protected void init() { this.diffExpSearchTasksCache = new Cache( "DifferentialExpressionVisSearchTasks", 300, false, false, 3600, 3600 ); this.cacheManager.addCache( this.diffExpSearchTasksCache ); } }
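The scheduling and polling code above keeps each search task in a cache keyed by a random UUID, runs it on a throwaway single-thread executor, and lets the caller poll for progress until a result is available, at which point the entry is evicted. The following is a minimal, self-contained sketch of that pattern using only java.util.concurrent; the names (TaskRegistry, ProgressTask, schedule, poll) are hypothetical, and a plain ConcurrentHashMap stands in for the Ehcache-backed cache used by the service above.

// Illustrative sketch only; not Gemma's actual API.
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class TaskRegistry {

    /** A long-running task that reports its own progress while it computes a String result. */
    static class ProgressTask implements Callable<String> {
        private volatile double percent = 0.0;
        private volatile String result;

        @Override
        public String call() throws Exception {
            for (int i = 1; i <= 10; i++) {
                Thread.sleep(50);   // stand-in for real work
                percent = i * 10.0; // update progress as the computation advances
            }
            result = "done";
            return result;
        }

        double getPercent() { return percent; }
        String getResult() { return result; } // null until the task finishes
    }

    private final Map<String, ProgressTask> tasks = new ConcurrentHashMap<>();

    /** Schedule a task on a throwaway single-thread executor and hand back an id for polling. */
    String schedule(ProgressTask task) {
        String taskId = UUID.randomUUID().toString();
        tasks.put(taskId, task);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.submit(task);
        executor.shutdown(); // lets the worker thread die once the task completes
        return taskId;
    }

    /** Poll progress by id; drop the task from the registry once its result is available. */
    String poll(String taskId) {
        ProgressTask task = tasks.get(taskId);
        if (task == null) {
            return "Removed";
        }
        if (task.getResult() != null) {
            tasks.remove(taskId);
        }
        return String.format("%.0f%% complete", task.getPercent());
    }

    public static void main(String[] args) throws Exception {
        TaskRegistry registry = new TaskRegistry();
        String id = registry.schedule(new ProgressTask());
        for (int i = 0; i < 6; i++) {
            System.out.println(registry.poll(id));
            Thread.sleep(120);
        }
    }
}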
A bit more logging.
gemma-web/src/main/java/ubic/gemma/web/visualization/DifferentialExpressionGeneConditionSearchServiceImpl.java
A bit more logging.
Java
apache-2.0
78b5523443106181cbddc7b1e534ddbffcaf419a
0
bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr,bmwcarit/joynr
/* * #%L * %% * Copyright (C) 2011 - 2017 BMW Car IT GmbH * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package io.joynr.messaging.mqtt.paho.client; import static com.google.inject.util.Modules.override; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import java.io.File; import java.io.PrintWriter; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Properties; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import org.eclipse.paho.client.mqttv3.MqttException; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.Multibinder; import com.google.inject.name.Names; import io.joynr.common.JoynrPropertiesModule; import io.joynr.exceptions.JoynrIllegalStateException; import io.joynr.exceptions.JoynrMessageNotSentException; import io.joynr.messaging.FailureAction; import io.joynr.messaging.JoynrMessageProcessor; import io.joynr.messaging.MessagingPropertyKeys; import io.joynr.messaging.NoOpRawMessagingPreprocessor; import io.joynr.messaging.RawMessagingPreprocessor; import io.joynr.messaging.mqtt.IMqttMessagingSkeleton; import io.joynr.messaging.mqtt.JoynrMqttClient; import io.joynr.messaging.mqtt.MqttClientFactory; import io.joynr.messaging.mqtt.MqttClientIdProvider; import io.joynr.messaging.mqtt.MqttModule; import io.joynr.messaging.mqtt.settings.LimitAndBackpressureSettings; import io.joynr.messaging.mqtt.statusmetrics.MqttStatusReceiver; import io.joynr.messaging.routing.MessageRouter; import joynr.system.RoutingTypes.MqttAddress; public class MqttPahoClientTest { private static int mqttBrokerPort; private static int mqttSecureBrokerPort; private static final String joynrUser = "joynr"; private static final String joynrPassword = "password"; private static final String KEYSTORE_PASSWORD = "password"; private static final boolean NON_SECURE_CONNECTION = false; private static Process mosquittoProcess; private Injector injector; private MqttClientFactory mqttClientFactory; private 
MqttAddress ownTopic; @Mock private IMqttMessagingSkeleton mockReceiver; @Mock private MessageRouter mockMessageRouter; private JoynrMqttClient joynrMqttClient; private Properties properties; private byte[] serializedMessage; private static Path passwordFilePath; private static Path configFilePath; @Rule public ExpectedException thrown = ExpectedException.none(); @BeforeClass public static void startBroker() throws Exception { mqttBrokerPort = 2883; mqttSecureBrokerPort = 9883; String path = System.getProperty("path") != null ? System.getProperty("path") : ""; passwordFilePath = Files.createTempFile("mosquitto_passwd_", null); configFilePath = Files.createTempFile("mosquitto_conf_", null); // create mosquitto configuration with referenced password file Path cafilePath = Paths.get("/", "data", "ssl-data", "certs", "ca.cert.pem"); Path certfilePath = Paths.get("/", "data", "ssl-data", "certs", "server.cert.pem"); Path keyfilePath = Paths.get("/", "data", "ssl-data", "private", "server.key.pem"); PrintWriter printWriter = new PrintWriter(configFilePath.toFile()); printWriter.println("max_queued_messages 0"); printWriter.println("persistence false"); printWriter.println("listener " + Integer.toString(mqttBrokerPort) + " 127.0.0.1"); printWriter.println("password_file " + passwordFilePath.toAbsolutePath().toString()); printWriter.println("listener " + Integer.toString(mqttSecureBrokerPort) + " 127.0.0.1"); printWriter.println("cafile " + cafilePath.toAbsolutePath().toString()); printWriter.println("certfile " + certfilePath.toAbsolutePath().toString()); printWriter.println("keyfile " + keyfilePath.toAbsolutePath().toString()); printWriter.println("require_certificate true"); printWriter.close(); // create mosquitto password file with an entry for user 'joynr' File file = passwordFilePath.toFile(); file.createNewFile(); ProcessBuilder processBuilder = new ProcessBuilder(path + "mosquitto_passwd", "-b", passwordFilePath.toAbsolutePath().toString(), joynrUser, joynrPassword); int exitValue = processBuilder.start().waitFor(); assertEquals(exitValue, 0); // start mosquitto with the above config file processBuilder = new ProcessBuilder(path + "mosquitto", "-c", configFilePath.toAbsolutePath().toString()); mosquittoProcess = processBuilder.start(); } @AfterClass public static void stopBroker() throws Exception { mosquittoProcess.destroy(); Files.deleteIfExists(configFilePath); Files.deleteIfExists(passwordFilePath); } @Before public void setUp() { MockitoAnnotations.initMocks(this); properties = new Properties(); properties.put(MqttModule.PROPERTY_KEY_MQTT_RECONNECT_SLEEP_MS, "100"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEEP_ALIVE_TIMER_SEC, "60"); properties.put(MqttModule.PROPERTY_KEY_MQTT_CONNECTION_TIMEOUT_SEC, "30"); properties.put(MqttModule.PROPERTY_KEY_MQTT_TIME_TO_WAIT_MS, "-1"); properties.put(MqttModule.PROPERTY_KEY_MQTT_ENABLE_SHARED_SUBSCRIPTIONS, "false"); properties.put(MessagingPropertyKeys.MQTT_TOPIC_PREFIX_MULTICAST, ""); properties.put(MessagingPropertyKeys.MQTT_TOPIC_PREFIX_REPLYTO, ""); properties.put(MessagingPropertyKeys.MQTT_TOPIC_PREFIX_UNICAST, ""); properties.put(MqttModule.PROPERTY_KEY_MQTT_MAX_MSGS_INFLIGHT, "100"); properties.put(MessagingPropertyKeys.CHANNELID, "myChannelId"); properties.put(LimitAndBackpressureSettings.PROPERTY_MAX_INCOMING_MQTT_REQUESTS, "0"); properties.put(LimitAndBackpressureSettings.PROPERTY_BACKPRESSURE_ENABLED, "false"); properties.put(LimitAndBackpressureSettings.PROPERTY_BACKPRESSURE_INCOMING_MQTT_REQUESTS_UPPER_THRESHOLD, "80"); 
properties.put(LimitAndBackpressureSettings.PROPERTY_BACKPRESSURE_INCOMING_MQTT_REQUESTS_LOWER_THRESHOLD, "20"); properties.put(MqttModule.PROPERTY_MQTT_CLEAN_SESSION, "false"); properties.put(MqttModule.PROPERTY_KEY_MQTT_MAX_MESSAGE_SIZE_BYTES, "0"); properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "tcp://localhost:" + mqttBrokerPort); properties.put(MqttModule.PROPERTY_KEY_MQTT_USERNAME, joynrUser); properties.put(MqttModule.PROPERTY_KEY_MQTT_PASSWORD, joynrPassword); serializedMessage = new byte[10]; } @After public void tearDown() { if (joynrMqttClient != null) { joynrMqttClient.shutdown(); } } private void createJoynrMqttClient() { try { createJoynrMqttClient(NON_SECURE_CONNECTION); } catch (Exception e) { e.printStackTrace(); } } // Get the path of the test resources private static String getResourcePath(String filename) throws URISyntaxException { URL resource = ClassLoader.getSystemClassLoader().getResource(filename); return resource.getPath(); } private void createJoynrMqttClient(boolean isSecureConnection) { joynrMqttClient = createMqttClientWithoutSubscription(isSecureConnection, null); ownTopic = injector.getInstance((Key.get(MqttAddress.class, Names.named(MqttModule.PROPERTY_MQTT_GLOBAL_ADDRESS)))); joynrMqttClient.subscribe(ownTopic.getTopic()); } private JoynrMqttClient createMqttClientWithoutSubscription() { return createMqttClientWithoutSubscription(NON_SECURE_CONNECTION, null); } private JoynrMqttClient createMqttClientWithoutSubscription(boolean isSecureConnection, final MqttStatusReceiver mqttStatusReceiver) { if (isSecureConnection) { properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "ssl://localhost:" + mqttSecureBrokerPort); } else { properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "tcp://localhost:" + mqttBrokerPort); } JoynrMqttClient client = createMqttClientInternal(mqttStatusReceiver); final Semaphore startSemaphore = new Semaphore(0); Thread thread = new Thread(new Runnable() { public void run() { client.start(); startSemaphore.release(); } }); thread.start(); try { boolean started = startSemaphore.tryAcquire(2000, TimeUnit.MILLISECONDS); if (started) { thread.join(); return client; } } catch (Exception e) { // ignore } try { client.shutdown(); thread.join(); } catch (Exception e) { // ignore } throw new JoynrIllegalStateException("failed to start client"); } private JoynrMqttClient createMqttClientInternal(final MqttStatusReceiver mqttStatusReceiver) { // always create a new Factory because the factory caches its client. 
createMqttClientFactory(mqttStatusReceiver); JoynrMqttClient client = mqttClientFactory.createSender(); client.setMessageListener(mockReceiver); return client; } private void createMqttClientFactory(final MqttStatusReceiver mqttStatusReceiver) { injector = Guice.createInjector(override(new MqttPahoModule()).with(new AbstractModule() { @Override protected void configure() { if (mqttStatusReceiver != null) { bind(MqttStatusReceiver.class).toInstance(mqttStatusReceiver); } } }), new JoynrPropertiesModule(properties), new AbstractModule() { @Override protected void configure() { bind(MessageRouter.class).toInstance(mockMessageRouter); bind(ScheduledExecutorService.class).annotatedWith(Names.named(MessageRouter.SCHEDULEDTHREADPOOL)) .toInstance(Executors.newScheduledThreadPool(10)); bind(RawMessagingPreprocessor.class).to(NoOpRawMessagingPreprocessor.class); Multibinder.newSetBinder(binder(), new TypeLiteral<JoynrMessageProcessor>() { }); } }); mqttClientFactory = injector.getInstance(MqttClientFactory.class); } @Test public void mqttClientTestWithTwoConnections() throws Exception { final boolean separateConnections = true; final MqttStatusReceiver mqttStatusReceiver = mock(MqttStatusReceiver.class); properties.put(MqttModule.PROPERTY_KEY_MQTT_SEPARATE_CONNECTIONS, String.valueOf(separateConnections)); properties.put(MqttModule.PROPERTY_MQTT_CLEAN_SESSION, "true"); createMqttClientFactory(mqttStatusReceiver); ownTopic = injector.getInstance((Key.get(MqttAddress.class, Names.named(MqttModule.PROPERTY_MQTT_GLOBAL_ADDRESS)))); JoynrMqttClient clientSender = mqttClientFactory.createSender(); JoynrMqttClient clientReceiver = mqttClientFactory.createReceiver(); assertNotEquals(clientSender, clientReceiver); clientReceiver.setMessageListener(mockReceiver); clientSender.start(); clientReceiver.start(); verify(mqttStatusReceiver, times(2)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); clientReceiver.subscribe(ownTopic.getTopic()); clientSender.publishMessage(ownTopic.getTopic(), serializedMessage); verify(mockReceiver, timeout(500).times(1)).transmit(eq(serializedMessage), any(FailureAction.class)); clientReceiver.shutdown(); clientSender.shutdown(); verify(mqttStatusReceiver, timeout(500).times(2)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.NOT_CONNECTED); } @Test public void mqttClientTestWithOneConnection() throws Exception { final MqttStatusReceiver mqttStatusReceiver = mock(MqttStatusReceiver.class); createMqttClientFactory(mqttStatusReceiver); JoynrMqttClient clientSender = mqttClientFactory.createSender(); JoynrMqttClient clientReceiver = mqttClientFactory.createReceiver(); assertEquals(clientSender, clientReceiver); clientSender.start(); clientReceiver.start(); verify(mqttStatusReceiver, times(1)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); clientReceiver.shutdown(); clientSender.shutdown(); verify(mqttStatusReceiver, timeout(500).times(1)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.NOT_CONNECTED); } private void joynrMqttClientPublishAndVerifyReceivedMessage(byte[] serializedMessage) { joynrMqttClient.publishMessage(ownTopic.getTopic(), serializedMessage); verify(mockReceiver, timeout(100).times(1)).transmit(eq(serializedMessage), any(FailureAction.class)); } @Test public void mqttClientTestWithEnabledMessageSizeCheck() throws Exception { final int maxMessageSize = 100; properties.put(MqttModule.PROPERTY_KEY_MQTT_MAX_MESSAGE_SIZE_BYTES, String.valueOf(maxMessageSize)); 
createJoynrMqttClient(); byte[] shortSerializedMessage = new byte[maxMessageSize]; joynrMqttClientPublishAndVerifyReceivedMessage(shortSerializedMessage); byte[] largeSerializedMessage = new byte[maxMessageSize + 1]; thrown.expect(JoynrMessageNotSentException.class); thrown.expectMessage("MQTT Publish failed: maximum allowed message size of " + maxMessageSize + " bytes exceeded, actual size is " + largeSerializedMessage.length + " bytes"); joynrMqttClient.publishMessage(ownTopic.getTopic(), largeSerializedMessage); } private void mqttClientTestWithDisabledMessageSizeCheck(boolean isSecureConnection) throws Exception { final int initialMessageSize = 100; properties.put(MqttModule.PROPERTY_KEY_MQTT_MAX_MESSAGE_SIZE_BYTES, "0"); createJoynrMqttClient(isSecureConnection); byte[] shortSerializedMessage = new byte[initialMessageSize]; joynrMqttClientPublishAndVerifyReceivedMessage(shortSerializedMessage); byte[] largeSerializedMessage = new byte[initialMessageSize + 1]; joynrMqttClientPublishAndVerifyReceivedMessage(largeSerializedMessage); } @Test public void mqttClientTestWithDisabledMessageSizeCheckWithoutTls() throws Exception { final boolean isSecureConnection = false; mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } private void mqttClientTestWithCredentials(boolean expectException) throws Exception { final boolean isSecureConnection = false; if (expectException) { thrown.expect(JoynrIllegalStateException.class); } mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } @Test public void mqttClientTestWithWrongUserAndSomePassword() throws Exception { boolean expectException = true; properties.put(MqttModule.PROPERTY_KEY_MQTT_USERNAME, "wronguser"); mqttClientTestWithCredentials(expectException); } @Test public void mqttClientTestWithCorrectUserButWrongPassword() throws Exception { boolean expectException = true; properties.put(MqttModule.PROPERTY_KEY_MQTT_PASSWORD, "wrongpassword"); mqttClientTestWithCredentials(expectException); } @Test public void mqttClientTestWithCorrectUserAndCorrectPassword() throws Exception { boolean expectException = false; mqttClientTestWithCredentials(expectException); } @Test public void mqttClientTestWithEmptyUser() throws Exception { final boolean isSecureConnection = false; properties.put(MqttModule.PROPERTY_KEY_MQTT_USERNAME, ""); thrown.expect(JoynrIllegalStateException.class); mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } @Test public void mqttClientTestWithCorrectUserButEmptyPassword() throws Exception { final boolean isSecureConnection = false; properties.put(MqttModule.PROPERTY_KEY_MQTT_PASSWORD, ""); thrown.expect(JoynrIllegalStateException.class); mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } @Test public void mqttClientTestWithDisabledMessageSizeCheckWithTlsAndDefaultJksStore() throws Exception { final String keyStorePath = getResourcePath("clientkeystore.jks"); final String trustStorePath = getResourcePath("catruststore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); final boolean isSecureConnection = true; mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } @Test public void mqttClientTestWithDisabledMessageSizeCheckWithTlsAndP12Store() throws Exception { final String keyStorePath = 
getResourcePath("clientkeystore.p12"); final String trustStorePath = getResourcePath("catruststore.p12"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_TYPE, "PKCS12"); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_TYPE, "PKCS12"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); final boolean isSecureConnection = true; mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } private void testCreateMqttClientFailsWithJoynrIllegalArgumentException() { final boolean isSecureConnection = true; try { createJoynrMqttClient(isSecureConnection); fail("Expected JoynrIllegalStateException"); } catch (JoynrIllegalStateException e) { // expected behaviour } } @Test public void mqttClientTLSCreationFailsIfKeystorePasswordIsWrongOrMissing() throws URISyntaxException { final String wrongPassword = "wrongPassword"; final String keyStorePath = getResourcePath("clientkeystore.jks"); final String trustStorePath = getResourcePath("catruststore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); // test missing keystore password properties.remove(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); // test wrong keystore password properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, wrongPassword); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); } @Test public void mqttClientTLSCreationFailsIfTrustorePasswordIsWrongOrMissing() throws URISyntaxException { final String wrongPassword = "wrongPassword"; final String keyStorePath = getResourcePath("clientkeystore.jks"); final String trustStorePath = getResourcePath("catruststore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); // test missing truststore password properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.remove(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); // test wrong truststore password properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, wrongPassword); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); } @Test public void mqttClientTLSCreationFailsIfKeystorePathIsWrongOrMissing() throws URISyntaxException { final String wrongKeyStorePath = getResourcePath("clientkeystore.jks") + "42"; final String trustStorePath = getResourcePath("catruststore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); // test missing keystore path properties.remove(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); // test wrong keystore path properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, wrongKeyStorePath); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); } @Test public void mqttClientTLSCreationFailsIfTrustorePathIsWrongOrMissing() throws 
URISyntaxException { final String wrongTrustStorePath = getResourcePath("catruststore.jks") + "42"; final String keyStorePath = getResourcePath("clientkeystore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); // test missing truststore path properties.remove(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); // test wrong truststore path properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, wrongTrustStorePath); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); } @Test public void mqttClientTestWithDisabledCleanSession() throws Exception { properties.put(MqttModule.PROPERTY_MQTT_CLEAN_SESSION, "false"); String topic = "otherTopic"; // create a MqttClient which was subscribed on the topic and shut it down. joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.subscribe(topic); joynrMqttClient.shutdown(); // use another MqttClient to publish a message for the first topic joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.publishMessage(topic, serializedMessage); Thread.sleep(100); joynrMqttClient.shutdown(); // create a MqttClient and subscribe to the same topic as the first one // MqttClient will receive message if cleanSession is disabled joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.subscribe(topic); Thread.sleep(100); verify(mockReceiver, atLeast(1)).transmit(eq(serializedMessage), any(FailureAction.class)); } @Test public void mqttClientTestWithEnabledCleanSession() throws Exception { properties.put(MqttModule.PROPERTY_MQTT_CLEAN_SESSION, "true"); String topic = "otherTopic1"; // create a MqttClient which was subscribed on the topic and shut it down. 
joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.subscribe(topic); joynrMqttClient.shutdown(); // use another MqttClient to publish a message for the first topic joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.publishMessage(topic, serializedMessage); Thread.sleep(100); joynrMqttClient.shutdown(); // create a MqttClient and subscribe to the same topic as the first one // MqttClient will receive message if cleanSession is disabled joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.subscribe(topic); Thread.sleep(100); verify(mockReceiver, times(0)).transmit(eq(serializedMessage), any(FailureAction.class)); } @Test public void mqttClientTestResubscriptionWithCleanRestartEnabled() throws Exception { properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "tcp://localhost:" + mqttBrokerPort); injector = Guice.createInjector(new MqttPahoModule(), new JoynrPropertiesModule(properties), new AbstractModule() { @Override protected void configure() { bind(MessageRouter.class).toInstance(mockMessageRouter); bind(ScheduledExecutorService.class).annotatedWith(Names.named(MessageRouter.SCHEDULEDTHREADPOOL)) .toInstance(Executors.newScheduledThreadPool(10)); bind(RawMessagingPreprocessor.class).to(NoOpRawMessagingPreprocessor.class); Multibinder.newSetBinder(binder(), new TypeLiteral<JoynrMessageProcessor>() { }); } }); ownTopic = injector.getInstance((Key.get(MqttAddress.class, Names.named(MqttModule.PROPERTY_MQTT_GLOBAL_ADDRESS)))); ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(10); MqttClientIdProvider mqttClientIdProvider = injector.getInstance(MqttClientIdProvider.class); String clientId = mqttClientIdProvider.getClientId(); String brokerUri = "tcp://localhost:" + mqttBrokerPort; int reconnectSleepMs = 100; int keepAliveTimerSec = 60; int connectionTimeoutSec = 60; int timeToWaitMs = -1; int maxMsgsInflight = 100; int maxMsgSizeBytes = 0; boolean cleanSession = true; final boolean isReceiver = true; final boolean separateConnections = false; String username = joynrUser; String password = joynrPassword; joynrMqttClient = new MqttPahoClient(new MqttAddress(brokerUri, "sometopic"), clientId, scheduledExecutorService, reconnectSleepMs, keepAliveTimerSec, connectionTimeoutSec, timeToWaitMs, maxMsgsInflight, maxMsgSizeBytes, cleanSession, isReceiver, separateConnections, "", "", "", "", "", "", username, password, mock(MqttStatusReceiver.class)); joynrMqttClient.start(); joynrMqttClient.setMessageListener(mockReceiver); joynrMqttClient.subscribe(ownTopic.getTopic()); // manually call disconnect and connectionLost MqttPahoClient mqttPahoClient = (MqttPahoClient) joynrMqttClient; mqttPahoClient.getMqttClient().disconnect(500); MqttException exception = new MqttException(MqttException.REASON_CODE_CLIENT_TIMEOUT); mqttPahoClient.connectionLost(exception); joynrMqttClientPublishAndVerifyReceivedMessage(serializedMessage); } // This test was disabled, because it runs perfectly on a local machine but not in the CI. // Further investigations are required to stabilize this test. 
@Test @Ignore public void testClientNotifiesStatusReceiverAboutBrokerDisconnect() throws Exception { final MqttStatusReceiver mqttStatusReceiver = mock(MqttStatusReceiver.class); @SuppressWarnings("unused") final JoynrMqttClient mqttClient = createMqttClientWithoutSubscription(false, mqttStatusReceiver); verify(mqttStatusReceiver).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); stopBroker(); Thread.sleep(1000); verify(mqttStatusReceiver).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.NOT_CONNECTED); startBroker(); Thread.sleep(2000); verify(mqttStatusReceiver, times(2)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); } @Test public void testClientNotifiesStatusReceiverAboutShutdownDisconnect() throws Exception { final MqttStatusReceiver mqttStatusReceiver = mock(MqttStatusReceiver.class); final JoynrMqttClient mqttClient = createMqttClientWithoutSubscription(false, mqttStatusReceiver); verify(mqttStatusReceiver).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); mqttClient.shutdown(); verify(mqttStatusReceiver).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.NOT_CONNECTED); } @Test public void mqttClientTestShutdownIfDisconnectFromMQTT() throws Exception { properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "tcp://localhost:1111"); properties.put(MqttModule.PROPERTY_KEY_MQTT_RECONNECT_SLEEP_MS, "100"); // create and start client final JoynrMqttClient client = createMqttClientInternal(mock(MqttStatusReceiver.class)); final Semaphore semaphoreBeforeStartMethod = new Semaphore(0); final Semaphore semaphoreAfterStartMethod = new Semaphore(0); final int timeout = 500; Runnable myRunnable = new Runnable() { @Override public void run() { semaphoreBeforeStartMethod.release(); client.start(); semaphoreAfterStartMethod.release(); } }; new Thread(myRunnable).start(); assertTrue(semaphoreBeforeStartMethod.tryAcquire(timeout, TimeUnit.MILLISECONDS)); // sleep in order to increase the probability of the runnable // to be in the sleep part of the start method Thread.sleep(timeout); // At this point the semaphoreAfterStartMethod is supposed to be not released // because we expect to be still in start() assertFalse(semaphoreAfterStartMethod.tryAcquire()); client.shutdown(); assertTrue(semaphoreAfterStartMethod.tryAcquire(timeout, TimeUnit.MILLISECONDS)); } }
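Both createMqttClientWithoutSubscription and mqttClientTestShutdownIfDisconnectFromMQTT above guard a potentially blocking client.start() call with a semaphore that a worker thread releases only after start() returns, while the calling thread waits with tryAcquire and a timeout. The sketch below isolates that handshake; BlockingClient is a hypothetical stand-in for the real MQTT client, invented for the example.

// Illustrative sketch of the bounded-start handshake; not the joynr client itself.
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

public class BoundedStartExample {

    /** Pretends to be a client whose start() blocks until shutdown() is called. */
    static class BlockingClient {
        private final Object lock = new Object();
        private volatile boolean shutdown = false;

        void start() {
            synchronized (lock) {
                while (!shutdown) {
                    try {
                        lock.wait(); // blocks, e.g. while retrying a broker connection
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        return;
                    }
                }
            }
        }

        void shutdown() {
            synchronized (lock) {
                shutdown = true;
                lock.notifyAll();
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        BlockingClient client = new BlockingClient();
        Semaphore started = new Semaphore(0);

        Thread starter = new Thread(() -> {
            client.start();    // may block indefinitely
            started.release(); // only reached once start() has returned
        });
        starter.start();

        // Wait a bounded amount of time for start() to complete.
        if (started.tryAcquire(500, TimeUnit.MILLISECONDS)) {
            System.out.println("client started");
        } else {
            // Did not start in time: unblock and clean up instead of hanging the caller.
            System.out.println("start timed out, shutting down");
            client.shutdown();
            starter.join();
        }
    }
}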
java/messaging/mqtt/joynr-mqtt-client/src/test/java/io/joynr/messaging/mqtt/paho/client/MqttPahoClientTest.java
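In createMqttClientFactory above, the production MqttPahoModule is combined with a small anonymous AbstractModule via Modules.override(...).with(...), so a mock MqttStatusReceiver can replace the default binding while every other production binding stays intact. Below is a minimal, self-contained sketch of that override technique; Greeter, ConsoleGreeter and RecordingGreeter are hypothetical types invented for the example, and the sketch mirrors the mechanism, not the joynr modules themselves.

// Illustrative sketch of Guice module overriding for tests.
import static com.google.inject.util.Modules.override;

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

public class OverrideBindingExample {

    interface Greeter {
        String greet(String name);
    }

    static class ConsoleGreeter implements Greeter {
        @Override
        public String greet(String name) {
            return "Hello, " + name;
        }
    }

    static class RecordingGreeter implements Greeter {
        @Override
        public String greet(String name) {
            return "recorded: " + name; // a test double standing in for the real binding
        }
    }

    /** Production wiring. */
    static class ProductionModule extends AbstractModule {
        @Override
        protected void configure() {
            bind(Greeter.class).to(ConsoleGreeter.class);
        }
    }

    public static void main(String[] args) {
        // Keep every production binding, but replace Greeter with the test double.
        Injector injector = Guice.createInjector(override(new ProductionModule()).with(new AbstractModule() {
            @Override
            protected void configure() {
                bind(Greeter.class).toInstance(new RecordingGreeter());
            }
        }));

        System.out.println(injector.getInstance(Greeter.class).greet("joynr")); // prints: recorded: joynr
    }
}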
/* * #%L * %% * Copyright (C) 2011 - 2017 BMW Car IT GmbH * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package io.joynr.messaging.mqtt.paho.client; import static com.google.inject.util.Modules.override; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import java.io.File; import java.io.PrintWriter; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Properties; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import org.eclipse.paho.client.mqttv3.MqttException; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.Multibinder; import com.google.inject.name.Names; import io.joynr.common.JoynrPropertiesModule; import io.joynr.exceptions.JoynrIllegalStateException; import io.joynr.exceptions.JoynrMessageNotSentException; import io.joynr.messaging.FailureAction; import io.joynr.messaging.JoynrMessageProcessor; import io.joynr.messaging.MessagingPropertyKeys; import io.joynr.messaging.NoOpRawMessagingPreprocessor; import io.joynr.messaging.RawMessagingPreprocessor; import io.joynr.messaging.mqtt.IMqttMessagingSkeleton; import io.joynr.messaging.mqtt.JoynrMqttClient; import io.joynr.messaging.mqtt.MqttClientFactory; import io.joynr.messaging.mqtt.MqttClientIdProvider; import io.joynr.messaging.mqtt.MqttModule; import io.joynr.messaging.mqtt.settings.LimitAndBackpressureSettings; import io.joynr.messaging.mqtt.statusmetrics.MqttStatusReceiver; import io.joynr.messaging.routing.MessageRouter; import joynr.system.RoutingTypes.MqttAddress; public class MqttPahoClientTest { private static int mqttBrokerPort; private static int mqttSecureBrokerPort; private static final String joynrUser = "joynr"; private static final String joynrPassword = "password"; private static final String KEYSTORE_PASSWORD = "password"; private static final boolean NON_SECURE_CONNECTION = false; private static Process mosquittoProcess; private Injector injector; private MqttClientFactory mqttClientFactory; private 
MqttAddress ownTopic; @Mock private IMqttMessagingSkeleton mockReceiver; @Mock private MessageRouter mockMessageRouter; private JoynrMqttClient joynrMqttClient; private Properties properties; private byte[] serializedMessage; private static Path passwordFilePath; private static Path configFilePath; @Rule public ExpectedException thrown = ExpectedException.none(); @BeforeClass public static void startBroker() throws Exception { mqttBrokerPort = 2883; mqttSecureBrokerPort = 9883; String path = System.getProperty("path") != null ? System.getProperty("path") : ""; passwordFilePath = Files.createTempFile("mosquitto_passwd_", null); configFilePath = Files.createTempFile("mosquitto_conf_", null); // create mosquitto configuration with referenced password file Path cafilePath = Paths.get("/", "data", "ssl-data", "certs", "ca.cert.pem"); Path certfilePath = Paths.get("/", "data", "ssl-data", "certs", "server.cert.pem"); Path keyfilePath = Paths.get("/", "data", "ssl-data", "private", "server.key.pem"); PrintWriter printWriter = new PrintWriter(configFilePath.toFile()); printWriter.println("max_queued_messages 0"); printWriter.println("persistence false"); printWriter.println("listener " + Integer.toString(mqttBrokerPort) + " 127.0.0.1"); printWriter.println("password_file " + passwordFilePath.toAbsolutePath().toString()); printWriter.println("listener " + Integer.toString(mqttSecureBrokerPort) + " 127.0.0.1"); printWriter.println("cafile " + cafilePath.toAbsolutePath().toString()); printWriter.println("certfile " + certfilePath.toAbsolutePath().toString()); printWriter.println("keyfile " + keyfilePath.toAbsolutePath().toString()); printWriter.println("require_certificate true"); printWriter.close(); // create mosquitto password file with an entry for user 'joynr' File file = passwordFilePath.toFile(); file.createNewFile(); ProcessBuilder processBuilder = new ProcessBuilder(path + "mosquitto_passwd", "-b", passwordFilePath.toAbsolutePath().toString(), joynrUser, joynrPassword); int exitValue = processBuilder.start().waitFor(); assertEquals(exitValue, 0); // start mosquitto with the above config file processBuilder = new ProcessBuilder(path + "mosquitto", "-c", configFilePath.toAbsolutePath().toString()); mosquittoProcess = processBuilder.start(); } @AfterClass public static void stopBroker() throws Exception { mosquittoProcess.destroy(); Files.deleteIfExists(configFilePath); Files.deleteIfExists(passwordFilePath); } @Before public void setUp() { MockitoAnnotations.initMocks(this); properties = new Properties(); properties.put(MqttModule.PROPERTY_KEY_MQTT_RECONNECT_SLEEP_MS, "100"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEEP_ALIVE_TIMER_SEC, "60"); properties.put(MqttModule.PROPERTY_KEY_MQTT_CONNECTION_TIMEOUT_SEC, "30"); properties.put(MqttModule.PROPERTY_KEY_MQTT_TIME_TO_WAIT_MS, "-1"); properties.put(MqttModule.PROPERTY_KEY_MQTT_ENABLE_SHARED_SUBSCRIPTIONS, "false"); properties.put(MessagingPropertyKeys.MQTT_TOPIC_PREFIX_MULTICAST, ""); properties.put(MessagingPropertyKeys.MQTT_TOPIC_PREFIX_REPLYTO, ""); properties.put(MessagingPropertyKeys.MQTT_TOPIC_PREFIX_UNICAST, ""); properties.put(MqttModule.PROPERTY_KEY_MQTT_MAX_MSGS_INFLIGHT, "100"); properties.put(MessagingPropertyKeys.CHANNELID, "myChannelId"); properties.put(LimitAndBackpressureSettings.PROPERTY_MAX_INCOMING_MQTT_REQUESTS, "0"); properties.put(LimitAndBackpressureSettings.PROPERTY_BACKPRESSURE_ENABLED, "false"); properties.put(LimitAndBackpressureSettings.PROPERTY_BACKPRESSURE_INCOMING_MQTT_REQUESTS_UPPER_THRESHOLD, "80"); 
properties.put(LimitAndBackpressureSettings.PROPERTY_BACKPRESSURE_INCOMING_MQTT_REQUESTS_LOWER_THRESHOLD, "20"); properties.put(MqttModule.PROPERTY_MQTT_CLEAN_SESSION, "false"); properties.put(MqttModule.PROPERTY_KEY_MQTT_MAX_MESSAGE_SIZE_BYTES, "0"); properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "tcp://localhost:" + mqttBrokerPort); serializedMessage = new byte[10]; } @After public void tearDown() { if (joynrMqttClient != null) { joynrMqttClient.shutdown(); } } private void createJoynrMqttClient() { try { createJoynrMqttClient(NON_SECURE_CONNECTION); } catch (Exception e) { e.printStackTrace(); } } // Get the path of the test resources private static String getResourcePath(String filename) throws URISyntaxException { URL resource = ClassLoader.getSystemClassLoader().getResource(filename); return resource.getPath(); } private void createJoynrMqttClient(boolean isSecureConnection) { joynrMqttClient = createMqttClientWithoutSubscription(isSecureConnection, null); ownTopic = injector.getInstance((Key.get(MqttAddress.class, Names.named(MqttModule.PROPERTY_MQTT_GLOBAL_ADDRESS)))); joynrMqttClient.subscribe(ownTopic.getTopic()); } private JoynrMqttClient createMqttClientWithoutSubscription() { return createMqttClientWithoutSubscription(NON_SECURE_CONNECTION, null); } private JoynrMqttClient createMqttClientWithoutSubscription(boolean isSecureConnection, final MqttStatusReceiver mqttStatusReceiver) { if (isSecureConnection) { properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "ssl://localhost:" + mqttSecureBrokerPort); } else { properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "tcp://localhost:" + mqttBrokerPort); } JoynrMqttClient client = createMqttClientInternal(mqttStatusReceiver); final Semaphore startSemaphore = new Semaphore(0); Thread thread = new Thread(new Runnable() { public void run() { client.start(); startSemaphore.release(); } }); thread.start(); try { boolean started = startSemaphore.tryAcquire(2000, TimeUnit.MILLISECONDS); if (started) { thread.join(); return client; } } catch (Exception e) { // ignore } try { client.shutdown(); thread.join(); } catch (Exception e) { // ignore } throw new JoynrIllegalStateException("failed to start client"); } private JoynrMqttClient createMqttClientInternal(final MqttStatusReceiver mqttStatusReceiver) { // always create a new Factory because the factory caches its client. 
createMqttClientFactory(mqttStatusReceiver); JoynrMqttClient client = mqttClientFactory.createSender(); client.setMessageListener(mockReceiver); return client; } private void createMqttClientFactory(final MqttStatusReceiver mqttStatusReceiver) { injector = Guice.createInjector(override(new MqttPahoModule()).with(new AbstractModule() { @Override protected void configure() { if (mqttStatusReceiver != null) { bind(MqttStatusReceiver.class).toInstance(mqttStatusReceiver); } } }), new JoynrPropertiesModule(properties), new AbstractModule() { @Override protected void configure() { bind(MessageRouter.class).toInstance(mockMessageRouter); bind(ScheduledExecutorService.class).annotatedWith(Names.named(MessageRouter.SCHEDULEDTHREADPOOL)) .toInstance(Executors.newScheduledThreadPool(10)); bind(RawMessagingPreprocessor.class).to(NoOpRawMessagingPreprocessor.class); Multibinder.newSetBinder(binder(), new TypeLiteral<JoynrMessageProcessor>() { }); } }); mqttClientFactory = injector.getInstance(MqttClientFactory.class); } @Test public void mqttClientTestWithTwoConnections() throws Exception { final boolean separateConnections = true; final MqttStatusReceiver mqttStatusReceiver = mock(MqttStatusReceiver.class); properties.put(MqttModule.PROPERTY_KEY_MQTT_SEPARATE_CONNECTIONS, String.valueOf(separateConnections)); properties.put(MqttModule.PROPERTY_MQTT_CLEAN_SESSION, "true"); createMqttClientFactory(mqttStatusReceiver); ownTopic = injector.getInstance((Key.get(MqttAddress.class, Names.named(MqttModule.PROPERTY_MQTT_GLOBAL_ADDRESS)))); JoynrMqttClient clientSender = mqttClientFactory.createSender(); JoynrMqttClient clientReceiver = mqttClientFactory.createReceiver(); assertNotEquals(clientSender, clientReceiver); clientReceiver.setMessageListener(mockReceiver); clientSender.start(); clientReceiver.start(); verify(mqttStatusReceiver, times(2)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); clientReceiver.subscribe(ownTopic.getTopic()); clientSender.publishMessage(ownTopic.getTopic(), serializedMessage); verify(mockReceiver, timeout(500).times(1)).transmit(eq(serializedMessage), any(FailureAction.class)); clientReceiver.shutdown(); clientSender.shutdown(); verify(mqttStatusReceiver, timeout(500).times(2)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.NOT_CONNECTED); } @Test public void mqttClientTestWithOneConnection() throws Exception { final MqttStatusReceiver mqttStatusReceiver = mock(MqttStatusReceiver.class); createMqttClientFactory(mqttStatusReceiver); JoynrMqttClient clientSender = mqttClientFactory.createSender(); JoynrMqttClient clientReceiver = mqttClientFactory.createReceiver(); assertEquals(clientSender, clientReceiver); clientSender.start(); clientReceiver.start(); verify(mqttStatusReceiver, times(1)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); clientReceiver.shutdown(); clientSender.shutdown(); verify(mqttStatusReceiver, timeout(500).times(1)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.NOT_CONNECTED); } private void joynrMqttClientPublishAndVerifyReceivedMessage(byte[] serializedMessage) { joynrMqttClient.publishMessage(ownTopic.getTopic(), serializedMessage); verify(mockReceiver, timeout(100).times(1)).transmit(eq(serializedMessage), any(FailureAction.class)); } @Test public void mqttClientTestWithEnabledMessageSizeCheck() throws Exception { final int maxMessageSize = 100; properties.put(MqttModule.PROPERTY_KEY_MQTT_MAX_MESSAGE_SIZE_BYTES, String.valueOf(maxMessageSize)); 
createJoynrMqttClient(); byte[] shortSerializedMessage = new byte[maxMessageSize]; joynrMqttClientPublishAndVerifyReceivedMessage(shortSerializedMessage); byte[] largeSerializedMessage = new byte[maxMessageSize + 1]; thrown.expect(JoynrMessageNotSentException.class); thrown.expectMessage("MQTT Publish failed: maximum allowed message size of " + maxMessageSize + " bytes exceeded, actual size is " + largeSerializedMessage.length + " bytes"); joynrMqttClient.publishMessage(ownTopic.getTopic(), largeSerializedMessage); } private void mqttClientTestWithDisabledMessageSizeCheck(boolean isSecureConnection) throws Exception { final int initialMessageSize = 100; properties.put(MqttModule.PROPERTY_KEY_MQTT_MAX_MESSAGE_SIZE_BYTES, "0"); createJoynrMqttClient(isSecureConnection); byte[] shortSerializedMessage = new byte[initialMessageSize]; joynrMqttClientPublishAndVerifyReceivedMessage(shortSerializedMessage); byte[] largeSerializedMessage = new byte[initialMessageSize + 1]; joynrMqttClientPublishAndVerifyReceivedMessage(largeSerializedMessage); } @Test public void mqttClientTestWithDisabledMessageSizeCheckWithoutTls() throws Exception { final boolean isSecureConnection = false; mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } private void mqttClientTestWithCredentials(boolean expectException) throws Exception { final boolean isSecureConnection = false; if (expectException) { thrown.expect(JoynrIllegalStateException.class); } mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } @Test public void mqttClientTestWithWrongUserAndSomePassword() throws Exception { boolean expectException = true; properties.put(MqttModule.PROPERTY_KEY_MQTT_USERNAME, "wronguser"); properties.put(MqttModule.PROPERTY_KEY_MQTT_PASSWORD, joynrPassword); mqttClientTestWithCredentials(expectException); } @Test public void mqttClientTestWithCorrectUserButWrongPassword() throws Exception { boolean expectException = true; properties.put(MqttModule.PROPERTY_KEY_MQTT_USERNAME, joynrUser); properties.put(MqttModule.PROPERTY_KEY_MQTT_PASSWORD, "wrongpassword"); mqttClientTestWithCredentials(expectException); } @Test public void mqttClientTestWithCorrectUserAndCorrectPassword() throws Exception { boolean expectException = false; properties.put(MqttModule.PROPERTY_KEY_MQTT_USERNAME, joynrUser); properties.put(MqttModule.PROPERTY_KEY_MQTT_PASSWORD, joynrPassword); mqttClientTestWithCredentials(expectException); } @Test public void mqttClientTestWithEmptyUser() throws Exception { final boolean isSecureConnection = false; properties.put(MqttModule.PROPERTY_KEY_MQTT_USERNAME, ""); mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } @Test public void mqttClientTestWithCorrectUserButEmptyPassword() throws Exception { final boolean isSecureConnection = false; properties.put(MqttModule.PROPERTY_KEY_MQTT_USERNAME, joynrUser); properties.put(MqttModule.PROPERTY_KEY_MQTT_PASSWORD, ""); thrown.expect(JoynrIllegalStateException.class); mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } @Test public void mqttClientTestWithDisabledMessageSizeCheckWithTlsAndDefaultJksStore() throws Exception { final String keyStorePath = getResourcePath("clientkeystore.jks"); final String trustStorePath = getResourcePath("catruststore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); 
properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); final boolean isSecureConnection = true; mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } @Test public void mqttClientTestWithDisabledMessageSizeCheckWithTlsAndP12Store() throws Exception { final String keyStorePath = getResourcePath("clientkeystore.p12"); final String trustStorePath = getResourcePath("catruststore.p12"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_TYPE, "PKCS12"); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_TYPE, "PKCS12"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); final boolean isSecureConnection = true; mqttClientTestWithDisabledMessageSizeCheck(isSecureConnection); } private void testCreateMqttClientFailsWithJoynrIllegalArgumentException() { final boolean isSecureConnection = true; try { createJoynrMqttClient(isSecureConnection); fail("Expected JoynrIllegalStateException"); } catch (JoynrIllegalStateException e) { // expected behaviour } } @Test public void mqttClientTLSCreationFailsIfKeystorePasswordIsWrongOrMissing() throws URISyntaxException { final String wrongPassword = "wrongPassword"; final String keyStorePath = getResourcePath("clientkeystore.jks"); final String trustStorePath = getResourcePath("catruststore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); // test missing keystore password properties.remove(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); // test wrong keystore password properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, wrongPassword); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); } @Test public void mqttClientTLSCreationFailsIfTrustorePasswordIsWrongOrMissing() throws URISyntaxException { final String wrongPassword = "wrongPassword"; final String keyStorePath = getResourcePath("clientkeystore.jks"); final String trustStorePath = getResourcePath("catruststore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); // test missing truststore password properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.remove(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); // test wrong truststore password properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, wrongPassword); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); } @Test public void mqttClientTLSCreationFailsIfKeystorePathIsWrongOrMissing() throws URISyntaxException { final String wrongKeyStorePath = getResourcePath("clientkeystore.jks") + "42"; final String trustStorePath = getResourcePath("catruststore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, trustStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); // test missing keystore path properties.remove(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH); 
testCreateMqttClientFailsWithJoynrIllegalArgumentException(); // test wrong keystore path properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, wrongKeyStorePath); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); } @Test public void mqttClientTLSCreationFailsIfTrustorePathIsWrongOrMissing() throws URISyntaxException { final String wrongTrustStorePath = getResourcePath("catruststore.jks") + "42"; final String keyStorePath = getResourcePath("clientkeystore.jks"); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PATH, keyStorePath); properties.put(MqttModule.PROPERTY_KEY_MQTT_KEYSTORE_PWD, KEYSTORE_PASSWORD); properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PWD, KEYSTORE_PASSWORD); // test missing truststore path properties.remove(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); // test wrong truststore path properties.put(MqttModule.PROPERTY_KEY_MQTT_TRUSTSTORE_PATH, wrongTrustStorePath); testCreateMqttClientFailsWithJoynrIllegalArgumentException(); } @Test public void mqttClientTestWithDisabledCleanSession() throws Exception { properties.put(MqttModule.PROPERTY_MQTT_CLEAN_SESSION, "false"); String topic = "otherTopic"; // create a MqttClient which was subscribed on the topic and shut it down. joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.subscribe(topic); joynrMqttClient.shutdown(); // use another MqttClient to publish a message for the first topic joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.publishMessage(topic, serializedMessage); Thread.sleep(100); joynrMqttClient.shutdown(); // create a MqttClient and subscribe to the same topic as the first one // MqttClient will receive message if cleanSession is disabled joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.subscribe(topic); Thread.sleep(100); verify(mockReceiver, atLeast(1)).transmit(eq(serializedMessage), any(FailureAction.class)); } @Test public void mqttClientTestWithEnabledCleanSession() throws Exception { properties.put(MqttModule.PROPERTY_MQTT_CLEAN_SESSION, "true"); String topic = "otherTopic1"; // create a MqttClient which was subscribed on the topic and shut it down. 
joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.subscribe(topic); joynrMqttClient.shutdown(); // use another MqttClient to publish a message for the first topic joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.publishMessage(topic, serializedMessage); Thread.sleep(100); joynrMqttClient.shutdown(); // create a MqttClient and subscribe to the same topic as the first one // MqttClient will receive message if cleanSession is disabled joynrMqttClient = createMqttClientWithoutSubscription(); joynrMqttClient.subscribe(topic); Thread.sleep(100); verify(mockReceiver, times(0)).transmit(eq(serializedMessage), any(FailureAction.class)); } @Test public void mqttClientTestResubscriptionWithCleanRestartEnabled() throws Exception { properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "tcp://localhost:" + mqttBrokerPort); injector = Guice.createInjector(new MqttPahoModule(), new JoynrPropertiesModule(properties), new AbstractModule() { @Override protected void configure() { bind(MessageRouter.class).toInstance(mockMessageRouter); bind(ScheduledExecutorService.class).annotatedWith(Names.named(MessageRouter.SCHEDULEDTHREADPOOL)) .toInstance(Executors.newScheduledThreadPool(10)); bind(RawMessagingPreprocessor.class).to(NoOpRawMessagingPreprocessor.class); Multibinder.newSetBinder(binder(), new TypeLiteral<JoynrMessageProcessor>() { }); } }); ownTopic = injector.getInstance((Key.get(MqttAddress.class, Names.named(MqttModule.PROPERTY_MQTT_GLOBAL_ADDRESS)))); ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(10); MqttClientIdProvider mqttClientIdProvider = injector.getInstance(MqttClientIdProvider.class); String clientId = mqttClientIdProvider.getClientId(); String brokerUri = "tcp://localhost:" + mqttBrokerPort; int reconnectSleepMs = 100; int keepAliveTimerSec = 60; int connectionTimeoutSec = 60; int timeToWaitMs = -1; int maxMsgsInflight = 100; int maxMsgSizeBytes = 0; boolean cleanSession = true; final boolean isReceiver = true; final boolean separateConnections = false; String username = null; String password = null; joynrMqttClient = new MqttPahoClient(new MqttAddress(brokerUri, "sometopic"), clientId, scheduledExecutorService, reconnectSleepMs, keepAliveTimerSec, connectionTimeoutSec, timeToWaitMs, maxMsgsInflight, maxMsgSizeBytes, cleanSession, isReceiver, separateConnections, "", "", "", "", "", "", username, password, mock(MqttStatusReceiver.class)); joynrMqttClient.start(); joynrMqttClient.setMessageListener(mockReceiver); joynrMqttClient.subscribe(ownTopic.getTopic()); // manually call disconnect and connectionLost MqttPahoClient mqttPahoClient = (MqttPahoClient) joynrMqttClient; mqttPahoClient.getMqttClient().disconnect(500); MqttException exception = new MqttException(MqttException.REASON_CODE_CLIENT_TIMEOUT); mqttPahoClient.connectionLost(exception); joynrMqttClientPublishAndVerifyReceivedMessage(serializedMessage); } // This test was disabled, because it runs perfectly on a local machine but not in the CI. // Further investigations are required to stabilize this test. 
@Test @Ignore public void testClientNotifiesStatusReceiverAboutBrokerDisconnect() throws Exception { final MqttStatusReceiver mqttStatusReceiver = mock(MqttStatusReceiver.class); @SuppressWarnings("unused") final JoynrMqttClient mqttClient = createMqttClientWithoutSubscription(false, mqttStatusReceiver); verify(mqttStatusReceiver).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); stopBroker(); Thread.sleep(1000); verify(mqttStatusReceiver).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.NOT_CONNECTED); startBroker(); Thread.sleep(2000); verify(mqttStatusReceiver, times(2)).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); } @Test public void testClientNotifiesStatusReceiverAboutShutdownDisconnect() throws Exception { final MqttStatusReceiver mqttStatusReceiver = mock(MqttStatusReceiver.class); final JoynrMqttClient mqttClient = createMqttClientWithoutSubscription(false, mqttStatusReceiver); verify(mqttStatusReceiver).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.CONNECTED); mqttClient.shutdown(); verify(mqttStatusReceiver).notifyConnectionStatusChanged(MqttStatusReceiver.ConnectionStatus.NOT_CONNECTED); } @Test public void mqttClientTestShutdownIfDisconnectFromMQTT() throws Exception { properties.put(MqttModule.PROPERTY_KEY_MQTT_BROKER_URI, "tcp://localhost:1111"); properties.put(MqttModule.PROPERTY_KEY_MQTT_RECONNECT_SLEEP_MS, "100"); // create and start client final JoynrMqttClient client = createMqttClientInternal(mock(MqttStatusReceiver.class)); final Semaphore semaphoreBeforeStartMethod = new Semaphore(0); final Semaphore semaphoreAfterStartMethod = new Semaphore(0); final int timeout = 500; Runnable myRunnable = new Runnable() { @Override public void run() { semaphoreBeforeStartMethod.release(); client.start(); semaphoreAfterStartMethod.release(); } }; new Thread(myRunnable).start(); assertTrue(semaphoreBeforeStartMethod.tryAcquire(timeout, TimeUnit.MILLISECONDS)); // sleep in order to increase the probability of the runnable // to be in the sleep part of the start method Thread.sleep(timeout); // At this point the semaphoreAfterStartMethod is supposed to be not released // because we expect to be still in start() assertFalse(semaphoreAfterStartMethod.tryAcquire()); client.shutdown(); assertTrue(semaphoreAfterStartMethod.tryAcquire(timeout, TimeUnit.MILLISECONDS)); } }
[Java] Fixed MqttPahoClientTest: provide correct username and password for login to mosquitto broker
java/messaging/mqtt/joynr-mqtt-client/src/test/java/io/joynr/messaging/mqtt/paho/client/MqttPahoClientTest.java
[Java] Fixed MqttPahoClientTest
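The fix this record describes concerns logging in to the mosquitto broker with a username and password. As a point of reference only, the stand-alone sketch below shows the underlying Eclipse Paho calls for password authentication; it is not the joynr wrapper code from the record, and the broker URI and credentials are placeholder assumptions.

import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.persistence.MemoryPersistence;

public class PahoLoginSketch {
    public static void main(String[] args) throws MqttException {
        // Placeholder broker URI and credentials; a test setup would point these
        // at the local mosquitto instance and the account configured for it.
        MqttClient client = new MqttClient("tcp://localhost:1883",
                                           MqttClient.generateClientId(),
                                           new MemoryPersistence());
        MqttConnectOptions options = new MqttConnectOptions();
        options.setUserName("testuser");
        options.setPassword("testpassword".toCharArray());
        options.setCleanSession(true);
        // The connect call fails if the broker rejects the credentials.
        client.connect(options);
        client.disconnect();
        client.close();
    }
}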
Java
apache-2.0
826aeefba7740f625e342e34f4b820aace2458b2
0
wyona/yanel,wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel,baszero/yanel,wyona/yanel,wyona/yanel,baszero/yanel,baszero/yanel
/* * Copyright 2006 Wyona */ package org.wyona.yanel.impl.resources.navigation.lookup; import javax.xml.transform.Transformer; import org.w3c.dom.Document; import org.wyona.yanel.core.map.RealmConfigPathResolver; import org.wyona.yanel.core.navigation.Node; import org.wyona.yanel.core.navigation.Sitetree; import org.wyona.yanel.core.source.ResourceResolver; import org.wyona.yanel.core.source.SourceResolver; import org.wyona.yanel.impl.resources.BasicXMLResource; import org.wyona.yanel.impl.resources.usecase.ExecutableUsecaseResource; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.ConfigurationException; import org.apache.avalon.framework.configuration.ConfigurationUtil; import org.apache.log4j.Logger; /** * */ public class LookupResource extends ExecutableUsecaseResource { private static Logger log = Logger.getLogger(LookupResource.class); private static final String REQUEST_PARAMETER_TYPE = "type"; private Sitetree sitetree; /** * Get sitetree as XML */ public String getSitetreeAsXML() throws Exception { String name4pathParameter = "path"; if (getResourceConfigProperty("name4path-parameter") != null) { name4pathParameter = getResourceConfigProperty("name4path-parameter"); } StringBuilder sb = new StringBuilder("<sitetree>"); if (getEnvironment().getRequest().getParameter(name4pathParameter) != null) { sb.append(getNodeAsXML(request.getParameter(name4pathParameter))); } else { sb.append(getNodeAsXML("/")); } sb.append("</sitetree>"); return sb.toString(); } /** * Get node as XML */ private String getNodeAsXML(String path) throws Exception { String collectionsOnly = "false"; if (getResourceConfigProperty("show-collections-only") != null) { collectionsOnly = getResourceConfigProperty("show-collections-only"); } if (getEnvironment().getRequest().getParameter("show-collections-only") != null) { collectionsOnly = getEnvironment().getRequest().getParameter("show-collections-only"); } Sitetree sitetree = getSitetree(); Node node = sitetree.getNode(getRealm(), path); StringBuilder sb = new StringBuilder(); if (node != null) { if (node.isCollection()) { Node[] children = node.getChildren(); for (int i = 0; i < children.length; i++) { String childPath = path + "/" + children[i].getName(); if (path.equals("/")) { childPath = path + children[i].getName(); } String nodeName = children[i].getName(); if (children[i].isCollection()) { if(Boolean.parseBoolean(collectionsOnly)) { sb.append("<collection path=\"" + childPath + "\" name=\"" + children[i].getName() + "\">"); // TODO: ... sb.append("<label><![CDATA[" +children[i].getName() + "]]></label>"); sb.append("</collection>"); } else if (filterMatch(nodeName)) { sb.append("<collection path=\"" + childPath + "\" name=\"" + children[i].getName() + "\">"); // TODO: ... sb.append("<label><![CDATA[" +children[i].getName() + "]]></label>"); sb.append("</collection>"); } } else if (children[i].isResource() && !Boolean.parseBoolean(collectionsOnly)) { if (filterMatch(nodeName)) { sb.append("<resource path=\"" + childPath + "\" name=\"" + nodeName + "\">"); sb.append("<label><![CDATA[" + nodeName + "]]></label>"); sb.append("</resource>"); } } else { sb.append("<neither-resource-nor-collection path=\"" + childPath + "\" name=\"" + children[i].getName() + "\"/>"); } } } else if (!Boolean.parseBoolean(collectionsOnly)) { String nodeName = node.getName(); if (filterMatch(nodeName)) { sb.append("<resource path=\"" + path + "\" name=\"" + nodeName + "\">"); // TODO ... 
sb.append("<label><![CDATA[" + nodeName + "]]></label>"); sb.append("</resource>"); } } } else { String errorMessage = "node is null for path: " + path; sb.append("<exception>" + errorMessage + "</exception>"); log.error(errorMessage); } return sb.toString(); } private Sitetree getSitetree() { if (sitetree == null) { try { Document customConfigDoc = getConfiguration().getCustomConfiguration(); Configuration config = ConfigurationUtil.toConfiguration(customConfigDoc.getDocumentElement()); Configuration sitetreeConfig = config.getChild("sitetree", false); String sitetreeImplClassName = sitetreeConfig.getAttribute("class"); sitetree = (Sitetree) Class.forName(sitetreeImplClassName).newInstance(); sitetree.init(org.wyona.yanel.core.util.ConfigurationUtil.getCustomConfiguration(sitetreeConfig, "sitetree-config", "http://www.wyona.org/yanel/realm/1.0"), new SourceResolver(this)); } catch (Exception e) { log.info("Sitree is not configured, falling back to realm-repo-navigation"); log.info(e.getMessage(), e); sitetree = getRealm().getRepoNavigation(); } } return sitetree; } /** * filters allows to show only certain file-types within the lookup. * the filters are configured by resource-type-property and request-parameter. * define some filter-patterns in the rc by adding some resource-config-properties named filter-pattern-$TYPE. Type will be * used as group-id. when requested with the request parameter type=$TYPE it will apply all the filters named filter-pattern-$TYPE. * * e.g. &lt;yanel:property name="filter-pattern-image" value=".*[.]gif"/> * @return String[] with filter-patterns. */ private String[] getFilters() { try { String type = getParameterAsString(REQUEST_PARAMETER_TYPE); return getResourceConfigProperties("filter-pattern-" + type); } catch (Exception e) { log.error("Could not get filter: " + e.getMessage(),e); return null; } } /** * @return */ private boolean filterMatch(String nodeName) { String[] filters = getFilters(); if (filters != null && filters.length > 0) { for (int i = 0; i < filters.length; i++) { if (nodeName.matches(filters[i])) { return true; } } } else { return true; } return false; } }
src/contributions/resources/lookup/src/java/org/wyona/yanel/impl/resources/navigation/lookup/LookupResource.java
/* * Copyright 2006 Wyona */ package org.wyona.yanel.impl.resources.navigation; import javax.xml.transform.Transformer; import org.w3c.dom.Document; import org.wyona.yanel.core.map.RealmConfigPathResolver; import org.wyona.yanel.core.navigation.Node; import org.wyona.yanel.core.navigation.Sitetree; import org.wyona.yanel.core.source.ResourceResolver; import org.wyona.yanel.core.source.SourceResolver; import org.wyona.yanel.impl.resources.BasicXMLResource; import org.wyona.yanel.impl.resources.usecase.ExecutableUsecaseResource; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.ConfigurationException; import org.apache.avalon.framework.configuration.ConfigurationUtil; import org.apache.log4j.Logger; /** * */ public class LookupResource extends ExecutableUsecaseResource { private static Logger log = Logger.getLogger(LookupResource.class); private static final String REQUEST_PARAMETER_TYPE = "type"; private Sitetree sitetree; /** * Get sitetree as XML */ public String getSitetreeAsXML() throws Exception { String name4pathParameter = "path"; if (getResourceConfigProperty("name4path-parameter") != null) { name4pathParameter = getResourceConfigProperty("name4path-parameter"); } StringBuilder sb = new StringBuilder("<sitetree>"); if (getEnvironment().getRequest().getParameter(name4pathParameter) != null) { sb.append(getNodeAsXML(request.getParameter(name4pathParameter))); } else { sb.append(getNodeAsXML("/")); } sb.append("</sitetree>"); return sb.toString(); } /** * Get node as XML */ private String getNodeAsXML(String path) throws Exception { String collectionsOnly = "false"; if (getResourceConfigProperty("show-collections-only") != null) { collectionsOnly = getResourceConfigProperty("show-collections-only"); } if (getEnvironment().getRequest().getParameter("show-collections-only") != null) { collectionsOnly = getEnvironment().getRequest().getParameter("show-collections-only"); } Sitetree sitetree = getSitetree(); Node node = sitetree.getNode(getRealm(), path); StringBuilder sb = new StringBuilder(); if (node != null) { if (node.isCollection()) { Node[] children = node.getChildren(); for (int i = 0; i < children.length; i++) { String childPath = path + "/" + children[i].getName(); if (path.equals("/")) { childPath = path + children[i].getName(); } String nodeName = children[i].getName(); if (children[i].isCollection()) { if(Boolean.parseBoolean(collectionsOnly)) { sb.append("<collection path=\"" + childPath + "\" name=\"" + children[i].getName() + "\">"); // TODO: ... sb.append("<label><![CDATA[" +children[i].getName() + "]]></label>"); sb.append("</collection>"); } else if (filterMatch(nodeName)) { sb.append("<collection path=\"" + childPath + "\" name=\"" + children[i].getName() + "\">"); // TODO: ... sb.append("<label><![CDATA[" +children[i].getName() + "]]></label>"); sb.append("</collection>"); } } else if (children[i].isResource() && !Boolean.parseBoolean(collectionsOnly)) { if (filterMatch(nodeName)) { sb.append("<resource path=\"" + childPath + "\" name=\"" + nodeName + "\">"); sb.append("<label><![CDATA[" + nodeName + "]]></label>"); sb.append("</resource>"); } } else { sb.append("<neither-resource-nor-collection path=\"" + childPath + "\" name=\"" + children[i].getName() + "\"/>"); } } } else if (!Boolean.parseBoolean(collectionsOnly)) { String nodeName = node.getName(); if (filterMatch(nodeName)) { sb.append("<resource path=\"" + path + "\" name=\"" + nodeName + "\">"); // TODO ... 
sb.append("<label><![CDATA[" + nodeName + "]]></label>"); sb.append("</resource>"); } } } else { String errorMessage = "node is null for path: " + path; sb.append("<exception>" + errorMessage + "</exception>"); log.error(errorMessage); } return sb.toString(); } private Sitetree getSitetree() { if (sitetree == null) { try { Document customConfigDoc = getConfiguration().getCustomConfiguration(); Configuration config = ConfigurationUtil.toConfiguration(customConfigDoc.getDocumentElement()); Configuration sitetreeConfig = config.getChild("sitetree", false); String sitetreeImplClassName = sitetreeConfig.getAttribute("class"); sitetree = (Sitetree) Class.forName(sitetreeImplClassName).newInstance(); sitetree.init(org.wyona.yanel.core.util.ConfigurationUtil.getCustomConfiguration(sitetreeConfig, "sitetree-config", "http://www.wyona.org/yanel/realm/1.0"), new SourceResolver(this)); } catch (Exception e) { log.info("Sitree is not configured, falling back to realm-repo-navigation"); log.info(e.getMessage(), e); sitetree = getRealm().getRepoNavigation(); } } return sitetree; } /** * filters allows to show only certain file-types within the lookup. * the filters are configured by resource-type-property and request-parameter. * define some filter-patterns in the rc by adding some resource-config-properties named filter-pattern-$TYPE. Type will be * used as group-id. when requested with the request parameter type=$TYPE it will apply all the filters named filter-pattern-$TYPE. * * e.g. &lt;yanel:property name="filter-pattern-image" value=".*[.]gif"/> * @return String[] with filter-patterns. */ private String[] getFilters() { try { String type = getParameterAsString(REQUEST_PARAMETER_TYPE); return getResourceConfigProperties("filter-pattern-" + type); } catch (Exception e) { log.error("Could not get filter: " + e.getMessage(),e); return null; } } /** * @return */ private boolean filterMatch(String nodeName) { String[] filters = getFilters(); if (filters != null && filters.length > 0) { for (int i = 0; i < filters.length; i++) { if (nodeName.matches(filters[i])) { return true; } } } else { return true; } return false; } }
package name fixed
src/contributions/resources/lookup/src/java/org/wyona/yanel/impl/resources/navigation/lookup/LookupResource.java
package name fixed
Java
apache-2.0
6277a9b3ee77882a44158b603cf61854aaa2fd53
0
google-code-export/nutz,google-code-export/nutz,google-code-export/nutz
package org.nutz.dao; import java.sql.Connection; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import javax.sql.DataSource; import org.nutz.lang.Lang; import org.nutz.trans.Trans; import org.nutz.trans.Transaction; /** * Dao 的帮助函数,基本上,你不会用到这个类 * * @author zozoh([email protected]) */ public class Daos { /** * 获取连接 * * @param dataSource * 数据源 * @return 连接持有者 */ public static ConnectionHolder getConnection(DataSource dataSource) { try { Transaction trans = Trans.get(); Connection conn = null; if (trans != null) conn = trans.getConnection(dataSource); else conn = dataSource.getConnection(); return ConnectionHolder.make(trans, conn); } catch (SQLException e) { throw Lang.makeThrow("Could not get JDBC Connection : %s", e.getMessage()); } } /** * 释放连接 * * @param ch * 连接持有者 */ public static void releaseConnection(ConnectionHolder ch) { try { ch.close(); } catch (Throwable e) { throw Lang.wrapThrow(e); } } public static int getColumnIndex(ResultSetMetaData meta, String colName) throws SQLException { int ci = 0; if (null != meta) { int columnCount = meta.getColumnCount(); for (int i = 1; i <= columnCount; i++) if (meta.getColumnName(i).equalsIgnoreCase(colName)) { ci = i; break; } if (ci == 0) throw Lang.makeThrow(SQLException.class, "Can not find @Column(%s)", colName); } return ci; } public static boolean isIntLikeColumn(ResultSetMetaData meta, String colName) throws SQLException { return isIntLikeColumn(meta, getColumnIndex(meta, colName)); } public static boolean isIntLikeColumn(ResultSetMetaData meta, int index) throws SQLException { boolean isIntLike = false; int colType = meta.getColumnType(index); switch (colType) { case Types.BIGINT: case Types.INTEGER: case Types.SMALLINT: case Types.TINYINT: case Types.NUMERIC: isIntLike = true; } return isIntLike; } public static void safeClose(Statement stat, ResultSet rs) { safeClose(rs); safeClose(stat); } public static void safeClose(Statement stat) { if (null != stat) try { stat.close(); } catch (Throwable e) {} } public static void safeClose(ResultSet rs) { if (null != rs) try { rs.close(); } catch (Throwable e) {} } }
src/org/nutz/dao/Daos.java
package org.nutz.dao; import java.sql.Connection; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import javax.sql.DataSource; import org.nutz.lang.Lang; import org.nutz.trans.Trans; import org.nutz.trans.Transaction; /** * Dao 的帮助函数,基本上,你不会用到这个类 * * @author zozoh([email protected]) */ public class Daos { /** * 获取连接 * * @param dataSource * 数据源 * @return 连接持有者 */ public static ConnectionHolder getConnection(DataSource dataSource) { try { Transaction trans = Trans.get(); Connection conn = null; if (trans != null) conn = trans.getConnection(dataSource); else conn = dataSource.getConnection(); return ConnectionHolder.make(trans, conn); } catch (SQLException e) { throw Lang.makeThrow("Could not get JDBC Connection : %s", e.getMessage()); } } /** * 释放连接 * * @param ch * 连接持有者 */ public static void releaseConnection(ConnectionHolder ch) { try { ch.close(); } catch (Throwable e) { throw Lang.wrapThrow(e); } } public static int getColumnIndex(ResultSetMetaData meta, String colName) throws SQLException { int ci = 0; if (null != meta) { for (int i = 1; i <= meta.getColumnCount(); i++) if (meta.getColumnName(i).equalsIgnoreCase(colName)) { ci = i; break; } if (ci == 0) throw Lang.makeThrow(SQLException.class, "Can not find @Column(%s)", colName); } return ci; } public static boolean isIntLikeColumn(ResultSetMetaData meta, String colName) throws SQLException { return isIntLikeColumn(meta, getColumnIndex(meta, colName)); } public static boolean isIntLikeColumn(ResultSetMetaData meta, int index) throws SQLException { boolean isIntLike = false; int colType = meta.getColumnType(index); switch (colType) { case Types.BIGINT: case Types.INTEGER: case Types.SMALLINT: case Types.TINYINT: case Types.NUMERIC: isIntLike = true; } return isIntLike; } public static void safeClose(Statement stat, ResultSet rs) { safeClose(rs); safeClose(stat); } public static void safeClose(Statement stat) { if (null != stat) try { stat.close(); } catch (Throwable e) {} } public static void safeClose(ResultSet rs) { if (null != rs) try { rs.close(); } catch (Throwable e) {} } }
Compute columnCount outside the loop
src/org/nutz/dao/Daos.java
Compute columnCount outside the loop
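The change captured in this record is a small loop-invariant hoist: ResultSetMetaData.getColumnCount() is read once before the loop instead of in the loop condition. A minimal stand-alone version of that pattern, using a hypothetical class name, looks like this.

import java.sql.ResultSetMetaData;
import java.sql.SQLException;

final class ColumnLookup {
    // Call the loop-invariant getColumnCount() once, outside the loop condition,
    // rather than re-invoking the metadata accessor on every iteration.
    static int findColumn(ResultSetMetaData meta, String colName) throws SQLException {
        int columnCount = meta.getColumnCount();
        for (int i = 1; i <= columnCount; i++) {
            if (meta.getColumnName(i).equalsIgnoreCase(colName)) {
                return i; // JDBC column indexes are 1-based
            }
        }
        return 0; // not found
    }
}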
Java
apache-2.0
649b07fcc178f8c4432a8075d7bd403105646f49
0
ant4g0nist/binnavi,nihilus/binnavi,ispras/binnavi,chubbymaggie/binnavi,AmesianX/binnavi,noikiy/binnavi,ispras/binnavi,chubbymaggie/binnavi,ant4g0nist/binnavi,firebitsbr/binnavi,chubbymaggie/binnavi,mayl8822/binnavi,mayl8822/binnavi,AmesianX/binnavi,firebitsbr/binnavi,firebitsbr/binnavi,firebitsbr/binnavi,ant4g0nist/binnavi,google/binnavi,AmesianX/binnavi,mayl8822/binnavi,ant4g0nist/binnavi,google/binnavi,hoangcuongflp/binnavi,nihilus/binnavi,ant4g0nist/binnavi,mayl8822/binnavi,google/binnavi,noikiy/binnavi,hoangcuongflp/binnavi,AmesianX/binnavi,nihilus/binnavi,firebitsbr/binnavi,hoangcuongflp/binnavi,noikiy/binnavi,nihilus/binnavi,noikiy/binnavi,google/binnavi,ispras/binnavi,chubbymaggie/binnavi,AmesianX/binnavi,google/binnavi,chubbymaggie/binnavi,mayl8822/binnavi,google/binnavi,hoangcuongflp/binnavi,hoangcuongflp/binnavi,nihilus/binnavi,ispras/binnavi,ispras/binnavi,chubbymaggie/binnavi,noikiy/binnavi,ispras/binnavi
/* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.google.security.zynamics.binnavi.Database.PostgreSQL.Functions; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import com.google.common.base.Preconditions; import com.google.security.zynamics.binnavi.Database.AbstractSQLProvider; import com.google.security.zynamics.binnavi.Database.CConnection; import com.google.security.zynamics.binnavi.Database.CTableNames; import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntDeleteException; import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException; import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException; import com.google.security.zynamics.binnavi.Database.PostgreSQL.PostgreSQLHelpers; import com.google.security.zynamics.binnavi.Log.NaviLogger; import com.google.security.zynamics.binnavi.debug.debugger.DebuggerTemplate; import com.google.security.zynamics.binnavi.debug.debugger.DebuggerTemplateManager; import com.google.security.zynamics.zylib.net.NetHelpers; /** * This class contains PostgreSQL queries for working with debuggers. */ public final class PostgresSQLDebuggerFunctions { /** * Do not instantiate this class. */ private PostgresSQLDebuggerFunctions() { // You are not supposed to instantiate this class } /** * Creates a new debugger template in the database. * * @param provider SQL provider of the new debugger template. * @param name Name of the new debugger template. This argument must be non-empty. * @param host Host of the new debugger template. This argument must be non-empty. * @param port Port of the new debugger template. This argument must be a valid port number. * * @return The new debugger template. * * @throws CouldntSaveDataException Thrown if the new debugger template could not be written to * the database. */ public static DebuggerTemplate createDebuggerTemplate(final AbstractSQLProvider provider, final String name, final String host, final int port) throws CouldntSaveDataException { Preconditions.checkNotNull(name, "IE00417: Debugger names can not be null"); Preconditions.checkArgument(!name.isEmpty(), "IE00418: Debugger names can not be empty"); Preconditions.checkNotNull(host, "IE00419: Debugger host can not be null"); Preconditions.checkArgument(!host.isEmpty(), "IE00418: Debugger host can not be empty"); Preconditions.checkArgument((port > 0) && (port <= 65535), "IE00421: Debugger port is out of bounds"); NaviLogger.info("Creating new debugger %s (%s:%d)", name, host, port); final CConnection connection = provider.getConnection(); final String query = "INSERT INTO " + CTableNames.DEBUGGERS_TABLE + "(name, host, port) VALUES(?, ?, ?) 
RETURNING id"; try (PreparedStatement statement = connection.getConnection().prepareStatement(query); ResultSet resultSet = statement.executeQuery()) { statement.setString(1, name); statement.setString(2, host); statement.setInt(3, port); int id = -1; while (resultSet.next()) { id = resultSet.getInt("id"); } return new DebuggerTemplate(id, name, host, port, provider); } catch (final SQLException e) { throw new CouldntSaveDataException(e); } } /** * Deletes a debugger template from the database. * * The given debugger template must be stored in the database connected to by the provider * argument. * * @param provider The connection to the database. * @param debugger The debugger template to delete. * * @throws CouldntDeleteException Thrown if the debugger template could not be deleted. */ public static void deleteDebugger(final AbstractSQLProvider provider, final DebuggerTemplate debugger) throws CouldntDeleteException { Preconditions.checkNotNull(debugger, "IE00709: Debugger template argument can not be null"); Preconditions.checkArgument(debugger.inSameDatabase(provider), "IE00710: Debugger template is not part of this database"); NaviLogger.info("Deleting debugger %d", debugger.getId()); PostgreSQLHelpers.deleteById(provider.getConnection(), CTableNames.DEBUGGERS_TABLE, debugger.getId()); } /** * Loads all debugger templates of a database. * * The debugger template manager must belong to the database connected to by the provider * argument. * * @param provider The connection to the database. * @param manager Debugger template manager where the loaded debuggers are added to. * * @throws CouldntLoadDataException Thrown if the debugger templates could not be loaded. */ public static void loadDebuggers(final AbstractSQLProvider provider, final DebuggerTemplateManager manager) throws CouldntLoadDataException { final CConnection connection = provider.getConnection(); final String query = "SELECT * FROM " + CTableNames.DEBUGGERS_TABLE; try (ResultSet resultSet = connection.executeQuery(query, true)) { while (resultSet.next()) { final DebuggerTemplate debugger = new DebuggerTemplate(resultSet.getInt("id"), PostgreSQLHelpers.readString(resultSet, "name"), PostgreSQLHelpers.readString(resultSet, "host"), resultSet.getInt("port"), provider); manager.addDebugger(debugger); } } catch (final SQLException e) { throw new CouldntLoadDataException(e); } } /** * Changes the host of an existing debugger template. * * The debugger must be stored in the database the provider argument is connected to. * * @param provider The connection to the database. * @param debugger The debugger whose host value is changed. * @param host The new host value of the debugger template. * * @throws CouldntSaveDataException Thrown if the host value could not be updated. */ public static void setHost(final AbstractSQLProvider provider, final DebuggerTemplate debugger, final String host) throws CouldntSaveDataException { Preconditions.checkNotNull(debugger, "IE00422: Debugger argument can not be null"); Preconditions.checkNotNull(host, "IE00423: Host argument can not be null"); Preconditions.checkArgument(debugger.inSameDatabase(provider), "IE00424: Debugger is not part of this database"); final String query = "UPDATE " + CTableNames.DEBUGGERS_TABLE + " SET host = ? 
WHERE id = ?"; try (PreparedStatement statement = provider.getConnection().getConnection().prepareStatement(query)) { statement.setString(1, host); statement.setInt(2, debugger.getId()); statement.executeUpdate(); } catch (final SQLException e) { throw new CouldntSaveDataException(e); } } /** * Changes the name of an existing debugger template. * * The debugger must be stored in the database the provider argument is connected to. * * @param provider The connection to the database. * @param debugger The debugger whose name value is changed. * @param name The new name value of the debugger template. * * @throws CouldntSaveDataException Thrown if the name value could not be updated. */ public static void setName(final AbstractSQLProvider provider, final DebuggerTemplate debugger, final String name) throws CouldntSaveDataException { Preconditions.checkNotNull(debugger, "IE00425: Debugger argument can not be null"); Preconditions.checkNotNull(name, "IE00426: Name argument can not be null"); Preconditions.checkArgument(debugger.inSameDatabase(provider), "IE00427: Debugger is not part of this database"); final String query = "UPDATE " + CTableNames.DEBUGGERS_TABLE + " SET name = ? WHERE id = ?"; try (PreparedStatement statement = provider.getConnection().getConnection().prepareStatement(query)) { statement.setString(1, name); statement.setInt(2, debugger.getId()); statement.executeUpdate(); } catch (final SQLException e) { throw new CouldntSaveDataException(e); } } /** * Changes the port of an existing debugger template. * * The debugger must be stored in the database the provider argument is connected to. * * @param provider The connection to the database. * @param debugger The debugger whose port value is changed. * @param port The new port value of the debugger template. This argument must be a valid port * number. * * @throws CouldntSaveDataException Thrown if the port value could not be updated. */ public static void setPort(final AbstractSQLProvider provider, final DebuggerTemplate debugger, final int port) throws CouldntSaveDataException { Preconditions.checkNotNull(debugger, "IE00428: Debugger argument can not be null"); Preconditions.checkArgument(NetHelpers.isValidPort(port), "IE00429: Invalid port argument"); Preconditions.checkArgument(debugger.inSameDatabase(provider), "IE00430: Debugger is not part of this database"); final String query = "UPDATE " + CTableNames.DEBUGGERS_TABLE + " SET port = ? WHERE id = ?"; try (PreparedStatement statement = provider.getConnection().getConnection().prepareStatement(query)) { statement.setInt(1, port); statement.setInt(2, debugger.getId()); statement.executeUpdate(); } catch (final SQLException e) { throw new CouldntSaveDataException(e); } } }
src/main/java/com/google/security/zynamics/binnavi/Database/PostgreSQL/Functions/PostgresSQLDebuggerFunctions.java
/* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.google.security.zynamics.binnavi.Database.PostgreSQL.Functions; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import com.google.common.base.Preconditions; import com.google.security.zynamics.binnavi.Database.AbstractSQLProvider; import com.google.security.zynamics.binnavi.Database.CConnection; import com.google.security.zynamics.binnavi.Database.CTableNames; import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntDeleteException; import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException; import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException; import com.google.security.zynamics.binnavi.Database.PostgreSQL.PostgreSQLHelpers; import com.google.security.zynamics.binnavi.Log.NaviLogger; import com.google.security.zynamics.binnavi.debug.debugger.DebuggerTemplate; import com.google.security.zynamics.binnavi.debug.debugger.DebuggerTemplateManager; import com.google.security.zynamics.zylib.net.NetHelpers; /** * This class contains PostgreSQL queries for working with debuggers. */ public final class PostgresSQLDebuggerFunctions { /** * Do not instantiate this class. */ private PostgresSQLDebuggerFunctions() { // You are not supposed to instantiate this class } /** * Creates a new debugger template in the database. * * @param provider SQL provider of the new debugger template. * @param name Name of the new debugger template. This argument must be non-empty. * @param host Host of the new debugger template. This argument must be non-empty. * @param port Port of the new debugger template. This argument must be a valid port number. * * @return The new debugger template. * * @throws CouldntSaveDataException Thrown if the new debugger template could not be written to * the database. */ public static DebuggerTemplate createDebuggerTemplate(final AbstractSQLProvider provider, final String name, final String host, final int port) throws CouldntSaveDataException { Preconditions.checkNotNull(name, "IE00417: Debugger names can not be null"); Preconditions.checkArgument(!name.isEmpty(), "IE00418: Debugger names can not be empty"); Preconditions.checkNotNull(host, "IE00419: Debugger host can not be null"); Preconditions.checkArgument(!host.isEmpty(), "IE00418: Debugger host can not be empty"); Preconditions.checkArgument((port > 0) && (port <= 65535), "IE00421: Debugger port is out of bounds"); NaviLogger.info("Creating new debugger %s (%s:%d)", name, host, port); final CConnection connection = provider.getConnection(); try { final String query = "INSERT INTO " + CTableNames.DEBUGGERS_TABLE + "(name, host, port) VALUES(?, ?, ?) 
RETURNING id"; final PreparedStatement statement = connection.getConnection().prepareStatement(query); statement.setString(1, name); statement.setString(2, host); statement.setInt(3, port); final ResultSet resultSet = statement.executeQuery(); int id = -1; try { while (resultSet.next()) { id = resultSet.getInt("id"); } } finally { resultSet.close(); statement.close(); } return new DebuggerTemplate(id, name, host, port, provider); } catch (final SQLException e) { throw new CouldntSaveDataException(e); } } /** * Deletes a debugger template from the database. * * The given debugger template must be stored in the database connected to by the provider * argument. * * @param provider The connection to the database. * @param debugger The debugger template to delete. * * @throws CouldntDeleteException Thrown if the debugger template could not be deleted. */ public static void deleteDebugger(final AbstractSQLProvider provider, final DebuggerTemplate debugger) throws CouldntDeleteException { Preconditions.checkNotNull(debugger, "IE00709: Debugger template argument can not be null"); Preconditions.checkArgument(debugger.inSameDatabase(provider), "IE00710: Debugger template is not part of this database"); NaviLogger.info("Deleting debugger %d", debugger.getId()); PostgreSQLHelpers.deleteById(provider.getConnection(), CTableNames.DEBUGGERS_TABLE, debugger.getId()); } /** * Loads all debugger templates of a database. * * The debugger template manager must belong to the database connected to by the provider * argument. * * @param provider The connection to the database. * @param manager Debugger template manager where the loaded debuggers are added to. * * @throws CouldntLoadDataException Thrown if the debugger templates could not be loaded. */ public static void loadDebuggers(final AbstractSQLProvider provider, final DebuggerTemplateManager manager) throws CouldntLoadDataException { final CConnection connection = provider.getConnection(); final String query = "SELECT * FROM " + CTableNames.DEBUGGERS_TABLE; try { final ResultSet resultSet = connection.executeQuery(query, true); try { while (resultSet.next()) { final DebuggerTemplate debugger = new DebuggerTemplate(resultSet.getInt("id"), PostgreSQLHelpers.readString(resultSet, "name"), PostgreSQLHelpers.readString(resultSet, "host"), resultSet.getInt("port"), provider); manager.addDebugger(debugger); } } finally { resultSet.close(); } } catch (final SQLException e) { throw new CouldntLoadDataException(e); } } /** * Changes the host of an existing debugger template. * * The debugger must be stored in the database the provider argument is connected to. * * @param provider The connection to the database. * @param debugger The debugger whose host value is changed. * @param host The new host value of the debugger template. * * @throws CouldntSaveDataException Thrown if the host value could not be updated. */ public static void setHost(final AbstractSQLProvider provider, final DebuggerTemplate debugger, final String host) throws CouldntSaveDataException { Preconditions.checkNotNull(debugger, "IE00422: Debugger argument can not be null"); Preconditions.checkNotNull(host, "IE00423: Host argument can not be null"); Preconditions.checkArgument(debugger.inSameDatabase(provider), "IE00424: Debugger is not part of this database"); final String query = "UPDATE " + CTableNames.DEBUGGERS_TABLE + " SET host = ? 
WHERE id = ?"; try { final PreparedStatement statement = provider.getConnection().getConnection().prepareStatement(query); try { statement.setString(1, host); statement.setInt(2, debugger.getId()); statement.executeUpdate(); } finally { statement.close(); } } catch (final SQLException e) { throw new CouldntSaveDataException(e); } } /** * Changes the name of an existing debugger template. * * The debugger must be stored in the database the provider argument is connected to. * * @param provider The connection to the database. * @param debugger The debugger whose name value is changed. * @param name The new name value of the debugger template. * * @throws CouldntSaveDataException Thrown if the name value could not be updated. */ public static void setName(final AbstractSQLProvider provider, final DebuggerTemplate debugger, final String name) throws CouldntSaveDataException { Preconditions.checkNotNull(debugger, "IE00425: Debugger argument can not be null"); Preconditions.checkNotNull(name, "IE00426: Name argument can not be null"); Preconditions.checkArgument(debugger.inSameDatabase(provider), "IE00427: Debugger is not part of this database"); final String query = "UPDATE " + CTableNames.DEBUGGERS_TABLE + " SET name = ? WHERE id = ?"; try { final PreparedStatement statement = provider.getConnection().getConnection().prepareStatement(query); try { statement.setString(1, name); statement.setInt(2, debugger.getId()); statement.executeUpdate(); } finally { statement.close(); } } catch (final SQLException e) { throw new CouldntSaveDataException(e); } } /** * Changes the port of an existing debugger template. * * The debugger must be stored in the database the provider argument is connected to. * * @param provider The connection to the database. * @param debugger The debugger whose port value is changed. * @param port The new port value of the debugger template. This argument must be a valid port * number. * * @throws CouldntSaveDataException Thrown if the port value could not be updated. */ public static void setPort(final AbstractSQLProvider provider, final DebuggerTemplate debugger, final int port) throws CouldntSaveDataException { Preconditions.checkNotNull(debugger, "IE00428: Debugger argument can not be null"); Preconditions.checkArgument(NetHelpers.isValidPort(port), "IE00429: Invalid port argument"); Preconditions.checkArgument(debugger.inSameDatabase(provider), "IE00430: Debugger is not part of this database"); final String query = "UPDATE " + CTableNames.DEBUGGERS_TABLE + " SET port = ? WHERE id = ?"; try { final PreparedStatement statement = provider.getConnection().getConnection().prepareStatement(query); try { statement.setInt(1, port); statement.setInt(2, debugger.getId()); statement.executeUpdate(); } finally { statement.close(); } } catch (final SQLException e) { throw new CouldntSaveDataException(e); } } }
Update PostgresSQLDebuggerFunctions.java
src/main/java/com/google/security/zynamics/binnavi/Database/PostgreSQL/Functions/PostgresSQLDebuggerFunctions.java
Update PostgresSQLDebuggerFunctions.java
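The update in this record reworks manual statement and result-set cleanup into try-with-resources. The sketch below shows that pattern for a parameterized INSERT ... RETURNING query under assumed table and column names (the project builds its query from its own constants). One ordering detail worth noting: parameters must be bound before executeQuery() runs, so the ResultSet is opened in a nested try rather than in the statement's resource header.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

final class InsertReturningSketch {
    // Placeholder table/column names; the real code derives the query from its own constants.
    static int insertDebugger(Connection connection, String name, String host, int port) throws SQLException {
        String query = "INSERT INTO debuggers(name, host, port) VALUES(?, ?, ?) RETURNING id";
        try (PreparedStatement statement = connection.prepareStatement(query)) {
            statement.setString(1, name);
            statement.setString(2, host);
            statement.setInt(3, port);
            // Open the ResultSet only after all parameters are bound.
            try (ResultSet resultSet = statement.executeQuery()) {
                return resultSet.next() ? resultSet.getInt("id") : -1;
            }
        }
    }
}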
Java
apache-2.0
0a1cec24ec24ffa1f7f89c983a9bfef4d2d7907c
0
GlenRSmith/elasticsearch,nknize/elasticsearch,GlenRSmith/elasticsearch,nknize/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,nknize/elasticsearch,nknize/elasticsearch,nknize/elasticsearch,robin13/elasticsearch,GlenRSmith/elasticsearch
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.searchablesnapshots.cache; import org.apache.lucene.document.Document; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.xpack.searchablesnapshots.BaseSearchableSnapshotsIntegTestCase; import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX; import static org.elasticsearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.searchablesnapshots.cache.PersistentCache.resolveCacheIndexFolder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; public class SearchableSnapshotsPersistentCacheIntegTests extends BaseSearchableSnapshotsIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) // ensure the cache is definitely used .put(CacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), new ByteSizeValue(1L, ByteSizeUnit.GB)) // to make cache synchronization predictable .put(CacheService.SNAPSHOT_CACHE_SYNC_INTERVAL_SETTING.getKey(), TimeValue.timeValueHours(1L)) .build(); } public void testCacheSurviveRestart() throws Exception { final String fsRepoName = randomAlphaOfLength(10); final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); final String restoredIndexName = randomBoolean() ? 
indexName : randomAlphaOfLength(10).toLowerCase(Locale.ROOT); final String snapshotName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createRepository(fsRepoName, "fs"); final Settings.Builder originalIndexSettings = Settings.builder() .put(INDEX_SOFT_DELETES_SETTING.getKey(), true) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1); createAndPopulateIndex(indexName, originalIndexSettings); final SnapshotInfo snapshotInfo = createFullSnapshot(fsRepoName, snapshotName); assertThat(snapshotInfo.successfulShards(), greaterThan(0)); assertThat(snapshotInfo.successfulShards(), equalTo(snapshotInfo.totalShards())); assertAcked(client().admin().indices().prepareDelete(indexName)); final DiscoveryNodes discoveryNodes = client().admin().cluster().prepareState().clear().setNodes(true).get().getState().nodes(); final String dataNode = randomFrom(discoveryNodes.getDataNodes().values().toArray(DiscoveryNode.class)).getName(); mountSnapshot( fsRepoName, snapshotName, indexName, restoredIndexName, Settings.builder().put(INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._name", dataNode).build() ); ensureGreen(restoredIndexName); final Index restoredIndex = client().admin() .cluster() .prepareState() .clear() .setMetadata(true) .get() .getState() .metadata() .index(restoredIndexName) .getIndex(); final IndexService indexService = internalCluster().getInstance(IndicesService.class, dataNode).indexService(restoredIndex); final ShardPath shardPath = indexService.getShard(0).shardPath(); final Path shardCachePath = CacheService.getShardCachePath(shardPath); assertTrue(Files.isDirectory(shardCachePath)); final Set<Path> cacheFiles = new HashSet<>(); try (DirectoryStream<Path> snapshotCacheStream = Files.newDirectoryStream(shardCachePath)) { for (final Path snapshotCachePath : snapshotCacheStream) { assertTrue(snapshotCachePath + " should be a directory", Files.isDirectory(snapshotCachePath)); try (DirectoryStream<Path> cacheFileStream = Files.newDirectoryStream(snapshotCachePath)) { for (final Path cacheFilePath : cacheFileStream) { assertTrue(cacheFilePath + " should be a file", Files.isRegularFile(cacheFilePath)); cacheFiles.add(cacheFilePath); } } } } assertFalse("no cache files found", cacheFiles.isEmpty()); CacheService cacheService = internalCluster().getInstance(CacheService.class, dataNode); cacheService.synchronizeCache(); PersistentCache persistentCache = cacheService.getPersistentCache(); assertThat(persistentCache.getNumDocs(), equalTo((long) cacheFiles.size())); internalCluster().restartNode(dataNode, new InternalTestCluster.RestartCallback() { @Override public Settings onNodeStopped(String nodeName) { try { assertTrue(Files.isDirectory(shardCachePath)); final Path persistentCacheIndexDir = resolveCacheIndexFolder(shardPath.getRootDataPath()); assertTrue(Files.isDirectory(persistentCacheIndexDir)); final Map<String, Document> documents = PersistentCache.loadDocuments(persistentCacheIndexDir); assertThat(documents.size(), equalTo(cacheFiles.size())); for (Path cacheFile : cacheFiles) { final String cacheFileName = cacheFile.getFileName().toString(); assertTrue(cacheFileName + " should exist on disk", Files.isRegularFile(cacheFile)); assertThat(cacheFileName + " should exist in persistent cache index", documents.get(cacheFileName), notNullValue()); } } catch (IOException e) { throw new AssertionError(e); } return Settings.EMPTY; } }); cacheService = internalCluster().getInstance(CacheService.class, dataNode); persistentCache = cacheService.getPersistentCache(); ensureGreen(restoredIndexName); 
cacheFiles.forEach(cacheFile -> assertTrue(cacheFile + " should have survived node restart", Files.exists(cacheFile))); assertThat("Cache files should be repopulated in cache", persistentCache.getNumDocs(), equalTo((long) cacheFiles.size())); assertAcked(client().admin().indices().prepareDelete(restoredIndexName)); assertBusy(() -> { cacheFiles.forEach(cacheFile -> assertFalse(cacheFile + " should have been cleaned up", Files.exists(cacheFile))); assertTrue(internalCluster().getInstance(CacheService.class, dataNode).getPersistentCache().hasDeletions()); }); cacheService.synchronizeCache(); assertThat(persistentCache.getNumDocs(), equalTo(0L)); } }
x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/SearchableSnapshotsPersistentCacheIntegTests.java
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.searchablesnapshots.cache; import org.apache.lucene.document.Document; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.xpack.searchablesnapshots.BaseSearchableSnapshotsIntegTestCase; import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX; import static org.elasticsearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.searchablesnapshots.cache.PersistentCache.resolveCacheIndexFolder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/66278") public class SearchableSnapshotsPersistentCacheIntegTests extends BaseSearchableSnapshotsIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) // ensure the cache is definitely used .put(CacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), new ByteSizeValue(1L, ByteSizeUnit.GB)) .build(); } public void testCacheSurviveRestart() throws Exception { final String fsRepoName = randomAlphaOfLength(10); final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); final String restoredIndexName = randomBoolean() ? 
indexName : randomAlphaOfLength(10).toLowerCase(Locale.ROOT); final String snapshotName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createRepository(fsRepoName, "fs"); final Settings.Builder originalIndexSettings = Settings.builder() .put(INDEX_SOFT_DELETES_SETTING.getKey(), true) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1); createAndPopulateIndex(indexName, originalIndexSettings); final SnapshotInfo snapshotInfo = createFullSnapshot(fsRepoName, snapshotName); assertThat(snapshotInfo.successfulShards(), greaterThan(0)); assertThat(snapshotInfo.successfulShards(), equalTo(snapshotInfo.totalShards())); assertAcked(client().admin().indices().prepareDelete(indexName)); final DiscoveryNodes discoveryNodes = client().admin().cluster().prepareState().clear().setNodes(true).get().getState().nodes(); final String dataNode = randomFrom(discoveryNodes.getDataNodes().values().toArray(DiscoveryNode.class)).getName(); mountSnapshot( fsRepoName, snapshotName, indexName, restoredIndexName, Settings.builder().put(INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._name", dataNode).build() ); ensureGreen(restoredIndexName); final Index restoredIndex = client().admin() .cluster() .prepareState() .clear() .setMetadata(true) .get() .getState() .metadata() .index(restoredIndexName) .getIndex(); final IndexService indexService = internalCluster().getInstance(IndicesService.class, dataNode).indexService(restoredIndex); final ShardPath shardPath = indexService.getShard(0).shardPath(); final Path shardCachePath = CacheService.getShardCachePath(shardPath); assertTrue(Files.isDirectory(shardCachePath)); final Set<Path> cacheFiles = new HashSet<>(); try (DirectoryStream<Path> snapshotCacheStream = Files.newDirectoryStream(shardCachePath)) { for (final Path snapshotCachePath : snapshotCacheStream) { assertTrue(snapshotCachePath + " should be a directory", Files.isDirectory(snapshotCachePath)); try (DirectoryStream<Path> cacheFileStream = Files.newDirectoryStream(snapshotCachePath)) { for (final Path cacheFilePath : cacheFileStream) { assertTrue(cacheFilePath + " should be a file", Files.isRegularFile(cacheFilePath)); cacheFiles.add(cacheFilePath); } } } } assertFalse("no cache files found", cacheFiles.isEmpty()); CacheService cacheService = internalCluster().getInstance(CacheService.class, dataNode); cacheService.synchronizeCache(); PersistentCache persistentCache = cacheService.getPersistentCache(); assertThat(persistentCache.getNumDocs(), equalTo((long) cacheFiles.size())); internalCluster().restartNode(dataNode, new InternalTestCluster.RestartCallback() { @Override public Settings onNodeStopped(String nodeName) { try { assertTrue(Files.isDirectory(shardCachePath)); final Path persistentCacheIndexDir = resolveCacheIndexFolder(shardPath.getRootDataPath()); assertTrue(Files.isDirectory(persistentCacheIndexDir)); final Map<String, Document> documents = PersistentCache.loadDocuments(persistentCacheIndexDir); assertThat(documents.size(), equalTo(cacheFiles.size())); for (Path cacheFile : cacheFiles) { final String cacheFileName = cacheFile.getFileName().toString(); assertTrue(cacheFileName + " should exist on disk", Files.isRegularFile(cacheFile)); assertThat(cacheFileName + " should exist in persistent cache index", documents.get(cacheFileName), notNullValue()); } } catch (IOException e) { throw new AssertionError(e); } return Settings.EMPTY; } }); persistentCache = internalCluster().getInstance(CacheService.class, dataNode).getPersistentCache(); assertThat(persistentCache.getNumDocs(), equalTo((long) 
cacheFiles.size())); ensureGreen(restoredIndexName); cacheFiles.forEach(cacheFile -> assertTrue(cacheFile + " should have survived node restart", Files.exists(cacheFile))); assertAcked(client().admin().indices().prepareDelete(restoredIndexName)); assertBusy(() -> cacheFiles.forEach(cacheFile -> assertFalse(cacheFile + " should have been cleaned up", Files.exists(cacheFile)))); cacheService = internalCluster().getInstance(CacheService.class, dataNode); cacheService.synchronizeCache(); persistentCache = cacheService.getPersistentCache(); assertThat(persistentCache.getNumDocs(), equalTo(0L)); } }
Fix SearchableSnapshotsPersistentCacheIntegTests.testCacheSurviveRestart (#66354) Closes #66278
x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/SearchableSnapshotsPersistentCacheIntegTests.java
Fix SearchableSnapshotsPersistentCacheIntegTests.testCacheSurviveRestart (#66354)
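Part of the stabilization described here is waiting for eventually consistent cache cleanup with the test framework's assertBusy rather than asserting immediately after the delete. A framework-free sketch of that retry-until-timeout idea, with hypothetical names, is shown below.

import java.time.Duration;

final class AwaitAssertion {
    // Re-run an assertion until it passes or the timeout elapses, rethrowing the last failure.
    static void awaitAssertion(Runnable assertion, Duration timeout, Duration interval) throws InterruptedException {
        long deadline = System.nanoTime() + timeout.toNanos();
        while (true) {
            try {
                assertion.run();
                return;
            } catch (AssertionError failure) {
                if (System.nanoTime() >= deadline) {
                    throw failure;
                }
                Thread.sleep(interval.toMillis());
            }
        }
    }
}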
Java
apache-2.0
c9a8419e22f3397bb588a5bf5186202da151cb91
0
spring-projects/spring-security,spring-projects/spring-security,spring-projects/spring-security,spring-projects/spring-security,spring-projects/spring-security,spring-projects/spring-security,spring-projects/spring-security
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.oauth2.client.web; import org.junit.Before; import org.junit.Test; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.security.oauth2.core.endpoint.OAuth2AuthorizationRequest; import org.springframework.security.oauth2.core.endpoint.OAuth2ParameterNames; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link HttpSessionOAuth2AuthorizationRequestRepository} when * {@link HttpSessionOAuth2AuthorizationRequestRepository#setAllowMultipleAuthorizationRequests(boolean)} * is enabled. * * @author Joe Grandja * @author Craig Andrews */ public class HttpSessionOAuth2AuthorizationRequestRepositoryAllowMultipleAuthorizationRequestsTests extends HttpSessionOAuth2AuthorizationRequestRepositoryTests { @Before public void setup() { this.authorizationRequestRepository = new HttpSessionOAuth2AuthorizationRequestRepository(); this.authorizationRequestRepository.setAllowMultipleAuthorizationRequests(true); } // gh-5110 @Test public void loadAuthorizationRequestWhenMultipleSavedThenReturnMatchingAuthorizationRequest() { MockHttpServletRequest request = new MockHttpServletRequest(); MockHttpServletResponse response = new MockHttpServletResponse(); String state1 = "state-1122"; OAuth2AuthorizationRequest authorizationRequest1 = createAuthorizationRequest().state(state1).build(); this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest1, request, response); String state2 = "state-3344"; OAuth2AuthorizationRequest authorizationRequest2 = createAuthorizationRequest().state(state2).build(); this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest2, request, response); String state3 = "state-5566"; OAuth2AuthorizationRequest authorizationRequest3 = createAuthorizationRequest().state(state3).build(); this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest3, request, response); request.addParameter(OAuth2ParameterNames.STATE, state1); OAuth2AuthorizationRequest loadedAuthorizationRequest1 = this.authorizationRequestRepository .loadAuthorizationRequest(request); assertThat(loadedAuthorizationRequest1).isEqualTo(authorizationRequest1); request.removeParameter(OAuth2ParameterNames.STATE); request.addParameter(OAuth2ParameterNames.STATE, state2); OAuth2AuthorizationRequest loadedAuthorizationRequest2 = this.authorizationRequestRepository .loadAuthorizationRequest(request); assertThat(loadedAuthorizationRequest2).isEqualTo(authorizationRequest2); request.removeParameter(OAuth2ParameterNames.STATE); request.addParameter(OAuth2ParameterNames.STATE, state3); OAuth2AuthorizationRequest loadedAuthorizationRequest3 = this.authorizationRequestRepository .loadAuthorizationRequest(request); assertThat(loadedAuthorizationRequest3).isEqualTo(authorizationRequest3); } @Test public void 
loadAuthorizationRequestWhenSavedWithAllowMultipleAuthorizationRequests() { // save 2 requests with legacy (allowMultipleAuthorizationRequests=true) and load // with new HttpSessionOAuth2AuthorizationRequestRepository legacy = new HttpSessionOAuth2AuthorizationRequestRepository(); legacy.setAllowMultipleAuthorizationRequests(true); MockHttpServletRequest request = new MockHttpServletRequest(); MockHttpServletResponse response = new MockHttpServletResponse(); String state1 = "state-1122"; OAuth2AuthorizationRequest authorizationRequest1 = createAuthorizationRequest().state(state1).build(); legacy.saveAuthorizationRequest(authorizationRequest1, request, response); String state2 = "state-3344"; OAuth2AuthorizationRequest authorizationRequest2 = createAuthorizationRequest().state(state2).build(); legacy.saveAuthorizationRequest(authorizationRequest2, request, response); request.setParameter(OAuth2ParameterNames.STATE, state1); OAuth2AuthorizationRequest loaded = this.authorizationRequestRepository.loadAuthorizationRequest(request); assertThat(loaded).isEqualTo(authorizationRequest1); } @Test public void saveAuthorizationRequestWhenSavedWithAllowMultipleAuthorizationRequests() { // save 2 requests with legacy (allowMultipleAuthorizationRequests=true), save // with new, and load with new HttpSessionOAuth2AuthorizationRequestRepository legacy = new HttpSessionOAuth2AuthorizationRequestRepository(); legacy.setAllowMultipleAuthorizationRequests(true); MockHttpServletRequest request = new MockHttpServletRequest(); MockHttpServletResponse response = new MockHttpServletResponse(); String state1 = "state-1122"; OAuth2AuthorizationRequest authorizationRequest1 = createAuthorizationRequest().state(state1).build(); legacy.saveAuthorizationRequest(authorizationRequest1, request, response); String state2 = "state-3344"; OAuth2AuthorizationRequest authorizationRequest2 = createAuthorizationRequest().state(state2).build(); legacy.saveAuthorizationRequest(authorizationRequest2, request, response); String state3 = "state-5566"; OAuth2AuthorizationRequest authorizationRequest3 = createAuthorizationRequest().state(state3).build(); this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest3, request, response); request.setParameter(OAuth2ParameterNames.STATE, state3); OAuth2AuthorizationRequest loaded = this.authorizationRequestRepository.loadAuthorizationRequest(request); assertThat(loaded).isEqualTo(authorizationRequest3); } }
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/web/HttpSessionOAuth2AuthorizationRequestRepositoryAllowMultipleAuthorizationRequestsTests.java
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.oauth2.client.web; import org.junit.Before; import org.junit.Test; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.security.oauth2.core.endpoint.OAuth2AuthorizationRequest; import org.springframework.security.oauth2.core.endpoint.OAuth2ParameterNames; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link HttpSessionOAuth2AuthorizationRequestRepository} when * {@link HttpSessionOAuth2AuthorizationRequestRepository#setAllowMultipleAuthorizationRequests(boolean)} * is enabled. * * @author Joe Grandja * @author Craig Andrews */ public class HttpSessionOAuth2AuthorizationRequestRepositoryAllowMultipleAuthorizationRequestsTests extends HttpSessionOAuth2AuthorizationRequestRepositoryTests { @Before public void setup() { this.authorizationRequestRepository = new HttpSessionOAuth2AuthorizationRequestRepository(); this.authorizationRequestRepository.setAllowMultipleAuthorizationRequests(true); } // gh-5110 @Test public void loadAuthorizationRequestWhenMultipleSavedThenReturnMatchingAuthorizationRequest() { MockHttpServletRequest request = new MockHttpServletRequest(); MockHttpServletResponse response = new MockHttpServletResponse(); String state1 = "state-1122"; OAuth2AuthorizationRequest authorizationRequest1 = createAuthorizationRequest().state(state1).build(); this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest1, request, response); String state2 = "state-3344"; OAuth2AuthorizationRequest authorizationRequest2 = createAuthorizationRequest().state(state2).build(); this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest2, request, response); String state3 = "state-5566"; OAuth2AuthorizationRequest authorizationRequest3 = createAuthorizationRequest().state(state3).build(); this.authorizationRequestRepository.saveAuthorizationRequest(authorizationRequest3, request, response); request.addParameter(OAuth2ParameterNames.STATE, state1); OAuth2AuthorizationRequest loadedAuthorizationRequest1 = this.authorizationRequestRepository .loadAuthorizationRequest(request); assertThat(loadedAuthorizationRequest1).isEqualTo(authorizationRequest1); request.removeParameter(OAuth2ParameterNames.STATE); request.addParameter(OAuth2ParameterNames.STATE, state2); OAuth2AuthorizationRequest loadedAuthorizationRequest2 = this.authorizationRequestRepository .loadAuthorizationRequest(request); assertThat(loadedAuthorizationRequest2).isEqualTo(authorizationRequest2); request.removeParameter(OAuth2ParameterNames.STATE); request.addParameter(OAuth2ParameterNames.STATE, state3); OAuth2AuthorizationRequest loadedAuthorizationRequest3 = this.authorizationRequestRepository .loadAuthorizationRequest(request); assertThat(loadedAuthorizationRequest3).isEqualTo(authorizationRequest3); } }
Additional HttpSessionOAuth2AuthorizationRequestRepository tests Issue gh-5145
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/web/HttpSessionOAuth2AuthorizationRequestRepositoryAllowMultipleAuthorizationRequestsTests.java
Additional HttpSessionOAuth2AuthorizationRequestRepository tests
Java
apache-2.0
94b699e6dd91556ea0c1c482fa0a354c3ae619c7
0
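As a hedged illustration of the repository behaviour exercised by the tests above, the sketch below saves an authorization request with setAllowMultipleAuthorizationRequests(true) enabled and loads it back by its state parameter. It mirrors the record's own test code; the authorization URI, client id, redirect URI and state value are placeholder assumptions, not values from the original tests or any real provider.

// Hedged sketch: save and load an OAuth2AuthorizationRequest with multiple
// pending authorization requests allowed, keyed by the state parameter.
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.security.oauth2.client.web.HttpSessionOAuth2AuthorizationRequestRepository;
import org.springframework.security.oauth2.core.endpoint.OAuth2AuthorizationRequest;
import org.springframework.security.oauth2.core.endpoint.OAuth2ParameterNames;

public class MultipleAuthorizationRequestsSketch {

    public static void main(String[] args) {
        HttpSessionOAuth2AuthorizationRequestRepository repository =
                new HttpSessionOAuth2AuthorizationRequestRepository();
        // Keep several pending authorization requests in the session, keyed by state.
        repository.setAllowMultipleAuthorizationRequests(true);

        MockHttpServletRequest request = new MockHttpServletRequest();
        MockHttpServletResponse response = new MockHttpServletResponse();

        OAuth2AuthorizationRequest authorizationRequest = OAuth2AuthorizationRequest.authorizationCode()
                .authorizationUri("https://provider.example/oauth2/authorize") // placeholder
                .clientId("client-id")                                         // placeholder
                .redirectUri("https://app.example/login/oauth2/code/demo")     // placeholder
                .state("state-1122")
                .build();
        repository.saveAuthorizationRequest(authorizationRequest, request, response);

        // On the callback, the repository resolves the request by the state parameter.
        request.addParameter(OAuth2ParameterNames.STATE, "state-1122");
        OAuth2AuthorizationRequest loaded = repository.loadAuthorizationRequest(request);
        System.out.println(loaded.getState()); // prints state-1122
    }
}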
krasserm/ipf,oehf/ipf,oehf/ipf,krasserm/ipf,oehf/ipf,oehf/ipf
/* * Copyright 2008 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openehealth.ipf.platform.manager.connection.ui.wizards; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.resource.JFaceResources; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jface.wizard.Wizard; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.Display; import org.eclipse.ui.IMemento; import org.eclipse.ui.INewWizard; import org.eclipse.ui.IWorkbench; import org.eclipse.ui.WorkbenchException; import org.eclipse.ui.XMLMemento; import org.openehealth.ipf.platform.manager.connection.ConnectionConfigurationImpl; import org.openehealth.ipf.platform.manager.connection.IConnectionConfiguration; import org.openehealth.ipf.platform.manager.connection.IJMXConnectionManager; import org.openehealth.ipf.platform.manager.connection.ui.jobs.OpenJMXConnectionJob; import org.openehealth.ipf.platform.manager.connection.ui.osgi.Activator; import org.openehealth.ipf.platform.manager.connection.ui.utils.encoding.Base64Utils; import org.openehealth.ipf.platform.manager.connection.ui.utils.messages.Messages; /** * * Wizard for creating new connections * <p> * * @author Mitko Kolev */ public class NewConnectionWizard extends Wizard implements INewWizard { private IConnectionConfiguration connectionConfigurationObject; IWizardPage newWizardPage; private boolean shouldConnect = true; private static final String WIZARD_TITLE_KEY = "NewConnectionWizardPage.title"; private static final String IMG_CONNECTION_KEY = "icons/connection/computer_add_64x64.png"; private final static String CONNECTIONS_MEMENTO__KEY = "connections"; private final static String lasConnectionHostKey = "lastConnectionHost"; private final static String lasConnectionPortKey = "lastConnectionPort"; private final static String lasConnectionNameKey = "lasConnectionName"; private final static String lasConnectionUserKey = "lastConnection1"; private final static String lasConnectionPasswordKey = "lastConnection2"; private Image connectionImage; private ImageDescriptor connectionImageDescriptor; /** * Constructs a new Connection wizard. The pages will initialize its * connectionObject. * * @param connectionConfiguration * the connection from which to initialize the data. 
*/ public NewConnectionWizard(IConnectionConfiguration connectionConfiguration) { super(); this.connectionConfigurationObject = connectionConfiguration; this.setWindowTitle(Messages.getLabelString(WIZARD_TITLE_KEY)); } private void loadImages() { connectionImageDescriptor = Activator.getDefault().getImageDescriptor( IMG_CONNECTION_KEY); if (connectionImageDescriptor != null) connectionImage = connectionImageDescriptor.createImage(); } /* * (non-Javadoc) * * @see org.eclipse.jface.wizard.Wizard#performFinish() */ @Override public boolean performFinish() { IJMXConnectionManager manager = Activator.getDefault() .getJMXConnectionManager(); IWizardPage page = newWizardPage.getNextPage(); if (page == null) { if (connectionConfigurationObject != null) { manager .addConnectionConfiguration(connectionConfigurationObject); } } this.saveState(); if (this.shouldConnect) { new OpenJMXConnectionJob(Display.getCurrent(), manager, connectionConfigurationObject).schedule(); } return true; } /* * (non-Javadoc) Method declared on IWizard. */ @Override public Image getDefaultPageImage() { loadImages(); if (connectionImage == null) { connectionImage = JFaceResources.getResources() .createImageWithDefault(connectionImageDescriptor); } return connectionImage; } /* * (non-Javadoc) * * @see org.eclipse.ui.IWorkbenchWizard#init(org.eclipse.ui.IWorkbench, * org.eclipse.jface.viewers.IStructuredSelection) */ @Override public void init(IWorkbench workbench, IStructuredSelection selection) { } @Override public void addPages() { IConnectionConfiguration lastConnetionPrototype = this.connectionConfigurationObject; if (lastConnetionPrototype == null) { lastConnetionPrototype = this.readAndSetState(); } if (lastConnetionPrototype != null) { newWizardPage = new NewConnectionWizardPage(this, lastConnetionPrototype); } else { newWizardPage = new NewConnectionWizardPage(this); } this.addPage(newWizardPage); } public void setConnectionConfigurationObject( IConnectionConfiguration connectionConfiguration) { this.connectionConfigurationObject = connectionConfiguration; } public boolean isShouldConnect() { return shouldConnect; } public void setShouldConnect(boolean shouldConnect) { this.shouldConnect = shouldConnect; } private IConnectionConfiguration readComponentState(IMemento memento) { String lastConnectionHost = memento.getString(lasConnectionHostKey); if (lastConnectionHost == null) { // we have saved no connection return null; } Integer lastConnectionPort = memento.getInteger(lasConnectionPortKey); if (lastConnectionPort == null || lastConnectionPort <= 0) { // we have saved no connection return null; } String lastConnectionName = memento.getString(lasConnectionNameKey); if (lastConnectionName == null) { // we have saved no connection return null; } String lastConnectionUserName = memento.getString(lasConnectionUserKey); String lastConnectionPassword = memento .getString(lasConnectionPasswordKey); if (lastConnectionUserName == null || lastConnectionPassword == null) { IConnectionConfiguration connectionConfiguration = new ConnectionConfigurationImpl( lastConnectionName, lastConnectionHost, lastConnectionPort); // return connection with no authentication return connectionConfiguration; } final String lastConnectionPasswordDecoded = new String(Base64Utils .decode(lastConnectionPassword)); final String lastConnectionUserNameDecoded = Base64Utils .decode(lastConnectionUserName); IConnectionConfiguration connectionConfiguration = new ConnectionConfigurationImpl( lastConnectionName, lastConnectionHost, lastConnectionPort, 
lastConnectionUserNameDecoded, lastConnectionPasswordDecoded); return connectionConfiguration; } private void writeComponentState(IMemento memento) { memento.putString(lasConnectionNameKey, this.connectionConfigurationObject.getName()); memento.putString(lasConnectionHostKey, this.connectionConfigurationObject.getHost()); memento.putInteger(lasConnectionPortKey, this.connectionConfigurationObject.getPort()); if (connectionConfigurationObject.getAuthenticationCredentials() .isValid()) { String userNameEncoded = Base64Utils .encode(connectionConfigurationObject .getAuthenticationCredentials().getUserName()); String passwordEncoded = Base64Utils .encode(connectionConfigurationObject .getAuthenticationCredentials().getPassword()); memento.putString(lasConnectionUserKey, userNameEncoded); memento.putString(lasConnectionPasswordKey, passwordEncoded); } } protected synchronized void saveState() { XMLMemento memento = XMLMemento .createWriteRoot(CONNECTIONS_MEMENTO__KEY); IMemento child = memento.createChild(CONNECTIONS_MEMENTO__KEY); writeComponentState(child); Activator.saveMementoToFile(memento); } private synchronized IConnectionConfiguration readAndSetState() { try { XMLMemento memento = XMLMemento.createReadRoot(new BufferedReader( new FileReader(Activator.getStateFile()))); IMemento thisMemento = memento.getChild(CONNECTIONS_MEMENTO__KEY); if (thisMemento != null) { return readComponentState(thisMemento); } } catch (WorkbenchException we) { // ignoreit } catch (FileNotFoundException fnfe) { // ignore } return null; } @Override public void dispose() { super.dispose(); if (connectionImage != null) { JFaceResources.getResources().destroyImage( connectionImageDescriptor); connectionImage = null; } } }
platform-manager/plugins/org.openehealth.ipf.platform.manager.connection.ui/src/main/java/org/openehealth/ipf/platform/manager/connection/ui/wizards/NewConnectionWizard.java
/* * Copyright 2008 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openehealth.ipf.platform.manager.connection.ui.wizards; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.resource.JFaceResources; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jface.wizard.Wizard; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.Display; import org.eclipse.ui.IMemento; import org.eclipse.ui.INewWizard; import org.eclipse.ui.IWorkbench; import org.eclipse.ui.WorkbenchException; import org.eclipse.ui.XMLMemento; import org.openehealth.ipf.platform.manager.connection.ConnectionConfigurationImpl; import org.openehealth.ipf.platform.manager.connection.IConnectionConfiguration; import org.openehealth.ipf.platform.manager.connection.IJMXConnectionManager; import org.openehealth.ipf.platform.manager.connection.ui.jobs.OpenJMXConnectionJob; import org.openehealth.ipf.platform.manager.connection.ui.osgi.Activator; import org.openehealth.ipf.platform.manager.connection.ui.utils.encoding.Base64Utils; import org.openehealth.ipf.platform.manager.connection.ui.utils.messages.Messages; /** * * Wizard for creating new connections * <p> * * @author Mitko Kolev (i000174) */ public class NewConnectionWizard extends Wizard implements INewWizard { private IConnectionConfiguration connectionConfigurationObject; IWizardPage newWizardPage; private boolean shouldConnect = true; private static final String WIZARD_TITLE_KEY = "NewConnectionWizardPage.title"; private static final String IMG_CONNECTION_KEY = "icons/connection/computer_add_64x64.png"; private final static String CONNECTIONS_MEMENTO__KEY = "connections"; private final static String lasConnectionHostKey = "lastConnectionHost"; private final static String lasConnectionPortKey = "lastConnectionPort"; private final static String lasConnectionNameKey = "lasConnectionName"; private final static String lasConnectionUserKey = "lastConnection1"; private final static String lasConnectionPasswordKey = "lastConnection2"; private Image connectionImage; private ImageDescriptor connectionImageDescriptor; /** * Constructs a new Connection wizard. The pages will initialize its * connectionObject. * * @param connectionConfiguration * the connection from which to initialize the data. 
*/ public NewConnectionWizard(IConnectionConfiguration connectionConfiguration) { super(); this.connectionConfigurationObject = connectionConfiguration; this.setWindowTitle(Messages.getLabelString(WIZARD_TITLE_KEY)); } private void loadImages() { connectionImageDescriptor = Activator.getDefault().getImageDescriptor( IMG_CONNECTION_KEY); if (connectionImageDescriptor != null) connectionImage = connectionImageDescriptor.createImage(); } /* * (non-Javadoc) * * @see org.eclipse.jface.wizard.Wizard#performFinish() */ @Override public boolean performFinish() { IJMXConnectionManager manager = Activator.getDefault() .getJMXConnectionManager(); IWizardPage page = newWizardPage.getNextPage(); if (page == null) { if (connectionConfigurationObject != null) { manager .addConnectionConfiguration(connectionConfigurationObject); } } this.saveState(); if (this.shouldConnect) { new OpenJMXConnectionJob(Display.getCurrent(), manager, connectionConfigurationObject).schedule(); } return true; } /* * (non-Javadoc) Method declared on IWizard. */ @Override public Image getDefaultPageImage() { loadImages(); if (connectionImage == null) { connectionImage = JFaceResources.getResources() .createImageWithDefault(connectionImageDescriptor); } return connectionImage; } /* * (non-Javadoc) * * @see org.eclipse.ui.IWorkbenchWizard#init(org.eclipse.ui.IWorkbench, * org.eclipse.jface.viewers.IStructuredSelection) */ @Override public void init(IWorkbench workbench, IStructuredSelection selection) { } @Override public void addPages() { IConnectionConfiguration lastConnetionPrototype = this.connectionConfigurationObject; if (lastConnetionPrototype == null) { lastConnetionPrototype = this.readAndSetState(); } if (lastConnetionPrototype != null) { newWizardPage = new NewConnectionWizardPage(this, lastConnetionPrototype); } else { newWizardPage = new NewConnectionWizardPage(this); } this.addPage(newWizardPage); } public void setConnectionConfigurationObject( IConnectionConfiguration connectionConfiguration) { this.connectionConfigurationObject = connectionConfiguration; } public boolean isShouldConnect() { return shouldConnect; } public void setShouldConnect(boolean shouldConnect) { this.shouldConnect = shouldConnect; } private IConnectionConfiguration readComponentState(IMemento memento) { String lastConnectionHost = memento.getString(lasConnectionHostKey); if (lastConnectionHost == null) { // we have saved no connection return null; } Integer lastConnectionPort = memento.getInteger(lasConnectionPortKey); if (lastConnectionPort == null || lastConnectionPort <= 0) { // we have saved no connection return null; } String lastConnectionName = memento.getString(lasConnectionNameKey); if (lastConnectionName == null) { // we have saved no connection return null; } String lastConnectionUserName = memento.getString(lasConnectionUserKey); String lastConnectionPassword = memento .getString(lasConnectionPasswordKey); if (lastConnectionUserName == null || lastConnectionPassword == null) { IConnectionConfiguration connectionConfiguration = new ConnectionConfigurationImpl( lastConnectionName, lastConnectionHost, lastConnectionPort); // return connection with no authentication return connectionConfiguration; } final String lastConnectionPasswordDecoded = new String(Base64Utils .decode(lastConnectionPassword)); final String lastConnectionUserNameDecoded = Base64Utils .decode(lastConnectionUserName); IConnectionConfiguration connectionConfiguration = new ConnectionConfigurationImpl( lastConnectionName, lastConnectionHost, lastConnectionPort, 
lastConnectionUserNameDecoded, lastConnectionPasswordDecoded); return connectionConfiguration; } private void writeComponentState(IMemento memento) { memento.putString(lasConnectionNameKey, this.connectionConfigurationObject.getName()); memento.putString(lasConnectionHostKey, this.connectionConfigurationObject.getHost()); memento.putInteger(lasConnectionPortKey, this.connectionConfigurationObject.getPort()); if (connectionConfigurationObject.getAuthenticationCredentials() .isValid()) { String userNameEncoded = Base64Utils .encode(connectionConfigurationObject .getAuthenticationCredentials().getUserName()); String passwordEncoded = Base64Utils .encode(connectionConfigurationObject .getAuthenticationCredentials().getPassword()); memento.putString(lasConnectionUserKey, userNameEncoded); memento.putString(lasConnectionPasswordKey, passwordEncoded); } } protected synchronized void saveState() { XMLMemento memento = XMLMemento .createWriteRoot(CONNECTIONS_MEMENTO__KEY); IMemento child = memento.createChild(CONNECTIONS_MEMENTO__KEY); writeComponentState(child); Activator.saveMementoToFile(memento); } private synchronized IConnectionConfiguration readAndSetState() { try { XMLMemento memento = XMLMemento.createReadRoot(new BufferedReader( new FileReader(Activator.getStateFile()))); IMemento thisMemento = memento.getChild(CONNECTIONS_MEMENTO__KEY); if (thisMemento != null) { return readComponentState(thisMemento); } } catch (WorkbenchException we) { // ignoreit } catch (FileNotFoundException fnfe) { // ignore } return null; } @Override public void dispose() { super.dispose(); if (connectionImage != null) { JFaceResources.getResources().destroyImage( connectionImageDescriptor); connectionImage = null; } } }
removed internal user number from class-level javadocs
platform-manager/plugins/org.openehealth.ipf.platform.manager.connection.ui/src/main/java/org/openehealth/ipf/platform/manager/connection/ui/wizards/NewConnectionWizard.java
removed internal user number from class-level javadocs
Java
apache-2.0
9a770a26365f8fd16887bd2023b7afefd36a6b40
0
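The wizard above persists the last connection through Eclipse's XMLMemento API (writeComponentState/readComponentState). The sketch below is a minimal, hedged round trip of that mechanism using an in-memory string instead of the plugin's state file; the key names and the host/port values are placeholders, and this is not the plugin's actual code.

// Hedged sketch of an XMLMemento write/read round trip, mirroring the
// saveState()/readAndSetState() pattern in NewConnectionWizard.
import java.io.StringReader;
import java.io.StringWriter;

import org.eclipse.ui.IMemento;
import org.eclipse.ui.WorkbenchException;
import org.eclipse.ui.XMLMemento;

public class ConnectionMementoSketch {

    public static void main(String[] args) throws Exception {
        // Write side: create a root, add a child memento and store the fields.
        XMLMemento root = XMLMemento.createWriteRoot("connections");
        IMemento child = root.createChild("connections");
        child.putString("lastConnectionHost", "localhost"); // placeholder value
        child.putInteger("lastConnectionPort", 9999);       // placeholder value

        StringWriter out = new StringWriter();
        root.save(out);

        // Read side: parse the XML back and look up the saved child.
        try {
            XMLMemento read = XMLMemento.createReadRoot(new StringReader(out.toString()));
            IMemento saved = read.getChild("connections");
            if (saved != null) {
                String host = saved.getString("lastConnectionHost");
                Integer port = saved.getInteger("lastConnectionPort");
                System.out.println(host + ":" + port);
            }
        } catch (WorkbenchException e) {
            // No usable state; the wizard simply falls back to an empty page.
        }
    }
}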
thusithathilina/carbon-transports,shafreenAnfar/carbon-transports,wso2/carbon-transports,wggihan/carbon-transports,chanakaudaya/carbon-transports
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package org.wso2.carbon.transport.http.netty.common; import io.netty.handler.codec.http.DefaultHttpRequest; import io.netty.handler.codec.http.DefaultHttpResponse; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpMessage; import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.codec.http.HttpResponse; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpVersion; import org.wso2.carbon.kernel.utils.Utils; import org.wso2.carbon.messaging.CarbonMessage; import org.wso2.carbon.messaging.Header; import org.wso2.carbon.messaging.Headers; import org.wso2.carbon.transport.http.netty.common.ssl.SSLConfig; import org.wso2.carbon.transport.http.netty.config.Parameter; import java.io.File; import java.util.LinkedList; import java.util.List; import java.util.Map; import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; /** * Includes utility methods for creating http requests and responses and their related properties. */ public class Util { private static final String DEFAULT_HTTP_METHOD_POST = "POST"; private static final String DEFAULT_VERSION_HTTP_1_1 = "HTTP/1.1"; public static Headers getHeaders(HttpMessage message) { List<Header> headers = new LinkedList<>(); if (message.headers() != null) { for (Map.Entry<String, String> k : message.headers().entries()) { headers.add(new Header(k.getKey(), k.getValue())); } } return new Headers(headers); } public static void setHeaders(HttpMessage message, Headers headers) { HttpHeaders httpHeaders = message.headers(); for (Header header : headers.getAll()) { httpHeaders.add(header.getName(), header.getValue()); } } public static String getStringValue(CarbonMessage msg, String key, String defaultValue) { String value = (String) msg.getProperty(key); if (value == null) { return defaultValue; } return value; } public static int getIntValue(CarbonMessage msg, String key, int defaultValue) { Integer value = (Integer) msg.getProperty(key); if (value == null) { return defaultValue; } return value; } @SuppressWarnings("unchecked") public static HttpResponse createHttpResponse(CarbonMessage msg) { HttpVersion httpVersion = new HttpVersion(Util.getStringValue(msg, Constants.HTTP_VERSION, HTTP_1_1.text()), true); int statusCode = Util.getIntValue(msg, Constants.HTTP_STATUS_CODE, 200); HttpResponseStatus httpResponseStatus = new HttpResponseStatus(statusCode, HttpResponseStatus.valueOf(statusCode).reasonPhrase()); DefaultHttpResponse outgoingResponse = new DefaultHttpResponse(httpVersion, httpResponseStatus, false); Headers headers = msg.getHeaders(); Util.setHeaders(outgoingResponse, headers); return outgoingResponse; } @SuppressWarnings("unchecked") public static HttpRequest createHttpRequest(CarbonMessage msg) { HttpMethod httpMethod; if (null != msg.getProperty(Constants.HTTP_METHOD)) { httpMethod = new 
HttpMethod((String) msg.getProperty(Constants.HTTP_METHOD)); } else { httpMethod = new HttpMethod(DEFAULT_HTTP_METHOD_POST); } HttpVersion httpVersion; if (null != msg.getProperty(Constants.HTTP_VERSION)) { httpVersion = new HttpVersion((String) msg.getProperty(Constants.HTTP_VERSION), true); } else { httpVersion = new HttpVersion(DEFAULT_VERSION_HTTP_1_1, true); } if ((String) msg.getProperty(Constants.TO) == null) { msg.setProperty(Constants.TO, "/"); } HttpRequest outgoingRequest = new DefaultHttpRequest(httpVersion, httpMethod, (String) msg.getProperty(Constants.TO), false); Headers headers = msg.getHeaders(); Util.setHeaders(outgoingRequest, headers); return outgoingRequest; } public static SSLConfig getSSLConfigForListener(String certPass, String keyStorePass, String keyStoreFilePath, String trustStoreFilePath, String trustStorePass, List<Parameter> parametersList) { if (certPass == null) { certPass = keyStorePass; } if (keyStoreFilePath == null || keyStorePass == null) { throw new IllegalArgumentException("keyStoreFile or keyStorePass not defined for HTTPS scheme"); } File keyStore = new File(Utils.substituteVariables(keyStoreFilePath)); if (!keyStore.exists()) { throw new IllegalArgumentException("KeyStore File " + keyStoreFilePath + " not found"); } SSLConfig sslConfig = new SSLConfig(keyStore, keyStorePass).setCertPass(certPass); for (Parameter parameter : parametersList) { if (parameter.getName() .equals(Constants.SERVER_SUPPORT_CIPHERS)) { sslConfig.setCipherSuites(parameter.getValue()); } else if (parameter.getName() .equals(Constants.SERVER_SUPPORT_HTTPS_PROTOCOLS)) { sslConfig.setEnableProtocols(parameter.getValue()); } else if (parameter.getName() .equals(Constants.SERVER_SUPPORTED_SNIMATCHERS)) { sslConfig.setSniMatchers(parameter.getValue()); } else if (parameter.getName() .equals(Constants.SERVER_SUPPORTED_SERVER_NAMES)) { sslConfig.setServerNames(parameter.getValue()); } else if (parameter.getName() .equals(Constants.SERVER_ENABLE_SESSION_CREATION)) { sslConfig.setEnableSessionCreation(Boolean.parseBoolean(parameter.getValue())); } else if (parameter.getName() .equals(Constants.SSL_VERIFY_CLIENT)) { sslConfig.setNeedClientAuth(Boolean.parseBoolean(parameter.getValue())); } } if (trustStoreFilePath != null) { File trustStore = new File(Utils.substituteVariables(trustStoreFilePath)); if (!trustStore.exists()) { throw new IllegalArgumentException("trustStore File " + trustStoreFilePath + " not found"); } if (trustStorePass == null) { throw new IllegalArgumentException("trustStorePass is not defined for HTTPS scheme"); } sslConfig.setTrustStore(trustStore).setTrustStorePass(trustStorePass); } return sslConfig; } public static SSLConfig getSSLConfigForSender(String certPass, String keyStorePass, String keyStoreFilePath, String trustStoreFilePath, String trustStorePass, List<Parameter> parametersList) { if (certPass == null) { certPass = keyStorePass; } if (trustStoreFilePath == null || trustStorePass == null) { throw new IllegalArgumentException("TrusStoreFile or trustStorePass not defined for HTTPS scheme"); } SSLConfig sslConfig = new SSLConfig(null, null).setCertPass(null); if (keyStoreFilePath != null) { File keyStore = new File(Utils.substituteVariables(keyStoreFilePath)); if (!keyStore.exists()) { throw new IllegalArgumentException("KeyStore File " + trustStoreFilePath + " not found"); } sslConfig = new SSLConfig(keyStore, keyStorePass).setCertPass(certPass); } File trustStore = new File(Utils.substituteVariables(trustStoreFilePath)); 
sslConfig.setTrustStore(trustStore).setTrustStorePass(trustStorePass); sslConfig.setClientMode(true); for (Parameter parameter : parametersList) { if (parameter.getName() .equals(Constants.CLIENT_SUPPORT_CIPHERS)) { sslConfig.setCipherSuites(parameter.getValue()); } else if (parameter.getName() .equals(Constants.CLIENT_SUPPORT_HTTPS_PROTOCOLS)) { sslConfig.setEnableProtocols(parameter.getValue()); } else if (parameter.getName() .equals(Constants.CLIENT_ENABLE_SESSION_CREATION)) { sslConfig.setEnableSessionCreation(Boolean.parseBoolean(parameter.getValue())); } } return sslConfig; } }
http/netty/components/org.wso2.carbon.transport.http.netty/src/main/java/org/wso2/carbon/transport/http/netty/common/Util.java
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package org.wso2.carbon.transport.http.netty.common; import io.netty.handler.codec.http.DefaultHttpRequest; import io.netty.handler.codec.http.DefaultHttpResponse; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpMessage; import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.codec.http.HttpResponse; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpVersion; import org.wso2.carbon.messaging.CarbonMessage; import org.wso2.carbon.messaging.Header; import org.wso2.carbon.messaging.Headers; import org.wso2.carbon.transport.http.netty.common.ssl.SSLConfig; import org.wso2.carbon.transport.http.netty.config.Parameter; import java.io.File; import java.util.LinkedList; import java.util.List; import java.util.Map; import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; /** * Includes utility methods for creating http requests and responses and their related properties. */ public class Util { private static final String DEFAULT_HTTP_METHOD_POST = "POST"; private static final String DEFAULT_VERSION_HTTP_1_1 = "HTTP/1.1"; public static Headers getHeaders(HttpMessage message) { List<Header> headers = new LinkedList<>(); if (message.headers() != null) { for (Map.Entry<String, String> k : message.headers().entries()) { headers.add(new Header(k.getKey(), k.getValue())); } } return new Headers(headers); } public static void setHeaders(HttpMessage message, Headers headers) { HttpHeaders httpHeaders = message.headers(); for (Header header : headers.getAll()) { httpHeaders.add(header.getName(), header.getValue()); } } public static String getStringValue(CarbonMessage msg, String key, String defaultValue) { String value = (String) msg.getProperty(key); if (value == null) { return defaultValue; } return value; } public static int getIntValue(CarbonMessage msg, String key, int defaultValue) { Integer value = (Integer) msg.getProperty(key); if (value == null) { return defaultValue; } return value; } @SuppressWarnings("unchecked") public static HttpResponse createHttpResponse(CarbonMessage msg) { HttpVersion httpVersion = new HttpVersion(Util.getStringValue(msg, Constants.HTTP_VERSION, HTTP_1_1.text()), true); int statusCode = Util.getIntValue(msg, Constants.HTTP_STATUS_CODE, 200); HttpResponseStatus httpResponseStatus = new HttpResponseStatus(statusCode, HttpResponseStatus.valueOf(statusCode).reasonPhrase()); DefaultHttpResponse outgoingResponse = new DefaultHttpResponse(httpVersion, httpResponseStatus, false); Headers headers = msg.getHeaders(); Util.setHeaders(outgoingResponse, headers); return outgoingResponse; } @SuppressWarnings("unchecked") public static HttpRequest createHttpRequest(CarbonMessage msg) { HttpMethod httpMethod; if (null != msg.getProperty(Constants.HTTP_METHOD)) { httpMethod = new HttpMethod((String) 
msg.getProperty(Constants.HTTP_METHOD)); } else { httpMethod = new HttpMethod(DEFAULT_HTTP_METHOD_POST); } HttpVersion httpVersion; if (null != msg.getProperty(Constants.HTTP_VERSION)) { httpVersion = new HttpVersion((String) msg.getProperty(Constants.HTTP_VERSION), true); } else { httpVersion = new HttpVersion(DEFAULT_VERSION_HTTP_1_1, true); } if ((String) msg.getProperty(Constants.TO) == null) { msg.setProperty(Constants.TO, "/"); } HttpRequest outgoingRequest = new DefaultHttpRequest(httpVersion, httpMethod, (String) msg.getProperty(Constants.TO), false); Headers headers = msg.getHeaders(); Util.setHeaders(outgoingRequest, headers); return outgoingRequest; } public static SSLConfig getSSLConfigForListener(String certPass, String keyStorePass, String keyStoreFile, String trustStoreFile, String trustStorePass, List<Parameter> parametersList) { if (certPass == null) { certPass = keyStorePass; } if (keyStoreFile == null || keyStorePass == null) { throw new IllegalArgumentException("keyStoreFile or keyStorePass not defined for " + "HTTPS scheme"); } File keyStore = new File(keyStoreFile); if (!keyStore.exists()) { throw new IllegalArgumentException("KeyStore File " + keyStoreFile + " not found"); } SSLConfig sslConfig = new SSLConfig(keyStore, keyStorePass).setCertPass(certPass); for (Parameter parameter : parametersList) { if (parameter.getName() .equals(Constants.SERVER_SUPPORT_CIPHERS)) { sslConfig.setCipherSuites(parameter.getValue()); } else if (parameter.getName() .equals(Constants.SERVER_SUPPORT_HTTPS_PROTOCOLS)) { sslConfig.setEnableProtocols(parameter.getValue()); } else if (parameter.getName() .equals(Constants.SERVER_SUPPORTED_SNIMATCHERS)) { sslConfig.setSniMatchers(parameter.getValue()); } else if (parameter.getName() .equals(Constants.SERVER_SUPPORTED_SERVER_NAMES)) { sslConfig.setServerNames(parameter.getValue()); } else if (parameter.getName() .equals(Constants.SERVER_ENABLE_SESSION_CREATION)) { sslConfig.setEnableSessionCreation(Boolean.parseBoolean(parameter.getValue())); } else if (parameter.getName() .equals(Constants.SSL_VERIFY_CLIENT)) { sslConfig.setNeedClientAuth(Boolean.parseBoolean(parameter.getValue())); } } if (trustStoreFile != null) { File trustStore = new File(trustStoreFile); if (!trustStore.exists()) { throw new IllegalArgumentException("trustStore File " + trustStoreFile + " not found"); } if (trustStorePass == null) { throw new IllegalArgumentException("trustStorePass is not defined for HTTPS scheme"); } sslConfig.setTrustStore(trustStore).setTrustStorePass(trustStorePass); } return sslConfig; } public static SSLConfig getSSLConfigForSender(String certPass, String keyStorePass, String keyStoreFile, String trustStoreFile, String trustStorePass, List<Parameter> parametersList) { if (certPass == null) { certPass = keyStorePass; } if (trustStoreFile == null || trustStorePass == null) { throw new IllegalArgumentException("TrusstoreFile or trustStorePass not defined for " + "HTTPS scheme"); } SSLConfig sslConfig = new SSLConfig(null, null).setCertPass(null); if (keyStoreFile != null) { File keyStore = new File(keyStoreFile); if (!keyStore.exists()) { throw new IllegalArgumentException("KeyStore File " + trustStoreFile + " not found"); } sslConfig = new SSLConfig(keyStore, keyStorePass).setCertPass(certPass); } File trustStore = new File(trustStoreFile); sslConfig.setTrustStore(trustStore).setTrustStorePass(trustStorePass); sslConfig.setClientMode(true); for (Parameter parameter : parametersList) { if (parameter.getName() 
.equals(Constants.CLIENT_SUPPORT_CIPHERS)) { sslConfig.setCipherSuites(parameter.getValue()); } else if (parameter.getName() .equals(Constants.CLIENT_SUPPORT_HTTPS_PROTOCOLS)) { sslConfig.setEnableProtocols(parameter.getValue()); } else if (parameter.getName() .equals(Constants.CLIENT_ENABLE_SESSION_CREATION)) { sslConfig.setEnableSessionCreation(Boolean.parseBoolean(parameter.getValue())); } } return sslConfig; } }
Fix #146 support variable substitution in keystore file path
http/netty/components/org.wso2.carbon.transport.http.netty/src/main/java/org/wso2/carbon/transport/http/netty/common/Util.java
Fix #146 support variable substitution in keystore file path
Java
apache-2.0
67a0bad78e96efeb74c4210fc5903340893268bb
0
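The commit above replaces plain keystore paths with calls to Utils.substituteVariables so that paths may contain variable placeholders. The sketch below is a generic illustration of that kind of ${...} substitution resolved against system properties; it is not the Carbon implementation, and "carbon.home" and the .jks path are example names chosen only for this sketch.

// Hedged sketch: resolve ${name} placeholders in a path from system properties.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PlaceholderSubstitutionSketch {

    private static final Pattern PLACEHOLDER = Pattern.compile("\\$\\{([^}]+)}");

    static String substitute(String value) {
        Matcher matcher = PLACEHOLDER.matcher(value);
        StringBuffer resolved = new StringBuffer();
        while (matcher.find()) {
            String name = matcher.group(1);
            String replacement = System.getProperty(name);
            if (replacement == null) {
                throw new IllegalArgumentException("Undefined variable: " + name);
            }
            matcher.appendReplacement(resolved, Matcher.quoteReplacement(replacement));
        }
        matcher.appendTail(resolved);
        return resolved.toString();
    }

    public static void main(String[] args) {
        System.setProperty("carbon.home", "/opt/carbon"); // example value
        // A keystore path of the kind passed to getSSLConfigForListener above.
        System.out.println(substitute("${carbon.home}/resources/security/wso2carbon.jks"));
        // -> /opt/carbon/resources/security/wso2carbon.jks
    }
}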
dannil/scb-java-client,dannil/scb-java-client,dannil/scb-api
/* * Copyright 2017 Daniel Nilsson * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this * file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.github.dannil.scbjavaclient.client.financialmarkets.investmentfunds; import static org.junit.Assert.assertNotEquals; import java.util.Arrays; import java.util.Collections; import java.util.List; import com.github.dannil.scbjavaclient.test.runner.Date; import com.github.dannil.scbjavaclient.test.runner.DateJUnitRunner; import com.github.dannil.scbjavaclient.test.utility.RemoteIntegrationTestSuite; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @RunWith(DateJUnitRunner.class) public class FinancialMarketsInvestmentFundsClientIT extends RemoteIntegrationTestSuite { private FinancialMarketsInvestmentFundsClient client; @Before public void setup() { this.client = new FinancialMarketsInvestmentFundsClient(); } @Test @Date("2017-11-22") public void getOwnershipOfInvestmentFund() { assertNotEquals(0, this.client.getOwnershipOfInvestmentFund().size()); } @Test @Date("2017-11-22") public void getOwnershipOfInvestmentFundWithParametersEmptyLists() { assertNotEquals(0, this.client.getOwnershipOfInvestmentFund(Collections.<String>emptyList(), Collections.<String>emptyList(), Collections.<String>emptyList(), Collections.<String>emptyList()).size()); } @Test @Date("2017-11-22") public void getOwnershipOfInvestmentFundWithParameters() { List<String> observations = Arrays.asList("inbet", "utbet"); List<String> types = Arrays.asList("S12251", "S12354"); List<String> sectors = Arrays.asList("S14", "S15"); List<String> quarters = Arrays.asList("2011K1", "2011K2"); assertNotEquals(0, this.client.getOwnershipOfInvestmentFund(observations, types, sectors, quarters).size()); } }
src/test/java/com/github/dannil/scbjavaclient/client/financialmarkets/investmentfunds/FinancialMarketsInvestmentFundsClientIT.java
/* * Copyright 2017 Daniel Nilsson * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this * file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.github.dannil.scbjavaclient.client.financialmarkets.investmentfunds; import static org.junit.Assert.assertNotEquals; import java.util.Arrays; import java.util.Collections; import java.util.List; import com.github.dannil.scbjavaclient.test.runner.Date; import com.github.dannil.scbjavaclient.test.runner.DateJUnitRunner; import com.github.dannil.scbjavaclient.test.utility.RemoteIntegrationTestSuite; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @RunWith(JUnit4.class) public class FinancialMarketsInvestmentFundsClientIT extends RemoteIntegrationTestSuite { private FinancialMarketsInvestmentFundsClient client; @Before public void setup() { this.client = new FinancialMarketsInvestmentFundsClient(); } @Test @Date("2017-11-22") public void getOwnershipOfInvestmentFund() { assertNotEquals(0, this.client.getOwnershipOfInvestmentFund().size()); } @Test @Date("2017-11-22") public void getOwnershipOfInvestmentFundWithParametersEmptyLists() { assertNotEquals(0, this.client.getOwnershipOfInvestmentFund(Collections.<String>emptyList(), Collections.<String>emptyList(), Collections.<String>emptyList(), Collections.<String>emptyList()).size()); } @Test @Date("2017-11-22") public void getOwnershipOfInvestmentFundWithParameters() { List<String> observations = Arrays.asList("inbet", "utbet"); List<String> types = Arrays.asList("S12251", "S12354"); List<String> sectors = Arrays.asList("S14", "S15"); List<String> quarters = Arrays.asList("2011K1", "2011K2"); assertNotEquals(0, this.client.getOwnershipOfInvestmentFund(observations, types, sectors, quarters).size()); } }
Changed to correct runner
src/test/java/com/github/dannil/scbjavaclient/client/financialmarkets/investmentfunds/FinancialMarketsInvestmentFundsClientIT.java
Changed to correct runner
Java
apache-2.0
a48c1165b14bbc9a471526527b402798a1e0ce6f
0
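The commit above switches the integration test from the stock JUnit4 runner to the project's DateJUnitRunner, which consumes the @Date annotation on each test method. The sketch below is not that runner; it is only a hedged illustration of how a custom BlockJUnit4ClassRunner reading a hypothetical @Date annotation could work, with an assumed "skip tests dated before a cutoff" policy and an assumed annotation definition.

// Hedged sketch of a date-aware JUnit 4 runner. The nested Date annotation and
// the cutoff policy are assumptions made only for this example.
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.time.LocalDate;

import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;

public class DateAwareRunnerSketch extends BlockJUnit4ClassRunner {

    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.METHOD)
    public @interface Date {
        String value();
    }

    private static final LocalDate CUTOFF = LocalDate.of(2017, 1, 1); // example cutoff

    public DateAwareRunnerSketch(Class<?> testClass) throws InitializationError {
        super(testClass);
    }

    @Override
    protected boolean isIgnored(FrameworkMethod child) {
        Date date = child.getAnnotation(Date.class);
        if (date != null && LocalDate.parse(date.value()).isBefore(CUTOFF)) {
            return true; // treat tests dated before the cutoff as ignored
        }
        return super.isIgnored(child);
    }
}

A test class would opt in with @RunWith(DateAwareRunnerSketch.class) and annotate each method with @DateAwareRunnerSketch.Date("2017-11-22"), analogous to how the test above pairs @RunWith(DateJUnitRunner.class) with @Date.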
yawkat/netty,huanyi0723/netty,tempbottle/netty,junjiemars/netty,sunbeansoft/netty,gigold/netty,danny200309/netty,lugt/netty,WangJunTYTL/netty,brennangaunce/netty,AchinthaReemal/netty,jenskordowski/netty,zhoffice/netty,jdivy/netty,zhoffice/netty,CodingFabian/netty,duqiao/netty,eonezhang/netty,unei66/netty,qingsong-xu/netty,zzcclp/netty,xingguang2013/netty,liuciuse/netty,satishsaley/netty,exinguu/netty,seetharamireddy540/netty,zxhfirefox/netty,caoyanwei/netty,xingguang2013/netty,smayoorans/netty,sameira/netty,carlbai/netty,yonglehou/netty-1,niuxinghua/netty,dongjiaqiang/netty,x1957/netty,BrunoColin/netty,AnselQiao/netty,shuangqiuan/netty,jenskordowski/netty,duqiao/netty,Alwayswithme/netty,danbev/netty,mcanthony/netty,kvr000/netty,tempbottle/netty,junjiemars/netty,lugt/netty,moyiguket/netty,balaprasanna/netty,IBYoung/netty,shuangqiuan/netty,jdivy/netty,hgl888/netty,sunbeansoft/netty,LuminateWireless/netty,nadeeshaan/netty,AchinthaReemal/netty,WangJunTYTL/netty,mway08/netty,Kingson4Wu/netty,eincs/netty,caoyanwei/netty,lznhust/netty,IBYoung/netty,ioanbsu/netty,nadeeshaan/netty,hgl888/netty,qingsong-xu/netty,junjiemars/netty,seetharamireddy540/netty,liyang1025/netty,LuminateWireless/netty,CodingFabian/netty,Alwayswithme/netty,Mounika-Chirukuri/netty,danbev/netty,sja/netty,orika/netty,wuxiaowei907/netty,orika/netty,chinayin/netty,zhujingling/netty,louiscryan/netty,dongjiaqiang/netty,lightsocks/netty,altihou/netty,liyang1025/netty,x1957/netty,xiexingguang/netty,shenguoquan/netty,LuminateWireless/netty,Mounika-Chirukuri/netty,shelsonjava/netty,dongjiaqiang/netty,shuangqiuan/netty,qingsong-xu/netty,AnselQiao/netty,serioussam/netty,bigheary/netty,ajaysarda/netty,wangyikai/netty,castomer/netty,liuciuse/netty,ioanbsu/netty,MediumOne/netty,shuangqiuan/netty,unei66/netty,altihou/netty,gigold/netty,nkhuyu/netty,youprofit/netty,ioanbsu/netty,gigold/netty,nadeeshaan/netty,timboudreau/netty,altihou/netty,lukw00/netty,f7753/netty,olupotd/netty,IBYoung/netty,Kalvar/netty,ninja-/netty,afds/netty,BrunoColin/netty,AchinthaReemal/netty,eincs/netty,timboudreau/netty,exinguu/netty,youprofit/netty,jenskordowski/netty,wuxiaowei907/netty,xiexingguang/netty,duqiao/netty,ninja-/netty,kjniemi/netty,caoyanwei/netty,ijuma/netty,mosoft521/netty,blademainer/netty,ijuma/netty,WangJunTYTL/netty,wangyikai/netty,bob329/netty,zhoffice/netty,chrisprobst/netty,mcanthony/netty,djchen/netty,yonglehou/netty-1,huuthang1993/netty,zxhfirefox/netty,ijuma/netty,silvaran/netty,yawkat/netty,liuciuse/netty,lukehutch/netty,alkemist/netty,lightsocks/netty,MediumOne/netty,unei66/netty,huanyi0723/netty,zzcclp/netty,jovezhougang/netty,ioanbsu/netty,sameira/netty,bigheary/netty,yawkat/netty,Mounika-Chirukuri/netty,lugt/netty,alkemist/netty,lukw00/netty,afds/netty,AnselQiao/netty,x1957/netty,nkhuyu/netty,MediumOne/netty,yawkat/netty,AchinthaReemal/netty,Alwayswithme/netty,mubarak/netty,qingsong-xu/netty,zhujingling/netty,liuciuse/netty,moyiguket/netty,jovezhougang/netty,sverkera/netty,lznhust/netty,bigheary/netty,mosoft521/netty,afds/netty,danbev/netty,mubarak/netty,huuthang1993/netty,Kingson4Wu/netty,sunbeansoft/netty,hgl888/netty,mcanthony/netty,f7753/netty,nkhuyu/netty,huuthang1993/netty,liyang1025/netty,liyang1025/netty,satishsaley/netty,silvaran/netty,caoyanwei/netty,kvr000/netty,tempbottle/netty,brennangaunce/netty,IBYoung/netty,sverkera/netty,eincs/netty,wuxiaowei907/netty,mway08/netty,chrisprobst/netty,eincs/netty,carlbai/netty,lznhust/netty,danbev/netty,zzcclp/netty,blademainer/netty,olupotd/netty,chinayin/netty,blademainer/netty,xingguang201
3/netty,sverkera/netty,WangJunTYTL/netty,shenguoquan/netty,carlbai/netty,carlbai/netty,youprofit/netty,nkhuyu/netty,castomer/netty,shenguoquan/netty,timboudreau/netty,smayoorans/netty,sunbeansoft/netty,huuthang1993/netty,ninja-/netty,youprofit/netty,jenskordowski/netty,mubarak/netty,seetharamireddy540/netty,ajaysarda/netty,kjniemi/netty,huuthang1993/netty,huanyi0723/netty,wuxiaowei907/netty,sunbeansoft/netty,bob329/netty,orika/netty,balaprasanna/netty,shism/netty,nadeeshaan/netty,zzcclp/netty,xiongzheng/netty,niuxinghua/netty,shelsonjava/netty,bob329/netty,jdivy/netty,lukehutch/netty,alkemist/netty,lightsocks/netty,timboudreau/netty,blademainer/netty,chrisprobst/netty,zxhfirefox/netty,wuxiaowei907/netty,x1957/netty,sverkera/netty,huanyi0723/netty,Alwayswithme/netty,orika/netty,eonezhang/netty,ninja-/netty,timboudreau/netty,WangJunTYTL/netty,exinguu/netty,xiongzheng/netty,xiexingguang/netty,bigheary/netty,shenguoquan/netty,Kingson4Wu/netty,xingguang2013/netty,caoyanwei/netty,youprofit/netty,chrisprobst/netty,olupotd/netty,zzcclp/netty,ajaysarda/netty,shism/netty,Mounika-Chirukuri/netty,LuminateWireless/netty,djchen/netty,nkhuyu/netty,CodingFabian/netty,tempbottle/netty,ninja-/netty,duqiao/netty,balaprasanna/netty,balaprasanna/netty,exinguu/netty,louiscryan/netty,BrunoColin/netty,kvr000/netty,tempbottle/netty,niuxinghua/netty,serioussam/netty,danny200309/netty,xingguang2013/netty,gigold/netty,mcanthony/netty,zhujingling/netty,ajaysarda/netty,xiexingguang/netty,xiexingguang/netty,AnselQiao/netty,bigheary/netty,satishsaley/netty,wangyikai/netty,ioanbsu/netty,serioussam/netty,wangyikai/netty,IBYoung/netty,shuangqiuan/netty,brennangaunce/netty,louiscryan/netty,lukw00/netty,balaprasanna/netty,danny200309/netty,shelsonjava/netty,xiongzheng/netty,kjniemi/netty,eincs/netty,louiscryan/netty,f7753/netty,sja/netty,huanyi0723/netty,zhoffice/netty,lukehutch/netty,unei66/netty,orika/netty,djchen/netty,zxhfirefox/netty,chinayin/netty,brennangaunce/netty,eonezhang/netty,Kingson4Wu/netty,danny200309/netty,yonglehou/netty-1,serioussam/netty,mosoft521/netty,jenskordowski/netty,danbev/netty,alkemist/netty,smayoorans/netty,jovezhougang/netty,louiscryan/netty,mosoft521/netty,serioussam/netty,silvaran/netty,wangyikai/netty,AchinthaReemal/netty,BrunoColin/netty,satishsaley/netty,hgl888/netty,sameira/netty,kjniemi/netty,liuciuse/netty,mway08/netty,zhujingling/netty,lznhust/netty,carlbai/netty,sverkera/netty,LuminateWireless/netty,seetharamireddy540/netty,djchen/netty,smayoorans/netty,shelsonjava/netty,lukehutch/netty,lukw00/netty,liyang1025/netty,f7753/netty,alkemist/netty,jovezhougang/netty,gigold/netty,junjiemars/netty,eonezhang/netty,olupotd/netty,yonglehou/netty-1,bob329/netty,f7753/netty,mway08/netty,seetharamireddy540/netty,brennangaunce/netty,eonezhang/netty,Mounika-Chirukuri/netty,AnselQiao/netty,blademainer/netty,Kalvar/netty,smayoorans/netty,sameira/netty,satishsaley/netty,exinguu/netty,lugt/netty,ajaysarda/netty,dongjiaqiang/netty,BrunoColin/netty,castomer/netty,zxhfirefox/netty,sja/netty,shism/netty,junjiemars/netty,MediumOne/netty,Kingson4Wu/netty,afds/netty,moyiguket/netty,bob329/netty,mway08/netty,niuxinghua/netty,zhoffice/netty,olupotd/netty,sja/netty,qingsong-xu/netty,Kalvar/netty,mubarak/netty,Kalvar/netty,kvr000/netty,mubarak/netty,lightsocks/netty,lightsocks/netty,ijuma/netty,shelsonjava/netty,mosoft521/netty,silvaran/netty,Alwayswithme/netty,shenguoquan/netty,Kalvar/netty,silvaran/netty,lznhust/netty,chinayin/netty,duqiao/netty,jovezhougang/netty,yawkat/netty,moyiguket/netty,zhujingling/netty,Med
iumOne/netty,sja/netty,chinayin/netty,nadeeshaan/netty,sameira/netty,kvr000/netty,yonglehou/netty-1,xiongzheng/netty,castomer/netty,jdivy/netty,afds/netty,x1957/netty,chrisprobst/netty,jdivy/netty,CodingFabian/netty,mcanthony/netty,kjniemi/netty,hgl888/netty,shism/netty,lukw00/netty,altihou/netty,CodingFabian/netty,lukehutch/netty,dongjiaqiang/netty,lugt/netty,altihou/netty,ijuma/netty,xiongzheng/netty,djchen/netty,danny200309/netty,castomer/netty,shism/netty,moyiguket/netty,niuxinghua/netty,unei66/netty
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.buffer; import io.netty.util.internal.PlatformDependent; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.CharBuffer; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; /** * Creates a new {@link ByteBuf} by allocating new space or by wrapping * or copying existing byte arrays, byte buffers and a string. * * <h3>Use static import</h3> * This classes is intended to be used with Java 5 static import statement: * * <pre> * import static io.netty.buffer.{@link Unpooled}.*; * * {@link ByteBuf} heapBuffer = buffer(128); * {@link ByteBuf} directBuffer = directBuffer(256); * {@link ByteBuf} wrappedBuffer = wrappedBuffer(new byte[128], new byte[256]); * {@link ByteBuf} copiedBuffe r = copiedBuffer({@link ByteBuffer}.allocate(128)); * </pre> * * <h3>Allocating a new buffer</h3> * * Three buffer types are provided out of the box. * * <ul> * <li>{@link #buffer(int)} allocates a new fixed-capacity heap buffer.</li> * <li>{@link #directBuffer(int)} allocates a new fixed-capacity direct buffer.</li> * </ul> * * <h3>Creating a wrapped buffer</h3> * * Wrapped buffer is a buffer which is a view of one or more existing * byte arrays and byte buffers. Any changes in the content of the original * array or buffer will be visible in the wrapped buffer. Various wrapper * methods are provided and their name is all {@code wrappedBuffer()}. * You might want to take a look at the methods that accept varargs closely if * you want to create a buffer which is composed of more than one array to * reduce the number of memory copy. * * <h3>Creating a copied buffer</h3> * * Copied buffer is a deep copy of one or more existing byte arrays, byte * buffers or a string. Unlike a wrapped buffer, there's no shared data * between the original data and the copied buffer. Various copy methods are * provided and their name is all {@code copiedBuffer()}. It is also convenient * to use this operation to merge multiple buffers into one buffer. */ public final class Unpooled { private static final ByteBufAllocator ALLOC = UnpooledByteBufAllocator.DEFAULT; /** * Big endian byte order. */ public static final ByteOrder BIG_ENDIAN = ByteOrder.BIG_ENDIAN; /** * Little endian byte order. */ public static final ByteOrder LITTLE_ENDIAN = ByteOrder.LITTLE_ENDIAN; /** * A buffer whose capacity is {@code 0}. */ public static final ByteBuf EMPTY_BUFFER = ALLOC.buffer(0, 0); static { assert EMPTY_BUFFER instanceof EmptyByteBuf: "EMPTY_BUFFER must be an EmptyByteBuf."; } /** * Creates a new big-endian Java heap buffer with reasonably small initial capacity, which * expands its capacity boundlessly on demand. */ public static ByteBuf buffer() { return ALLOC.heapBuffer(); } /** * Creates a new big-endian direct buffer with reasonably small initial capacity, which * expands its capacity boundlessly on demand. 
*/ public static ByteBuf directBuffer() { return ALLOC.directBuffer(); } /** * Creates a new big-endian Java heap buffer with the specified {@code capacity}, which * expands its capacity boundlessly on demand. The new buffer's {@code readerIndex} and * {@code writerIndex} are {@code 0}. */ public static ByteBuf buffer(int initialCapacity) { return ALLOC.heapBuffer(initialCapacity); } /** * Creates a new big-endian direct buffer with the specified {@code capacity}, which * expands its capacity boundlessly on demand. The new buffer's {@code readerIndex} and * {@code writerIndex} are {@code 0}. */ public static ByteBuf directBuffer(int initialCapacity) { return ALLOC.directBuffer(initialCapacity); } /** * Creates a new big-endian Java heap buffer with the specified * {@code initialCapacity}, that may grow up to {@code maxCapacity} * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0}. */ public static ByteBuf buffer(int initialCapacity, int maxCapacity) { return ALLOC.heapBuffer(initialCapacity, maxCapacity); } /** * Creates a new big-endian direct buffer with the specified * {@code initialCapacity}, that may grow up to {@code maxCapacity}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0}. */ public static ByteBuf directBuffer(int initialCapacity, int maxCapacity) { return ALLOC.directBuffer(initialCapacity, maxCapacity); } /** * Creates a new big-endian buffer which wraps the specified {@code array}. * A modification on the specified array's content will be visible to the * returned buffer. */ public static ByteBuf wrappedBuffer(byte[] array) { if (array.length == 0) { return EMPTY_BUFFER; } return new UnpooledHeapByteBuf(ALLOC, array, array.length); } /** * Creates a new big-endian buffer which wraps the sub-region of the * specified {@code array}. A modification on the specified array's * content will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(byte[] array, int offset, int length) { if (length == 0) { return EMPTY_BUFFER; } if (offset == 0 && length == array.length) { return wrappedBuffer(array); } return wrappedBuffer(array).slice(offset, length); } /** * Creates a new buffer which wraps the specified NIO buffer's current * slice. A modification on the specified buffer's content will be * visible to the returned buffer. */ public static ByteBuf wrappedBuffer(ByteBuffer buffer) { if (!buffer.hasRemaining()) { return EMPTY_BUFFER; } if (buffer.hasArray()) { return wrappedBuffer( buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()).order(buffer.order()); } else if (PlatformDependent.hasUnsafe()) { if (buffer.isReadOnly()) { if (buffer.isDirect()) { return new ReadOnlyUnsafeDirectByteBuf(ALLOC, buffer); } else { return new ReadOnlyByteBufferBuf(ALLOC, buffer); } } else { return new UnpooledUnsafeDirectByteBuf(ALLOC, buffer, buffer.remaining()); } } else { if (buffer.isReadOnly()) { return new ReadOnlyByteBufferBuf(ALLOC, buffer); } else { return new UnpooledDirectByteBuf(ALLOC, buffer, buffer.remaining()); } } } /** * Creates a new buffer which wraps the specified buffer's readable bytes. * A modification on the specified buffer's content will be visible to the * returned buffer. */ public static ByteBuf wrappedBuffer(ByteBuf buffer) { if (buffer.isReadable()) { return buffer.slice(); } else { return EMPTY_BUFFER; } } /** * Creates a new big-endian composite buffer which wraps the specified * arrays without copying them. 
A modification on the specified arrays' * content will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(byte[]... arrays) { return wrappedBuffer(16, arrays); } /** * Creates a new big-endian composite buffer which wraps the readable bytes of the * specified buffers without copying them. A modification on the content * of the specified buffers will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(ByteBuf... buffers) { return wrappedBuffer(16, buffers); } /** * Creates a new big-endian composite buffer which wraps the slices of the specified * NIO buffers without copying them. A modification on the content of the * specified buffers will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(ByteBuffer... buffers) { return wrappedBuffer(16, buffers); } /** * Creates a new big-endian composite buffer which wraps the specified * arrays without copying them. A modification on the specified arrays' * content will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(int maxNumComponents, byte[]... arrays) { switch (arrays.length) { case 0: break; case 1: if (arrays[0].length != 0) { return wrappedBuffer(arrays[0]); } break; default: // Get the list of the component, while guessing the byte order. final List<ByteBuf> components = new ArrayList<ByteBuf>(arrays.length); for (byte[] a: arrays) { if (a == null) { break; } if (a.length > 0) { components.add(wrappedBuffer(a)); } } if (!components.isEmpty()) { return new CompositeByteBuf(ALLOC, false, maxNumComponents, components); } } return EMPTY_BUFFER; } /** * Creates a new big-endian composite buffer which wraps the readable bytes of the * specified buffers without copying them. A modification on the content * of the specified buffers will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(int maxNumComponents, ByteBuf... buffers) { switch (buffers.length) { case 0: break; case 1: if (buffers[0].isReadable()) { return wrappedBuffer(buffers[0].order(BIG_ENDIAN)); } break; default: for (ByteBuf b: buffers) { if (b.isReadable()) { return new CompositeByteBuf(ALLOC, false, maxNumComponents, buffers); } } } return EMPTY_BUFFER; } /** * Creates a new big-endian composite buffer which wraps the slices of the specified * NIO buffers without copying them. A modification on the content of the * specified buffers will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(int maxNumComponents, ByteBuffer... buffers) { switch (buffers.length) { case 0: break; case 1: if (buffers[0].hasRemaining()) { return wrappedBuffer(buffers[0].order(BIG_ENDIAN)); } break; default: // Get the list of the component, while guessing the byte order. final List<ByteBuf> components = new ArrayList<ByteBuf>(buffers.length); for (ByteBuffer b: buffers) { if (b == null) { break; } if (b.remaining() > 0) { components.add(wrappedBuffer(b.order(BIG_ENDIAN))); } } if (!components.isEmpty()) { return new CompositeByteBuf(ALLOC, false, maxNumComponents, components); } } return EMPTY_BUFFER; } /** * Returns a new big-endian composite buffer with no components. */ public static CompositeByteBuf compositeBuffer() { return compositeBuffer(16); } /** * Returns a new big-endian composite buffer with no components. */ public static CompositeByteBuf compositeBuffer(int maxNumComponents) { return new CompositeByteBuf(ALLOC, false, maxNumComponents); } /** * Creates a new big-endian buffer whose content is a copy of the * specified {@code array}. 
The new buffer's {@code readerIndex} and * {@code writerIndex} are {@code 0} and {@code array.length} respectively. */ public static ByteBuf copiedBuffer(byte[] array) { if (array.length == 0) { return EMPTY_BUFFER; } return wrappedBuffer(array.clone()); } /** * Creates a new big-endian buffer whose content is a copy of the * specified {@code array}'s sub-region. The new buffer's * {@code readerIndex} and {@code writerIndex} are {@code 0} and * the specified {@code length} respectively. */ public static ByteBuf copiedBuffer(byte[] array, int offset, int length) { if (length == 0) { return EMPTY_BUFFER; } byte[] copy = new byte[length]; System.arraycopy(array, offset, copy, 0, length); return wrappedBuffer(copy); } /** * Creates a new buffer whose content is a copy of the specified * {@code buffer}'s current slice. The new buffer's {@code readerIndex} * and {@code writerIndex} are {@code 0} and {@code buffer.remaining} * respectively. */ public static ByteBuf copiedBuffer(ByteBuffer buffer) { int length = buffer.remaining(); if (length == 0) { return EMPTY_BUFFER; } byte[] copy = new byte[length]; // Duplicate the buffer so we not adjust the position during our get operation. // See https://github.com/netty/netty/issues/3896 ByteBuffer duplicate = buffer.duplicate(); duplicate.get(copy); return wrappedBuffer(copy).order(duplicate.order()); } /** * Creates a new buffer whose content is a copy of the specified * {@code buffer}'s readable bytes. The new buffer's {@code readerIndex} * and {@code writerIndex} are {@code 0} and {@code buffer.readableBytes} * respectively. */ public static ByteBuf copiedBuffer(ByteBuf buffer) { int readable = buffer.readableBytes(); if (readable > 0) { ByteBuf copy = buffer(readable); copy.writeBytes(buffer, buffer.readerIndex(), readable); return copy; } else { return EMPTY_BUFFER; } } /** * Creates a new big-endian buffer whose content is a merged copy of * the specified {@code arrays}. The new buffer's {@code readerIndex} * and {@code writerIndex} are {@code 0} and the sum of all arrays' * {@code length} respectively. */ public static ByteBuf copiedBuffer(byte[]... arrays) { switch (arrays.length) { case 0: return EMPTY_BUFFER; case 1: if (arrays[0].length == 0) { return EMPTY_BUFFER; } else { return copiedBuffer(arrays[0]); } } // Merge the specified arrays into one array. int length = 0; for (byte[] a: arrays) { if (Integer.MAX_VALUE - length < a.length) { throw new IllegalArgumentException( "The total length of the specified arrays is too big."); } length += a.length; } if (length == 0) { return EMPTY_BUFFER; } byte[] mergedArray = new byte[length]; for (int i = 0, j = 0; i < arrays.length; i ++) { byte[] a = arrays[i]; System.arraycopy(a, 0, mergedArray, j, a.length); j += a.length; } return wrappedBuffer(mergedArray); } /** * Creates a new buffer whose content is a merged copy of the specified * {@code buffers}' readable bytes. The new buffer's {@code readerIndex} * and {@code writerIndex} are {@code 0} and the sum of all buffers' * {@code readableBytes} respectively. * * @throws IllegalArgumentException * if the specified buffers' endianness are different from each * other */ public static ByteBuf copiedBuffer(ByteBuf... buffers) { switch (buffers.length) { case 0: return EMPTY_BUFFER; case 1: return copiedBuffer(buffers[0]); } // Merge the specified buffers into one buffer. 
ByteOrder order = null; int length = 0; for (ByteBuf b: buffers) { int bLen = b.readableBytes(); if (bLen <= 0) { continue; } if (Integer.MAX_VALUE - length < bLen) { throw new IllegalArgumentException( "The total length of the specified buffers is too big."); } length += bLen; if (order != null) { if (!order.equals(b.order())) { throw new IllegalArgumentException("inconsistent byte order"); } } else { order = b.order(); } } if (length == 0) { return EMPTY_BUFFER; } byte[] mergedArray = new byte[length]; for (int i = 0, j = 0; i < buffers.length; i ++) { ByteBuf b = buffers[i]; int bLen = b.readableBytes(); b.getBytes(b.readerIndex(), mergedArray, j, bLen); j += bLen; } return wrappedBuffer(mergedArray).order(order); } /** * Creates a new buffer whose content is a merged copy of the specified * {@code buffers}' slices. The new buffer's {@code readerIndex} and * {@code writerIndex} are {@code 0} and the sum of all buffers' * {@code remaining} respectively. * * @throws IllegalArgumentException * if the specified buffers' endianness are different from each * other */ public static ByteBuf copiedBuffer(ByteBuffer... buffers) { switch (buffers.length) { case 0: return EMPTY_BUFFER; case 1: return copiedBuffer(buffers[0]); } // Merge the specified buffers into one buffer. ByteOrder order = null; int length = 0; for (ByteBuffer b: buffers) { int bLen = b.remaining(); if (bLen <= 0) { continue; } if (Integer.MAX_VALUE - length < bLen) { throw new IllegalArgumentException( "The total length of the specified buffers is too big."); } length += bLen; if (order != null) { if (!order.equals(b.order())) { throw new IllegalArgumentException("inconsistent byte order"); } } else { order = b.order(); } } if (length == 0) { return EMPTY_BUFFER; } byte[] mergedArray = new byte[length]; for (int i = 0, j = 0; i < buffers.length; i ++) { // Duplicate the buffer so we not adjust the position during our get operation. // See https://github.com/netty/netty/issues/3896 ByteBuffer b = buffers[i].duplicate(); int bLen = b.remaining(); b.get(mergedArray, j, bLen); j += bLen; } return wrappedBuffer(mergedArray).order(order); } /** * Creates a new big-endian buffer whose content is the specified * {@code string} encoded in the specified {@code charset}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0} and the length of the encoded string respectively. */ public static ByteBuf copiedBuffer(CharSequence string, Charset charset) { if (string == null) { throw new NullPointerException("string"); } if (string instanceof CharBuffer) { return copiedBuffer((CharBuffer) string, charset); } return copiedBuffer(CharBuffer.wrap(string), charset); } /** * Creates a new big-endian buffer whose content is a subregion of * the specified {@code string} encoded in the specified {@code charset}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0} and the length of the encoded string respectively. 
*/ public static ByteBuf copiedBuffer( CharSequence string, int offset, int length, Charset charset) { if (string == null) { throw new NullPointerException("string"); } if (length == 0) { return EMPTY_BUFFER; } if (string instanceof CharBuffer) { CharBuffer buf = (CharBuffer) string; if (buf.hasArray()) { return copiedBuffer( buf.array(), buf.arrayOffset() + buf.position() + offset, length, charset); } buf = buf.slice(); buf.limit(length); buf.position(offset); return copiedBuffer(buf, charset); } return copiedBuffer(CharBuffer.wrap(string, offset, offset + length), charset); } /** * Creates a new big-endian buffer whose content is the specified * {@code array} encoded in the specified {@code charset}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0} and the length of the encoded string respectively. */ public static ByteBuf copiedBuffer(char[] array, Charset charset) { if (array == null) { throw new NullPointerException("array"); } return copiedBuffer(array, 0, array.length, charset); } /** * Creates a new big-endian buffer whose content is a subregion of * the specified {@code array} encoded in the specified {@code charset}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0} and the length of the encoded string respectively. */ public static ByteBuf copiedBuffer(char[] array, int offset, int length, Charset charset) { if (array == null) { throw new NullPointerException("array"); } if (length == 0) { return EMPTY_BUFFER; } return copiedBuffer(CharBuffer.wrap(array, offset, length), charset); } private static ByteBuf copiedBuffer(CharBuffer buffer, Charset charset) { return ByteBufUtil.encodeString0(ALLOC, true, buffer, charset); } /** * Creates a read-only buffer which disallows any modification operations * on the specified {@code buffer}. The new buffer has the same * {@code readerIndex} and {@code writerIndex} with the specified * {@code buffer}. */ public static ByteBuf unmodifiableBuffer(ByteBuf buffer) { ByteOrder endianness = buffer.order(); if (endianness == BIG_ENDIAN) { return new ReadOnlyByteBuf(buffer); } return new ReadOnlyByteBuf(buffer.order(BIG_ENDIAN)).order(LITTLE_ENDIAN); } /** * Creates a new 4-byte big-endian buffer that holds the specified 32-bit integer. */ public static ByteBuf copyInt(int value) { ByteBuf buf = buffer(4); buf.writeInt(value); return buf; } /** * Create a big-endian buffer that holds a sequence of the specified 32-bit integers. */ public static ByteBuf copyInt(int... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 4); for (int v: values) { buffer.writeInt(v); } return buffer; } /** * Creates a new 2-byte big-endian buffer that holds the specified 16-bit integer. */ public static ByteBuf copyShort(int value) { ByteBuf buf = buffer(2); buf.writeShort(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 16-bit integers. */ public static ByteBuf copyShort(short... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 2); for (int v: values) { buffer.writeShort(v); } return buffer; } /** * Create a new big-endian buffer that holds a sequence of the specified 16-bit integers. */ public static ByteBuf copyShort(int... 
values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 2); for (int v: values) { buffer.writeShort(v); } return buffer; } /** * Creates a new 3-byte big-endian buffer that holds the specified 24-bit integer. */ public static ByteBuf copyMedium(int value) { ByteBuf buf = buffer(3); buf.writeMedium(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 24-bit integers. */ public static ByteBuf copyMedium(int... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 3); for (int v: values) { buffer.writeMedium(v); } return buffer; } /** * Creates a new 8-byte big-endian buffer that holds the specified 64-bit integer. */ public static ByteBuf copyLong(long value) { ByteBuf buf = buffer(8); buf.writeLong(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 64-bit integers. */ public static ByteBuf copyLong(long... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 8); for (long v: values) { buffer.writeLong(v); } return buffer; } /** * Creates a new single-byte big-endian buffer that holds the specified boolean value. */ public static ByteBuf copyBoolean(boolean value) { ByteBuf buf = buffer(1); buf.writeBoolean(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified boolean values. */ public static ByteBuf copyBoolean(boolean... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length); for (boolean v: values) { buffer.writeBoolean(v); } return buffer; } /** * Creates a new 4-byte big-endian buffer that holds the specified 32-bit floating point number. */ public static ByteBuf copyFloat(float value) { ByteBuf buf = buffer(4); buf.writeFloat(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 32-bit floating point numbers. */ public static ByteBuf copyFloat(float... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 4); for (float v: values) { buffer.writeFloat(v); } return buffer; } /** * Creates a new 8-byte big-endian buffer that holds the specified 64-bit floating point number. */ public static ByteBuf copyDouble(double value) { ByteBuf buf = buffer(8); buf.writeDouble(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 64-bit floating point numbers. */ public static ByteBuf copyDouble(double... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 8); for (double v: values) { buffer.writeDouble(v); } return buffer; } /** * Return a unreleasable view on the given {@link ByteBuf} which will just ignore release and retain calls. */ public static ByteBuf unreleasableBuffer(ByteBuf buf) { return new UnreleasableByteBuf(buf); } /** * Wrap the given {@link ByteBuf}s in an unmodifiable {@link ByteBuf}. Be aware the returned {@link ByteBuf} will * not try to slice the given {@link ByteBuf}s to reduce GC-Pressure. */ public static ByteBuf unmodifiableBuffer(ByteBuf... buffers) { return new FixedCompositeByteBuf(ALLOC, buffers); } private Unpooled() { // Unused } }
buffer/src/main/java/io/netty/buffer/Unpooled.java
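The Unpooled class Javadoc recorded above draws the distinction between wrapped buffers (views that share storage with the source array) and copied buffers (deep copies). Below is a minimal sketch of that difference, using only the public Unpooled API shown in the file; the class name WrappedVsCopiedExample is illustrative and not part of Netty.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public final class WrappedVsCopiedExample {
    public static void main(String[] args) {
        byte[] source = {1, 2, 3, 4};

        // wrappedBuffer() is a view: writes through the buffer are visible in the array.
        ByteBuf wrapped = Unpooled.wrappedBuffer(source);
        wrapped.setByte(0, 42);
        System.out.println(source[0]);   // 42 -- shared storage

        // copiedBuffer() is a deep copy: the original array stays untouched.
        ByteBuf copied = Unpooled.copiedBuffer(source);
        copied.setByte(1, 99);
        System.out.println(source[1]);   // 2 -- independent storage

        wrapped.release();
        copied.release();
    }
}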
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.buffer; import io.netty.util.internal.PlatformDependent; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.CharBuffer; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; /** * Creates a new {@link ByteBuf} by allocating new space or by wrapping * or copying existing byte arrays, byte buffers and a string. * * <h3>Use static import</h3> * This classes is intended to be used with Java 5 static import statement: * * <pre> * import static io.netty.buffer.{@link Unpooled}.*; * * {@link ByteBuf} heapBuffer = buffer(128); * {@link ByteBuf} directBuffer = directBuffer(256); * {@link ByteBuf} wrappedBuffer = wrappedBuffer(new byte[128], new byte[256]); * {@link ByteBuf} copiedBuffe r = copiedBuffer({@link ByteBuffer}.allocate(128)); * </pre> * * <h3>Allocating a new buffer</h3> * * Three buffer types are provided out of the box. * * <ul> * <li>{@link #buffer(int)} allocates a new fixed-capacity heap buffer.</li> * <li>{@link #directBuffer(int)} allocates a new fixed-capacity direct buffer.</li> * </ul> * * <h3>Creating a wrapped buffer</h3> * * Wrapped buffer is a buffer which is a view of one or more existing * byte arrays and byte buffers. Any changes in the content of the original * array or buffer will be visible in the wrapped buffer. Various wrapper * methods are provided and their name is all {@code wrappedBuffer()}. * You might want to take a look at the methods that accept varargs closely if * you want to create a buffer which is composed of more than one array to * reduce the number of memory copy. * * <h3>Creating a copied buffer</h3> * * Copied buffer is a deep copy of one or more existing byte arrays, byte * buffers or a string. Unlike a wrapped buffer, there's no shared data * between the original data and the copied buffer. Various copy methods are * provided and their name is all {@code copiedBuffer()}. It is also convenient * to use this operation to merge multiple buffers into one buffer. */ public final class Unpooled { private static final ByteBufAllocator ALLOC = UnpooledByteBufAllocator.DEFAULT; /** * Big endian byte order. */ public static final ByteOrder BIG_ENDIAN = ByteOrder.BIG_ENDIAN; /** * Little endian byte order. */ public static final ByteOrder LITTLE_ENDIAN = ByteOrder.LITTLE_ENDIAN; /** * A buffer whose capacity is {@code 0}. */ public static final ByteBuf EMPTY_BUFFER = ALLOC.buffer(0, 0); static { assert EMPTY_BUFFER instanceof EmptyByteBuf: "EMPTY_BUFFER must be an EmptyByteBuf."; } /** * Creates a new big-endian Java heap buffer with reasonably small initial capacity, which * expands its capacity boundlessly on demand. */ public static ByteBuf buffer() { return ALLOC.heapBuffer(); } /** * Creates a new big-endian direct buffer with reasonably small initial capacity, which * expands its capacity boundlessly on demand. 
*/ public static ByteBuf directBuffer() { return ALLOC.directBuffer(); } /** * Creates a new big-endian Java heap buffer with the specified {@code capacity}, which * expands its capacity boundlessly on demand. The new buffer's {@code readerIndex} and * {@code writerIndex} are {@code 0}. */ public static ByteBuf buffer(int initialCapacity) { return ALLOC.heapBuffer(initialCapacity); } /** * Creates a new big-endian direct buffer with the specified {@code capacity}, which * expands its capacity boundlessly on demand. The new buffer's {@code readerIndex} and * {@code writerIndex} are {@code 0}. */ public static ByteBuf directBuffer(int initialCapacity) { return ALLOC.directBuffer(initialCapacity); } /** * Creates a new big-endian Java heap buffer with the specified * {@code initialCapacity}, that may grow up to {@code maxCapacity} * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0}. */ public static ByteBuf buffer(int initialCapacity, int maxCapacity) { return ALLOC.heapBuffer(initialCapacity, maxCapacity); } /** * Creates a new big-endian direct buffer with the specified * {@code initialCapacity}, that may grow up to {@code maxCapacity}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0}. */ public static ByteBuf directBuffer(int initialCapacity, int maxCapacity) { return ALLOC.directBuffer(initialCapacity, maxCapacity); } /** * Creates a new big-endian buffer which wraps the specified {@code array}. * A modification on the specified array's content will be visible to the * returned buffer. */ public static ByteBuf wrappedBuffer(byte[] array) { if (array.length == 0) { return EMPTY_BUFFER; } return new UnpooledHeapByteBuf(ALLOC, array, array.length); } /** * Creates a new big-endian buffer which wraps the sub-region of the * specified {@code array}. A modification on the specified array's * content will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(byte[] array, int offset, int length) { if (length == 0) { return EMPTY_BUFFER; } if (offset == 0 && length == array.length) { return wrappedBuffer(array); } return wrappedBuffer(array).slice(offset, length); } /** * Creates a new buffer which wraps the specified NIO buffer's current * slice. A modification on the specified buffer's content will be * visible to the returned buffer. */ public static ByteBuf wrappedBuffer(ByteBuffer buffer) { if (!buffer.hasRemaining()) { return EMPTY_BUFFER; } if (buffer.hasArray()) { return wrappedBuffer( buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()).order(buffer.order()); } else if (PlatformDependent.hasUnsafe()) { if (buffer.isReadOnly()) { if (buffer.isDirect()) { return new ReadOnlyUnsafeDirectByteBuf(ALLOC, buffer); } else { return new ReadOnlyByteBufferBuf(ALLOC, buffer); } } else { return new UnpooledUnsafeDirectByteBuf(ALLOC, buffer, buffer.remaining()); } } else { if (buffer.isReadOnly()) { return new ReadOnlyByteBufferBuf(ALLOC, buffer); } else { return new UnpooledDirectByteBuf(ALLOC, buffer, buffer.remaining()); } } } /** * Creates a new buffer which wraps the specified buffer's readable bytes. * A modification on the specified buffer's content will be visible to the * returned buffer. */ public static ByteBuf wrappedBuffer(ByteBuf buffer) { if (buffer.isReadable()) { return buffer.slice(); } else { return EMPTY_BUFFER; } } /** * Creates a new big-endian composite buffer which wraps the specified * arrays without copying them. 
A modification on the specified arrays' * content will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(byte[]... arrays) { return wrappedBuffer(16, arrays); } /** * Creates a new big-endian composite buffer which wraps the readable bytes of the * specified buffers without copying them. A modification on the content * of the specified buffers will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(ByteBuf... buffers) { return wrappedBuffer(16, buffers); } /** * Creates a new big-endian composite buffer which wraps the slices of the specified * NIO buffers without copying them. A modification on the content of the * specified buffers will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(ByteBuffer... buffers) { return wrappedBuffer(16, buffers); } /** * Creates a new big-endian composite buffer which wraps the specified * arrays without copying them. A modification on the specified arrays' * content will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(int maxNumComponents, byte[]... arrays) { switch (arrays.length) { case 0: break; case 1: if (arrays[0].length != 0) { return wrappedBuffer(arrays[0]); } break; default: // Get the list of the component, while guessing the byte order. final List<ByteBuf> components = new ArrayList<ByteBuf>(arrays.length); for (byte[] a: arrays) { if (a == null) { break; } if (a.length > 0) { components.add(wrappedBuffer(a)); } } if (!components.isEmpty()) { return new CompositeByteBuf(ALLOC, false, maxNumComponents, components); } } return EMPTY_BUFFER; } /** * Creates a new big-endian composite buffer which wraps the readable bytes of the * specified buffers without copying them. A modification on the content * of the specified buffers will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(int maxNumComponents, ByteBuf... buffers) { switch (buffers.length) { case 0: break; case 1: if (buffers[0].isReadable()) { return wrappedBuffer(buffers[0].order(BIG_ENDIAN)); } break; default: for (ByteBuf b: buffers) { if (b.isReadable()) { return new CompositeByteBuf(ALLOC, false, maxNumComponents, buffers); } } } return EMPTY_BUFFER; } /** * Creates a new big-endian composite buffer which wraps the slices of the specified * NIO buffers without copying them. A modification on the content of the * specified buffers will be visible to the returned buffer. */ public static ByteBuf wrappedBuffer(int maxNumComponents, ByteBuffer... buffers) { switch (buffers.length) { case 0: break; case 1: if (buffers[0].hasRemaining()) { return wrappedBuffer(buffers[0].order(BIG_ENDIAN)); } break; default: // Get the list of the component, while guessing the byte order. final List<ByteBuf> components = new ArrayList<ByteBuf>(buffers.length); for (ByteBuffer b: buffers) { if (b == null) { break; } if (b.remaining() > 0) { components.add(wrappedBuffer(b.order(BIG_ENDIAN))); } } if (!components.isEmpty()) { return new CompositeByteBuf(ALLOC, false, maxNumComponents, components); } } return EMPTY_BUFFER; } /** * Returns a new big-endian composite buffer with no components. */ public static CompositeByteBuf compositeBuffer() { return compositeBuffer(16); } /** * Returns a new big-endian composite buffer with no components. */ public static CompositeByteBuf compositeBuffer(int maxNumComponents) { return new CompositeByteBuf(ALLOC, false, maxNumComponents); } /** * Creates a new big-endian buffer whose content is a copy of the * specified {@code array}. 
The new buffer's {@code readerIndex} and * {@code writerIndex} are {@code 0} and {@code array.length} respectively. */ public static ByteBuf copiedBuffer(byte[] array) { if (array.length == 0) { return EMPTY_BUFFER; } return wrappedBuffer(array.clone()); } /** * Creates a new big-endian buffer whose content is a copy of the * specified {@code array}'s sub-region. The new buffer's * {@code readerIndex} and {@code writerIndex} are {@code 0} and * the specified {@code length} respectively. */ public static ByteBuf copiedBuffer(byte[] array, int offset, int length) { if (length == 0) { return EMPTY_BUFFER; } byte[] copy = new byte[length]; System.arraycopy(array, offset, copy, 0, length); return wrappedBuffer(copy); } /** * Creates a new buffer whose content is a copy of the specified * {@code buffer}'s current slice. The new buffer's {@code readerIndex} * and {@code writerIndex} are {@code 0} and {@code buffer.remaining} * respectively. */ public static ByteBuf copiedBuffer(ByteBuffer buffer) { int length = buffer.remaining(); if (length == 0) { return EMPTY_BUFFER; } byte[] copy = new byte[length]; int position = buffer.position(); try { buffer.get(copy); } finally { buffer.position(position); } return wrappedBuffer(copy).order(buffer.order()); } /** * Creates a new buffer whose content is a copy of the specified * {@code buffer}'s readable bytes. The new buffer's {@code readerIndex} * and {@code writerIndex} are {@code 0} and {@code buffer.readableBytes} * respectively. */ public static ByteBuf copiedBuffer(ByteBuf buffer) { int readable = buffer.readableBytes(); if (readable > 0) { ByteBuf copy = buffer(readable); copy.writeBytes(buffer, buffer.readerIndex(), readable); return copy; } else { return EMPTY_BUFFER; } } /** * Creates a new big-endian buffer whose content is a merged copy of * the specified {@code arrays}. The new buffer's {@code readerIndex} * and {@code writerIndex} are {@code 0} and the sum of all arrays' * {@code length} respectively. */ public static ByteBuf copiedBuffer(byte[]... arrays) { switch (arrays.length) { case 0: return EMPTY_BUFFER; case 1: if (arrays[0].length == 0) { return EMPTY_BUFFER; } else { return copiedBuffer(arrays[0]); } } // Merge the specified arrays into one array. int length = 0; for (byte[] a: arrays) { if (Integer.MAX_VALUE - length < a.length) { throw new IllegalArgumentException( "The total length of the specified arrays is too big."); } length += a.length; } if (length == 0) { return EMPTY_BUFFER; } byte[] mergedArray = new byte[length]; for (int i = 0, j = 0; i < arrays.length; i ++) { byte[] a = arrays[i]; System.arraycopy(a, 0, mergedArray, j, a.length); j += a.length; } return wrappedBuffer(mergedArray); } /** * Creates a new buffer whose content is a merged copy of the specified * {@code buffers}' readable bytes. The new buffer's {@code readerIndex} * and {@code writerIndex} are {@code 0} and the sum of all buffers' * {@code readableBytes} respectively. * * @throws IllegalArgumentException * if the specified buffers' endianness are different from each * other */ public static ByteBuf copiedBuffer(ByteBuf... buffers) { switch (buffers.length) { case 0: return EMPTY_BUFFER; case 1: return copiedBuffer(buffers[0]); } // Merge the specified buffers into one buffer. 
ByteOrder order = null; int length = 0; for (ByteBuf b: buffers) { int bLen = b.readableBytes(); if (bLen <= 0) { continue; } if (Integer.MAX_VALUE - length < bLen) { throw new IllegalArgumentException( "The total length of the specified buffers is too big."); } length += bLen; if (order != null) { if (!order.equals(b.order())) { throw new IllegalArgumentException("inconsistent byte order"); } } else { order = b.order(); } } if (length == 0) { return EMPTY_BUFFER; } byte[] mergedArray = new byte[length]; for (int i = 0, j = 0; i < buffers.length; i ++) { ByteBuf b = buffers[i]; int bLen = b.readableBytes(); b.getBytes(b.readerIndex(), mergedArray, j, bLen); j += bLen; } return wrappedBuffer(mergedArray).order(order); } /** * Creates a new buffer whose content is a merged copy of the specified * {@code buffers}' slices. The new buffer's {@code readerIndex} and * {@code writerIndex} are {@code 0} and the sum of all buffers' * {@code remaining} respectively. * * @throws IllegalArgumentException * if the specified buffers' endianness are different from each * other */ public static ByteBuf copiedBuffer(ByteBuffer... buffers) { switch (buffers.length) { case 0: return EMPTY_BUFFER; case 1: return copiedBuffer(buffers[0]); } // Merge the specified buffers into one buffer. ByteOrder order = null; int length = 0; for (ByteBuffer b: buffers) { int bLen = b.remaining(); if (bLen <= 0) { continue; } if (Integer.MAX_VALUE - length < bLen) { throw new IllegalArgumentException( "The total length of the specified buffers is too big."); } length += bLen; if (order != null) { if (!order.equals(b.order())) { throw new IllegalArgumentException("inconsistent byte order"); } } else { order = b.order(); } } if (length == 0) { return EMPTY_BUFFER; } byte[] mergedArray = new byte[length]; for (int i = 0, j = 0; i < buffers.length; i ++) { ByteBuffer b = buffers[i]; int bLen = b.remaining(); int oldPos = b.position(); b.get(mergedArray, j, bLen); b.position(oldPos); j += bLen; } return wrappedBuffer(mergedArray).order(order); } /** * Creates a new big-endian buffer whose content is the specified * {@code string} encoded in the specified {@code charset}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0} and the length of the encoded string respectively. */ public static ByteBuf copiedBuffer(CharSequence string, Charset charset) { if (string == null) { throw new NullPointerException("string"); } if (string instanceof CharBuffer) { return copiedBuffer((CharBuffer) string, charset); } return copiedBuffer(CharBuffer.wrap(string), charset); } /** * Creates a new big-endian buffer whose content is a subregion of * the specified {@code string} encoded in the specified {@code charset}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0} and the length of the encoded string respectively. 
*/ public static ByteBuf copiedBuffer( CharSequence string, int offset, int length, Charset charset) { if (string == null) { throw new NullPointerException("string"); } if (length == 0) { return EMPTY_BUFFER; } if (string instanceof CharBuffer) { CharBuffer buf = (CharBuffer) string; if (buf.hasArray()) { return copiedBuffer( buf.array(), buf.arrayOffset() + buf.position() + offset, length, charset); } buf = buf.slice(); buf.limit(length); buf.position(offset); return copiedBuffer(buf, charset); } return copiedBuffer(CharBuffer.wrap(string, offset, offset + length), charset); } /** * Creates a new big-endian buffer whose content is the specified * {@code array} encoded in the specified {@code charset}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0} and the length of the encoded string respectively. */ public static ByteBuf copiedBuffer(char[] array, Charset charset) { if (array == null) { throw new NullPointerException("array"); } return copiedBuffer(array, 0, array.length, charset); } /** * Creates a new big-endian buffer whose content is a subregion of * the specified {@code array} encoded in the specified {@code charset}. * The new buffer's {@code readerIndex} and {@code writerIndex} are * {@code 0} and the length of the encoded string respectively. */ public static ByteBuf copiedBuffer(char[] array, int offset, int length, Charset charset) { if (array == null) { throw new NullPointerException("array"); } if (length == 0) { return EMPTY_BUFFER; } return copiedBuffer(CharBuffer.wrap(array, offset, length), charset); } private static ByteBuf copiedBuffer(CharBuffer buffer, Charset charset) { return ByteBufUtil.encodeString0(ALLOC, true, buffer, charset); } /** * Creates a read-only buffer which disallows any modification operations * on the specified {@code buffer}. The new buffer has the same * {@code readerIndex} and {@code writerIndex} with the specified * {@code buffer}. */ public static ByteBuf unmodifiableBuffer(ByteBuf buffer) { ByteOrder endianness = buffer.order(); if (endianness == BIG_ENDIAN) { return new ReadOnlyByteBuf(buffer); } return new ReadOnlyByteBuf(buffer.order(BIG_ENDIAN)).order(LITTLE_ENDIAN); } /** * Creates a new 4-byte big-endian buffer that holds the specified 32-bit integer. */ public static ByteBuf copyInt(int value) { ByteBuf buf = buffer(4); buf.writeInt(value); return buf; } /** * Create a big-endian buffer that holds a sequence of the specified 32-bit integers. */ public static ByteBuf copyInt(int... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 4); for (int v: values) { buffer.writeInt(v); } return buffer; } /** * Creates a new 2-byte big-endian buffer that holds the specified 16-bit integer. */ public static ByteBuf copyShort(int value) { ByteBuf buf = buffer(2); buf.writeShort(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 16-bit integers. */ public static ByteBuf copyShort(short... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 2); for (int v: values) { buffer.writeShort(v); } return buffer; } /** * Create a new big-endian buffer that holds a sequence of the specified 16-bit integers. */ public static ByteBuf copyShort(int... 
values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 2); for (int v: values) { buffer.writeShort(v); } return buffer; } /** * Creates a new 3-byte big-endian buffer that holds the specified 24-bit integer. */ public static ByteBuf copyMedium(int value) { ByteBuf buf = buffer(3); buf.writeMedium(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 24-bit integers. */ public static ByteBuf copyMedium(int... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 3); for (int v: values) { buffer.writeMedium(v); } return buffer; } /** * Creates a new 8-byte big-endian buffer that holds the specified 64-bit integer. */ public static ByteBuf copyLong(long value) { ByteBuf buf = buffer(8); buf.writeLong(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 64-bit integers. */ public static ByteBuf copyLong(long... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 8); for (long v: values) { buffer.writeLong(v); } return buffer; } /** * Creates a new single-byte big-endian buffer that holds the specified boolean value. */ public static ByteBuf copyBoolean(boolean value) { ByteBuf buf = buffer(1); buf.writeBoolean(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified boolean values. */ public static ByteBuf copyBoolean(boolean... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length); for (boolean v: values) { buffer.writeBoolean(v); } return buffer; } /** * Creates a new 4-byte big-endian buffer that holds the specified 32-bit floating point number. */ public static ByteBuf copyFloat(float value) { ByteBuf buf = buffer(4); buf.writeFloat(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 32-bit floating point numbers. */ public static ByteBuf copyFloat(float... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 4); for (float v: values) { buffer.writeFloat(v); } return buffer; } /** * Creates a new 8-byte big-endian buffer that holds the specified 64-bit floating point number. */ public static ByteBuf copyDouble(double value) { ByteBuf buf = buffer(8); buf.writeDouble(value); return buf; } /** * Create a new big-endian buffer that holds a sequence of the specified 64-bit floating point numbers. */ public static ByteBuf copyDouble(double... values) { if (values == null || values.length == 0) { return EMPTY_BUFFER; } ByteBuf buffer = buffer(values.length * 8); for (double v: values) { buffer.writeDouble(v); } return buffer; } /** * Return a unreleasable view on the given {@link ByteBuf} which will just ignore release and retain calls. */ public static ByteBuf unreleasableBuffer(ByteBuf buf) { return new UnreleasableByteBuf(buf); } /** * Wrap the given {@link ByteBuf}s in an unmodifiable {@link ByteBuf}. Be aware the returned {@link ByteBuf} will * not try to slice the given {@link ByteBuf}s to reduce GC-Pressure. */ public static ByteBuf unmodifiableBuffer(ByteBuf... buffers) { return new FixedCompositeByteBuf(ALLOC, buffers); } private Unpooled() { // Unused } }
[#3896] Unpooled.copiedBuffer(ByteBuffer) and copiedBuffer(ByteBuffer...) are not thread-safe. Motivation: Because these methods modify the position of the passed-in ByteBuffers, they are not thread-safe. Modifications: Duplicate the input ByteBuffers before copying their content to byte[]. Result: Unpooled.copiedBuffer(ByteBuffer) and copiedBuffer(ByteBuffer...) are now thread-safe.
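The message above describes the race: the previous implementation moved the caller's ByteBuffer position during the copy and restored it afterwards, so a concurrent reader of the same ByteBuffer could observe the transient position. The sketch below shows the duplicate-based idea the fix relies on, using only standard java.nio calls; the class and method names (DuplicateCopyExample, copyRemaining) are illustrative.

import java.nio.ByteBuffer;

final class DuplicateCopyExample {
    // Copy the remaining bytes without ever touching the caller's position.
    static byte[] copyRemaining(ByteBuffer buffer) {
        byte[] copy = new byte[buffer.remaining()];
        // duplicate() shares the backing content but carries independent
        // position/limit marks, so advancing it is invisible to other threads
        // that read the original buffer concurrently.
        ByteBuffer duplicate = buffer.duplicate();
        duplicate.get(copy);
        return copy;
    }

    public static void main(String[] args) {
        ByteBuffer src = ByteBuffer.wrap(new byte[] {10, 20, 30});
        byte[] copy = copyRemaining(src);
        System.out.println(src.position());   // still 0: the source was never advanced
        System.out.println(copy.length);      // 3
    }
}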
buffer/src/main/java/io/netty/buffer/Unpooled.java
[#3896] Unpooled.copiedBuffer(ByteBuffer) and copiedBuffer(ByteBuffer...) are not thread-safe.
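The class Javadoc also recommends the varargs wrappedBuffer() overloads for composing several arrays into one buffer without a memory copy. A short usage sketch under the same assumptions (illustrative class name, public Unpooled API only):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public final class CompositeWrapExample {
    public static void main(String[] args) {
        byte[] head = {1, 2};
        byte[] tail = {3, 4, 5};

        // One logical buffer over both arrays, no copying involved.
        ByteBuf composite = Unpooled.wrappedBuffer(head, tail);
        System.out.println(composite.readableBytes());  // 5

        // Changes to the underlying arrays remain visible through the view.
        tail[0] = 42;
        System.out.println(composite.getByte(2));       // 42

        composite.release();
    }
}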
Java
apache-2.0
5aa90d2161b58c738a2118ba9cb1838742760dd6
0
StevenLeRoux/warp10-platform,hbs/warp10-platform,hbs/warp10-platform,cityzendata/warp10-platform,StevenLeRoux/warp10-platform,hbs/warp10-platform,StevenLeRoux/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform,hbs/warp10-platform,cityzendata/warp10-platform,cityzendata/warp10-platform
// // Copyright 2016 Cityzen Data // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package io.warp10.script; import io.warp10.continuum.geo.GeoDirectoryClient; import io.warp10.continuum.store.DirectoryClient; import io.warp10.continuum.store.StoreClient; import io.warp10.script.functions.SNAPSHOT; import io.warp10.script.functions.SNAPSHOT.Snapshotable; import io.warp10.warp.sdk.WarpScriptJavaFunction; import java.util.ArrayList; import java.util.EmptyStackException; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; /** * The Einstein Geo Time Serie manipulation environment * usually uses a stack to operate. * * There may be multiple implementations of stacks that * Einstein can use, including some that persist to a * cache or that may spill to disk. * * All those implementations MUST implement this interface. * */ public interface WarpScriptStack { public static final int DEFAULT_MAX_RECURSION_LEVEL = 16; public static final long DEFAULT_FETCH_LIMIT = 100000L; public static final long DEFAULT_GTS_LIMIT = 100000L; public static final long DEFAULT_MAX_OPS = 1000L; public static final int DEFAULT_MAX_BUCKETS = 1000000; public static final int DEFAULT_MAX_GEOCELLS = 10000; public static final int DEFAULT_MAX_DEPTH = 1000; public static final long DEFAULT_MAX_LOOP_DURATION = 5000L; public static final int DEFAULT_MAX_SYMBOLS = 64; public static final int DEFAULT_MAX_WEBCALLS = 4; public static final long DEFAULT_MAX_PIXELS = 1000000L; public static final long DEFAULT_URLFETCH_LIMIT = 64; public static final long DEFAULT_URLFETCH_MAXSIZE = 1000000L; public static final String MACRO_START = "<%"; public static final String MACRO_END = "%>"; public static final String COMMENT_START = "/*"; public static final String COMMENT_END = "*/"; public static final String MULTILINE_START = "<'"; public static final String MULTILINE_END = "'>"; public static final String SECURE_SCRIPT_START = "<S"; public static final String SECURE_SCRIPT_END = "S>"; public static final String TOP_LEVEL_SECTION = "[TOP]"; /** * Flag indicating whether or not to set section with the current line number */ public static final String ATTRIBUTE_LINENO = "lineno"; /** * Prefix for traceing push/pop */ public static final String ATTRIBUTE_TRACE_PREFIX = "trace.prefix"; /** * Name of current code section, null is unnamed */ public static final String ATTRIBUTE_SECTION_NAME = "section.name"; /** * Flag indicating whether or not the stack is currently in documentation mode */ public static final String ATTRIBUTE_DOCMODE = "docmode"; /** * Flag indicating whether or not the stack is currently in info mode */ public static final String ATTRIBUTE_INFOMODE = "infomode"; /** * Debug depth of the stack. This is the number * of elements to return when an error occurs. */ public static final String ATTRIBUTE_DEBUG_DEPTH = "debug.depth"; /** * Is the stack configured to output strict JSON (i.e with no NaN/Infinity)? 
*/ public static final String ATTRIBUTE_JSON_STRICT = "json.strict"; /** * Maximum number of datapoints that can be fetched in a session */ public static final String ATTRIBUTE_FETCH_LIMIT = "fetch.limit"; public static final String ATTRIBUTE_FETCH_LIMIT_HARD = "fetch.limit.hard"; /** * Maximum number of GTS which can be retrieved from directory in a session */ public static final String ATTRIBUTE_GTS_LIMIT = "gts.limit"; public static final String ATTRIBUTE_GTS_LIMIT_HARD = "gts.limit.hard"; /** * Number of datapoints fetched so far in the session */ public static final String ATTRIBUTE_FETCH_COUNT = "fetch.count"; /** * Number of GTS retrieved so far in the session */ public static final String ATTRIBUTE_GTS_COUNT = "gts.count"; /** * Maximum number of calls to URLFETCH in a session */ public static final String ATTRIBUTE_URLFETCH_LIMIT = "urlfetch.limit"; public static final String ATTRIBUTE_URLFETCH_LIMIT_HARD = "urlfetch.limit.hard"; /** * Number of calls to URLFETCH so far in the sessions */ public static final String ATTRIBUTE_URLFETCH_COUNT = "urlfetch.count"; /** * Maximum size of content retrieved via calls to URLFETCH in a session */ public static final String ATTRIBUTE_URLFETCH_MAXSIZE = "urlfetch.maxsize"; public static final String ATTRIBUTE_URLFETCH_MAXSIZE_HARD = "urlfetch.maxsize.hard"; /** * Current URLFETCH so far in the sessions */ public static final String ATTRIBUTE_URLFETCH_SIZE = "urlfetch.size"; /** * List of elapsed times (in ns) per line */ public static final String ATTRIBUTE_ELAPSED = "elapsed"; /** * Flag indicating whether or not to track elapsed times per script line */ public static final String ATTRIBUTE_TIMINGS = "timings"; /** * Maximum duration of loops in ms and its hard limit */ public static final String ATTRIBUTE_LOOP_MAXDURATION = "loop.maxduration"; public static final String ATTRIBUTE_LOOP_MAXDURATION_HARD = "loop.maxduration.hard"; /** * Maximum recursion depth */ public static final String ATTRIBUTE_RECURSION_MAXDEPTH = "recursion.maxdepth"; public static final String ATTRIBUTE_RECURSION_MAXDEPTH_HARD = "recursion.maxdepth.hard"; /** * Maximum depth of the stack */ public static final String ATTRIBUTE_MAX_DEPTH = "stack.maxdepth"; public static final String ATTRIBUTE_MAX_DEPTH_HARD = "stack.maxdepth.hard"; /** * Maximum number of operations for the stack */ public static final String ATTRIBUTE_MAX_OPS = "stack.maxops"; public static final String ATTRIBUTE_MAX_OPS_HARD = "stack.maxops.hard"; /** * Maximum number of pixels for images created on the stack */ public static final String ATTRIBUTE_MAX_PIXELS = "stack.maxpixels"; public static final String ATTRIBUTE_MAX_PIXELS_HARD = "stack.maxpixels.hard"; /** * Maximum number of buckets in bucketized GTS */ public static final String ATTRIBUTE_MAX_BUCKETS = "stack.maxbuckets"; public static final String ATTRIBUTE_MAX_BUCKETS_HARD = "stack.maxbuckets.hard"; /** * Maximum number of cells if GeoXPShapes */ public static final String ATTRIBUTE_MAX_GEOCELLS = "stack.maxgeocells"; public static final String ATTRIBUTE_MAX_GEOCELLS_HARD = "stack.maxgeocells.hard"; /** * Current number of operations performed on this stack */ public static final String ATTRIBUTE_OPS = "stack.ops"; /** * Maximum number of symbols for the stack */ public static final String ATTRIBUTE_MAX_SYMBOLS = "stack.symbols"; public static final String ATTRIBUTE_MAX_SYMBOLS_HARD = "stack.symbols.hard"; /** * Key for securing scripts */ public static final String ATTRIBUTE_SECURE_KEY = "secure.key"; /** * Flag indicating whether or not 
redefined functions are allowed */ public static final String ATTRIBUTE_ALLOW_REDEFINED = "allow.redefined"; /** * Key for storing an instance of Hadoop's Progressable to report progress to the Hadoop framework */ public static final String ATTRIBUTE_HADOOP_PROGRESSABLE = "hadoop.progressable"; /** * Maximum number of WEBCALL invocations per script run */ public static final String ATTRIBUTE_MAX_WEBCALLS = "stack.maxwebcalls"; /** * Token which was used to authenticate the stack, checked by some protected ops */ public static final String ATTRIBUTE_TOKEN = "stack.token"; /** * Flag indicating if we are currently in a secure macro execution */ public static final String ATTRIBUTE_IN_SECURE_MACRO = "in.secure.macro"; /** * Expiration date (in ms since the epoch) of a macro */ public static final String ATTRIBUTE_MACRO_EXPIRY = "macro.expiry"; /** * List of symbols to export upon script termination as a map of symbol name * to symbol value pushed onto the stack. */ public static final String ATTRIBUTE_EXPORTED_SYMBOLS = "exported.symbols"; /** * Map of headers to return with the response */ public static final String ATTRIBUTE_HEADERS = "response.headers"; /** * Last error encountered in a TRY block */ public static final String ATTRIBUTE_LAST_ERROR = "last.error"; /** * Index of RETURN_DEPTH counter */ public static final int COUNTER_RETURN_DEPTH = 0; public static class StackContext {} public static class Mark {} public static class Macro implements Snapshotable { /** * Flag indicating whether a macro is secure (its content cannot be displayed) or not */ private boolean secure = false; private long fingerprint; /** * Timestamp at which the macro expired, or LONG.MIN_VALUE if no expiry date was set */ private long expiry = Long.MIN_VALUE; private ArrayList<Object> statements = new ArrayList<Object>(); public boolean isExpired() { return (Long.MIN_VALUE != this.expiry) && (this.expiry < System.currentTimeMillis()); } public String toString() { return snapshot(); } public void add(Object o) { this.statements().add(o); } public Object get(int idx) { return this.statements().get(idx); } public int size() { return this.statements().size(); } public void setSize(int size) { if (size < this.statements.size() && size > 0) { int delta = this.statements.size() - size; while(delta > 0) { this.statements.remove(this.statements.size() - 1); delta--; } } else if (0 == size) { this.statements.clear(); } } public List<Object> statements() { return this.statements; } public void addAll(Macro macro) { this.statements().addAll(macro.statements()); } public void setSecure(boolean secure) { this.secure = secure; } public boolean isSecure() { return this.secure; } public long getFingerprint() { return this.fingerprint; } public void setFingerprint(long fingerprint) { this.fingerprint = fingerprint; } public void setExpiry(long expiry) { this.expiry = expiry; } @Override public String snapshot() { StringBuilder sb = new StringBuilder(); sb.append(MACRO_START); sb.append(" "); if (!secure) { for (Object o: this.statements()) { try { SNAPSHOT.addElement(sb, o); } catch (WarpScriptException wse) { sb.append(WarpScriptStack.COMMENT_START); sb.append(" Error while snapshoting element of type '" + o.getClass() + "' "); sb.append(WarpScriptStack.COMMENT_END); } sb.append(" "); } } else { sb.append(WarpScriptStack.COMMENT_START); sb.append(" Secure Macro "); sb.append(WarpScriptStack.COMMENT_END); sb.append(" "); } sb.append(MACRO_END); return sb.toString(); } } /** * Retrieve the StoreClient instance associated with this 
stack. * @return */ public StoreClient getStoreClient(); /** * Retrieve the DirectoryClient instance associated with this stack * @return */ public DirectoryClient getDirectoryClient(); /** * Retrieve the GeoDirectoryClient instance associated with this stack * @return */ public GeoDirectoryClient getGeoDirectoryClient(); /** * Push an object onto the stack * * @param o Object to push onto the stack */ public void push(Object o) throws WarpScriptException; /** * Remove and return the object on top of the stack. * * @return The object on top of the stack * * @throws EmptyStackException if the stack is empty. */ public Object pop() throws EmptyStackException; /** * Remove and return 'N' objects from the top of the * stack. * * 'N' is consumed at the top of the stack prior to * removing and returning the objects. * * * @return An array of 'N' objects, the first being the deepest. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException If 'N' is not present or if * 'N' is invalid or if the stack is not deep enough. */ public Object[] popn() throws WarpScriptException; /** * Return the object on top of the stack without removing * it from the stack. * * @return The object on top of the stack * * @throws EmptyStackException if the stack is empty. */ public Object peek() throws EmptyStackException; /** * Return the object at 'distance' from the top of the stack. * The 'distance' is on top of the stack and is consumed by 'peekn'. * * The object on top the stack is at distance 0. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundException if no valid 'distance' is on top of the stack or if the * requested distance is passed the bottom of the stack. */ public Object peekn() throws WarpScriptException; /** * Return the depth (i.e. number of objects) of the stack * * @return The depth of the stack */ public int depth(); /** * Reset the stack to the given depth */ public void reset(int depth) throws WarpScriptException; /** * Swap the top 2 objects of the stack. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException if the stack is empty or * contains a single element. */ public void swap() throws WarpScriptException; /** * Rotate the top 3 objects of the stack, pushing * the top of the stack down * * D D * C -> B * B A * A (top) C (top) * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException if the stack contains less than 3 objects */ public void rot() throws WarpScriptException; /** * Rotate up the top 'N' objects of the stack. * 'N' is on top of the stack and is consumed by 'roll'. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException if 'N' is not present on top of the stack, * is not a number or if the stack does not have enough elements for the * operation. */ public void roll() throws WarpScriptException; /** * Rotate down the top 'N' objects of the stack. * 'N' is on the top of the stack and is consumed by 'rolld'. * * @throws EmptyStackException if the stack is empty * @throws IndexOutOfBoundsException if 'N' is not present on top of the stack, * is not a number or if the stack does not have enough elements for the * operation. */ public void rolld() throws WarpScriptException; /** * Copy the object at level 'N' on top of the stack. * 'N' is on top of the stack and is consumed by the call to 'pick'. 
* * @throws EmptyStackException * @throws IndexOutOfBoundsException */ public void pick() throws WarpScriptException; /** * Remove the top of the stack. * * @throws EmptyStackException If the stack is empty. */ public void drop() throws WarpScriptException; /** * Remove the top 'N' objects of the stack. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException If 'N' is not present on the top of the stack, * is not a number * or if the stack has fewer than 'N' objects after consuming 'N'. */ public void dropn() throws WarpScriptException; /** * Duplicate the object on top of the stack. * * @throws EmptyStackException if the stack is empty. */ public void dup() throws WarpScriptException; /** * Duplicate the top 'N' objects of the stack. * 'N' is consumed at the top of the stack first. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException if the stack contains less than 'N' objects. */ public void dupn() throws WarpScriptException; /** * Return the object at level 'level' on the stack. * The top of the stack is level 0 * * @param level Level of the object to return * @return The object found at 'level' * @throws WarpScriptException if the stack contains less than 'level' levels */ public Object get(int level) throws WarpScriptException; public void execMulti(String script) throws WarpScriptException; /** * Execute a serie of statements against the stack. * * @param line String containing a space separated list of statements to execute * @return * @throws Exception */ public void exec(String line) throws WarpScriptException; /** * Empty the stack * */ public void clear(); /** * Execute a macro against the stack. * * @param macro Macro instance to execute * @return * @throws WarpScriptException */ public void exec(Macro macro) throws WarpScriptException; /** * Execute a WarpScriptJavaFunction against the stack * * @param function * @throws WarpScriptException */ public void exec(WarpScriptJavaFunction function) throws WarpScriptException; /** * Find a macro by name * * @param macroName Name of macro to find * @throws WarpScriptException if macro is not found */ public Macro find(String macroName) throws WarpScriptException; /** * Execute a macro known by name. * @param macroName * @throws WarpScriptException */ public void run(String macroName) throws WarpScriptException; /** * Produces a String representation of the top 'n' levels of the stack * @param n Number of stack levels to display at most * @return */ public String dump(int n); /** * Return the content associated with the given symbol. * * @param symbol Name of symbol to retrieve * * @return The content associated with 'symbol' or null if 'symbol' is not known. */ public Object load(String symbol); /** * Store the given object under 'symbol'. * * @param symbol Name under which to store a value. * @param value Value to store. */ public void store(String symbol, Object value) throws WarpScriptException; /** * Forget the given symbol * * @param symbol Name of the symbol to forget. */ public void forget(String symbol); /** * Return the current symbol table. * * @return */ public Map<String,Object> getSymbolTable(); /** * Return the current map of redefined functions * @return */ public Map<String,WarpScriptStackFunction> getDefined(); /** * Return a UUID for the instance of EinsteinStack * @return */ public String getUUID(); /** * Set a stack attribute. * * @param key Key under which the attribute should be stored. * @param value Value of the attribute. 
If null, remove the attribute. * @return The previous value of the attribute or null if it was not set. */ public Object setAttribute(String key, Object value); /** * Return the value of an attribute. * * @param key Name of the attribute to retrieve. * * @return The value store unded 'key' or null */ public Object getAttribute(String key); /** * Return the ith counter associated with the stack * @param i * @return */ public AtomicLong getCounter(int i) throws WarpScriptException; /** * Returns a boolean indicating whether or not the stack is authenticated. * * @return The authentication status of the stack */ public boolean isAuthenticated(); /** * Perform a final check to ensure balancing constructs are balanced. * * @throws WarpScriptException if the stack is currently unbalanced. */ public void checkBalanced() throws WarpScriptException; /** * (re)define 'stmt' as a valid statement executing 'macro' * This allows for the overriding of built-in statements * * @param stmt * @param macro */ public void define(String stmt, Macro macro); /** * Push the current stack context (symbols + redefined statements) onto the stack. * */ public void save() throws WarpScriptException; /** * Restore the stack context from that on top of the stack */ public void restore() throws WarpScriptException; }
warp10/src/main/java/io/warp10/script/WarpScriptStack.java
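The Javadoc in the interface above specifies the contracts of the stack-manipulation operations (swap, rot, roll, pick, dup) without showing their effect on concrete values. The class below is only a toy, list-backed sketch of the documented swap and rot semantics; the name ToyStack is invented for the example, error handling is simplified, and it is not Warp 10's actual stack implementation.

import java.util.ArrayList;
import java.util.EmptyStackException;
import java.util.List;

// Toy illustration of the swap/rot contracts documented above.
// NOT Warp 10's stack; depth limits and bounds checks are omitted.
public class ToyStack {

    private final List<Object> elements = new ArrayList<>(); // index 0 is the bottom

    public void push(Object o) { elements.add(o); }

    public Object pop() {
        if (elements.isEmpty()) { throw new EmptyStackException(); }
        return elements.remove(elements.size() - 1);
    }

    public int depth() { return elements.size(); }

    // Swap the top two objects of the stack.
    public void swap() {
        Object top = pop();
        Object second = pop();
        push(top);
        push(second);
    }

    // Rotate the top three objects so the deepest of the three ends up on top:
    // from top [A, B, C] -> [C, A, B], matching the diagram in the Javadoc.
    public void rot() {
        Object top = pop();
        Object second = pop();
        Object third = pop();
        push(second);
        push(top);
        push(third);
    }

    public static void main(String[] args) {
        ToyStack s = new ToyStack();
        s.push("C");
        s.push("B");
        s.push("A"); // from top: A, B, C
        s.rot();     // from top: C, A, B
        System.out.println(s.pop()); // prints C
    }
}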
// // Copyright 2016 Cityzen Data // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package io.warp10.script; import io.warp10.continuum.geo.GeoDirectoryClient; import io.warp10.continuum.store.DirectoryClient; import io.warp10.continuum.store.StoreClient; import io.warp10.script.functions.SNAPSHOT; import io.warp10.script.functions.SNAPSHOT.Snapshotable; import io.warp10.warp.sdk.WarpScriptJavaFunction; import java.util.ArrayList; import java.util.EmptyStackException; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; /** * The Einstein Geo Time Serie manipulation environment * usually uses a stack to operate. * * There may be multiple implementations of stacks that * Einstein can use, including some that persist to a * cache or that may spill to disk. * * All those implementations MUST implement this interface. * */ public interface WarpScriptStack { public static final int DEFAULT_MAX_RECURSION_LEVEL = 16; public static final long DEFAULT_FETCH_LIMIT = 100000L; public static final long DEFAULT_GTS_LIMIT = 100000L; public static final long DEFAULT_MAX_OPS = 1000L; public static final int DEFAULT_MAX_BUCKETS = 1000000; public static final int DEFAULT_MAX_GEOCELLS = 10000; public static final int DEFAULT_MAX_DEPTH = 1000; public static final long DEFAULT_MAX_LOOP_DURATION = 5000L; public static final int DEFAULT_MAX_SYMBOLS = 64; public static final int DEFAULT_MAX_WEBCALLS = 4; public static final long DEFAULT_MAX_PIXELS = 1000000L; public static final long DEFAULT_URLFETCH_LIMIT = 64; public static final long DEFAULT_URLFETCH_MAXSIZE = 1000000L; public static final String MACRO_START = "<%"; public static final String MACRO_END = "%>"; public static final String COMMENT_START = "/*"; public static final String COMMENT_END = "*/"; public static final String MULTILINE_START = "<'"; public static final String MULTILINE_END = "'>"; public static final String SECURE_SCRIPT_START = "<S"; public static final String SECURE_SCRIPT_END = "S>"; public static final String TOP_LEVEL_SECTION = "[TOP]"; /** * Flag indicating whether or not to set section with the current line number */ public static final String ATTRIBUTE_LINENO = "lineno"; /** * Prefix for traceing push/pop */ public static final String ATTRIBUTE_TRACE_PREFIX = "trace.prefix"; /** * Name of current code section, null is unnamed */ public static final String ATTRIBUTE_SECTION_NAME = "section.name"; /** * Flag indicating whether or not the stack is currently in documentation mode */ public static final String ATTRIBUTE_DOCMODE = "docmode"; /** * Flag indicating whether or not the stack is currently in info mode */ public static final String ATTRIBUTE_INFOMODE = "infomode"; /** * Debug depth of the stack. This is the number * of elements to return when an error occurs. */ public static final String ATTRIBUTE_DEBUG_DEPTH = "debug.depth"; /** * Is the stack configured to output strict JSON (i.e with no NaN/Infinity)? 
*/ public static final String ATTRIBUTE_JSON_STRICT = "json.strict"; /** * Maximum number of datapoints that can be fetched in a session */ public static final String ATTRIBUTE_FETCH_LIMIT = "fetch.limit"; public static final String ATTRIBUTE_FETCH_LIMIT_HARD = "fetch.limit.hard"; /** * Maximum number of GTS which can be retrieved from directory in a session */ public static final String ATTRIBUTE_GTS_LIMIT = "gts.limit"; public static final String ATTRIBUTE_GTS_LIMIT_HARD = "gts.limit.hard"; /** * Number of datapoints fetched so far in the session */ public static final String ATTRIBUTE_FETCH_COUNT = "fetch.count"; /** * Number of GTS retrieved so far in the session */ public static final String ATTRIBUTE_GTS_COUNT = "gts.count"; /** * Maximum number of calls to URLFETCH in a session */ public static final String ATTRIBUTE_URLFETCH_LIMIT = "urlfetch.limit"; public static final String ATTRIBUTE_URLFETCH_LIMIT_HARD = "urlfetch.limit.hard"; /** * Number of calls to URLFETCH so far in the sessions */ public static final String ATTRIBUTE_URLFETCH_COUNT = "urlfetch.count"; /** * Maximum size of content retrieved via calls to URLFETCH in a session */ public static final String ATTRIBUTE_URLFETCH_MAXSIZE = "urlfetch.maxsize"; public static final String ATTRIBUTE_URLFETCH_MAXSIZE_HARD = "urlfetch.maxsize.hard"; /** * Current URLFETCH so far in the sessions */ public static final String ATTRIBUTE_URLFETCH_SIZE = "urlfetch.size"; /** * List of elapsed times (in ns) per line */ public static final String ATTRIBUTE_ELAPSED = "elapsed"; /** * Flag indicating whether or not to track elapsed times per script line */ public static final String ATTRIBUTE_TIMINGS = "timings"; /** * Maximum duration of loops in ms and its hard limit */ public static final String ATTRIBUTE_LOOP_MAXDURATION = "loop.maxduration"; public static final String ATTRIBUTE_LOOP_MAXDURATION_HARD = "loop.maxduration.hard"; /** * Maximum recursion depth */ public static final String ATTRIBUTE_RECURSION_MAXDEPTH = "recursion.maxdepth"; public static final String ATTRIBUTE_RECURSION_MAXDEPTH_HARD = "recursion.maxdepth.hard"; /** * Maximum depth of the stack */ public static final String ATTRIBUTE_MAX_DEPTH = "stack.maxdepth"; public static final String ATTRIBUTE_MAX_DEPTH_HARD = "stack.maxdepth.hard"; /** * Maximum number of operations for the stack */ public static final String ATTRIBUTE_MAX_OPS = "stack.maxops"; public static final String ATTRIBUTE_MAX_OPS_HARD = "stack.maxops.hard"; /** * Maximum number of pixels for images created on the stack */ public static final String ATTRIBUTE_MAX_PIXELS = "stack.maxpixels"; public static final String ATTRIBUTE_MAX_PIXELS_HARD = "stack.maxpixels.hard"; /** * Maximum number of buckets in bucketized GTS */ public static final String ATTRIBUTE_MAX_BUCKETS = "stack.maxbuckets"; public static final String ATTRIBUTE_MAX_BUCKETS_HARD = "stack.maxbuckets.hard"; /** * Maximum number of cells if GeoXPShapes */ public static final String ATTRIBUTE_MAX_GEOCELLS = "stack.maxgeocells"; public static final String ATTRIBUTE_MAX_GEOCELLS_HARD = "stack.maxgeocells.hard"; /** * Current number of operations performed on this stack */ public static final String ATTRIBUTE_OPS = "stack.ops"; /** * Maximum number of symbols for the stack */ public static final String ATTRIBUTE_MAX_SYMBOLS = "stack.symbols"; public static final String ATTRIBUTE_MAX_SYMBOLS_HARD = "stack.symbols.hard"; /** * Key for securing scripts */ public static final String ATTRIBUTE_SECURE_KEY = "secure.key"; /** * Flag indicating whether or not 
redefined functions are allowed */ public static final String ATTRIBUTE_ALLOW_REDEFINED = "allow.redefined"; /** * Key for storing an instance of Hadoop's Progressable to report progress to the Hadoop framework */ public static final String ATTRIBUTE_HADOOP_PROGRESSABLE = "hadoop.progressable"; /** * Maximum number of WEBCALL invocations per script run */ public static final String ATTRIBUTE_MAX_WEBCALLS = "stack.maxwebcalls"; /** * Token which was used to authenticate the stack, checked by some protected ops */ public static final String ATTRIBUTE_TOKEN = "stack.token"; /** * Flag indicating if we are currently in a secure macro execution */ public static final String ATTRIBUTE_IN_SECURE_MACRO = "in.secure.macro"; /** * Expiration date (in ms since the epoch) of a macro */ public static final String ATTRIBUTE_MACRO_EXPIRY = "macro.expiry"; /** * List of symbols to export upon script termination as a map of symbol name * to symbol value pushed onto the stack. */ public static final String ATTRIBUTE_EXPORTED_SYMBOLS = "exported.symbols"; /** * Map of headers to return with the response */ public static final String ATTRIBUTE_HEADERS = "response.headers"; /** * Last error encountered in a TRY block */ public static final String ATTRIBUTE_LAST_ERROR = "last.error"; /** * Index of RETURN_DEPTH counter */ public static final int COUNTER_RETURN_DEPTH = 0; public static class StackContext {} public static class Mark {} public static class Macro implements Snapshotable { /** * Flag indicating whether a macro is secure (its content cannot be displayed) or not */ private boolean secure = false; private long fingerprint; /** * Timestamp at which the macro expired, or LONG.MIN_VALUE if no expiry date was set */ private long expiry = Long.MIN_VALUE; private ArrayList<Object> statements = new ArrayList<Object>(); public boolean isExpired() { return (Long.MIN_VALUE != this.expiry) && (this.expiry < System.currentTimeMillis()); } public String toString() { StringBuilder sb = new StringBuilder(); sb.append(MACRO_START); sb.append(" "); if (!secure) { for (Object o: this.statements()) { sb.append(StackUtils.toString(o)); sb.append(" "); } } else { sb.append(WarpScriptStack.COMMENT_START); sb.append(" Secure Macro "); sb.append(WarpScriptStack.COMMENT_END); sb.append(" "); } sb.append(MACRO_END); return sb.toString(); } public void add(Object o) { this.statements().add(o); } public Object get(int idx) { return this.statements().get(idx); } public int size() { return this.statements().size(); } public void setSize(int size) { if (size < this.statements.size() && size > 0) { int delta = this.statements.size() - size; while(delta > 0) { this.statements.remove(this.statements.size() - 1); delta--; } } else if (0 == size) { this.statements.clear(); } } public List<Object> statements() { return this.statements; } public void addAll(Macro macro) { this.statements().addAll(macro.statements()); } public void setSecure(boolean secure) { this.secure = secure; } public boolean isSecure() { return this.secure; } public long getFingerprint() { return this.fingerprint; } public void setFingerprint(long fingerprint) { this.fingerprint = fingerprint; } public void setExpiry(long expiry) { this.expiry = expiry; } @Override public String snapshot() { StringBuilder sb = new StringBuilder(); sb.append(MACRO_START); sb.append(" "); if (!secure) { for (Object o: this.statements()) { try { SNAPSHOT.addElement(sb, o); } catch (WarpScriptException wse) { sb.append(WarpScriptStack.COMMENT_START); sb.append(" Error while snapshoting 
element of type '" + o.getClass() + "' "); sb.append(WarpScriptStack.COMMENT_END); } sb.append(" "); } } else { sb.append(WarpScriptStack.COMMENT_START); sb.append(" Secure Macro "); sb.append(WarpScriptStack.COMMENT_END); sb.append(" "); } sb.append(MACRO_END); return sb.toString(); } } /** * Retrieve the StoreClient instance associated with this stack. * @return */ public StoreClient getStoreClient(); /** * Retrieve the DirectoryClient instance associated with this stack * @return */ public DirectoryClient getDirectoryClient(); /** * Retrieve the GeoDirectoryClient instance associated with this stack * @return */ public GeoDirectoryClient getGeoDirectoryClient(); /** * Push an object onto the stack * * @param o Object to push onto the stack */ public void push(Object o) throws WarpScriptException; /** * Remove and return the object on top of the stack. * * @return The object on top of the stack * * @throws EmptyStackException if the stack is empty. */ public Object pop() throws EmptyStackException; /** * Remove and return 'N' objects from the top of the * stack. * * 'N' is consumed at the top of the stack prior to * removing and returning the objects. * * * @return An array of 'N' objects, the first being the deepest. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException If 'N' is not present or if * 'N' is invalid or if the stack is not deep enough. */ public Object[] popn() throws WarpScriptException; /** * Return the object on top of the stack without removing * it from the stack. * * @return The object on top of the stack * * @throws EmptyStackException if the stack is empty. */ public Object peek() throws EmptyStackException; /** * Return the object at 'distance' from the top of the stack. * The 'distance' is on top of the stack and is consumed by 'peekn'. * * The object on top the stack is at distance 0. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundException if no valid 'distance' is on top of the stack or if the * requested distance is passed the bottom of the stack. */ public Object peekn() throws WarpScriptException; /** * Return the depth (i.e. number of objects) of the stack * * @return The depth of the stack */ public int depth(); /** * Reset the stack to the given depth */ public void reset(int depth) throws WarpScriptException; /** * Swap the top 2 objects of the stack. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException if the stack is empty or * contains a single element. */ public void swap() throws WarpScriptException; /** * Rotate the top 3 objects of the stack, pushing * the top of the stack down * * D D * C -> B * B A * A (top) C (top) * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException if the stack contains less than 3 objects */ public void rot() throws WarpScriptException; /** * Rotate up the top 'N' objects of the stack. * 'N' is on top of the stack and is consumed by 'roll'. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException if 'N' is not present on top of the stack, * is not a number or if the stack does not have enough elements for the * operation. */ public void roll() throws WarpScriptException; /** * Rotate down the top 'N' objects of the stack. * 'N' is on the top of the stack and is consumed by 'rolld'. 
* * @throws EmptyStackException if the stack is empty * @throws IndexOutOfBoundsException if 'N' is not present on top of the stack, * is not a number or if the stack does not have enough elements for the * operation. */ public void rolld() throws WarpScriptException; /** * Copy the object at level 'N' on top of the stack. * 'N' is on top of the stack and is consumed by the call to 'pick'. * * @throws EmptyStackException * @throws IndexOutOfBoundsException */ public void pick() throws WarpScriptException; /** * Remove the top of the stack. * * @throws EmptyStackException If the stack is empty. */ public void drop() throws WarpScriptException; /** * Remove the top 'N' objects of the stack. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException If 'N' is not present on the top of the stack, * is not a number * or if the stack has fewer than 'N' objects after consuming 'N'. */ public void dropn() throws WarpScriptException; /** * Duplicate the object on top of the stack. * * @throws EmptyStackException if the stack is empty. */ public void dup() throws WarpScriptException; /** * Duplicate the top 'N' objects of the stack. * 'N' is consumed at the top of the stack first. * * @throws EmptyStackException if the stack is empty. * @throws IndexOutOfBoundsException if the stack contains less than 'N' objects. */ public void dupn() throws WarpScriptException; /** * Return the object at level 'level' on the stack. * The top of the stack is level 0 * * @param level Level of the object to return * @return The object found at 'level' * @throws WarpScriptException if the stack contains less than 'level' levels */ public Object get(int level) throws WarpScriptException; public void execMulti(String script) throws WarpScriptException; /** * Execute a serie of statements against the stack. * * @param line String containing a space separated list of statements to execute * @return * @throws Exception */ public void exec(String line) throws WarpScriptException; /** * Empty the stack * */ public void clear(); /** * Execute a macro against the stack. * * @param macro Macro instance to execute * @return * @throws WarpScriptException */ public void exec(Macro macro) throws WarpScriptException; /** * Execute a WarpScriptJavaFunction against the stack * * @param function * @throws WarpScriptException */ public void exec(WarpScriptJavaFunction function) throws WarpScriptException; /** * Find a macro by name * * @param macroName Name of macro to find * @throws WarpScriptException if macro is not found */ public Macro find(String macroName) throws WarpScriptException; /** * Execute a macro known by name. * @param macroName * @throws WarpScriptException */ public void run(String macroName) throws WarpScriptException; /** * Produces a String representation of the top 'n' levels of the stack * @param n Number of stack levels to display at most * @return */ public String dump(int n); /** * Return the content associated with the given symbol. * * @param symbol Name of symbol to retrieve * * @return The content associated with 'symbol' or null if 'symbol' is not known. */ public Object load(String symbol); /** * Store the given object under 'symbol'. * * @param symbol Name under which to store a value. * @param value Value to store. */ public void store(String symbol, Object value) throws WarpScriptException; /** * Forget the given symbol * * @param symbol Name of the symbol to forget. */ public void forget(String symbol); /** * Return the current symbol table. 
* * @return */ public Map<String,Object> getSymbolTable(); /** * Return the current map of redefined functions * @return */ public Map<String,WarpScriptStackFunction> getDefined(); /** * Return a UUID for the instance of EinsteinStack * @return */ public String getUUID(); /** * Set a stack attribute. * * @param key Key under which the attribute should be stored. * @param value Value of the attribute. If null, remove the attribute. * @return The previous value of the attribute or null if it was not set. */ public Object setAttribute(String key, Object value); /** * Return the value of an attribute. * * @param key Name of the attribute to retrieve. * * @return The value store unded 'key' or null */ public Object getAttribute(String key); /** * Return the ith counter associated with the stack * @param i * @return */ public AtomicLong getCounter(int i) throws WarpScriptException; /** * Returns a boolean indicating whether or not the stack is authenticated. * * @return The authentication status of the stack */ public boolean isAuthenticated(); /** * Perform a final check to ensure balancing constructs are balanced. * * @throws WarpScriptException if the stack is currently unbalanced. */ public void checkBalanced() throws WarpScriptException; /** * (re)define 'stmt' as a valid statement executing 'macro' * This allows for the overriding of built-in statements * * @param stmt * @param macro */ public void define(String stmt, Macro macro); /** * Push the current stack context (symbols + redefined statements) onto the stack. * */ public void save() throws WarpScriptException; /** * Restore the stack context from that on top of the stack */ public void restore() throws WarpScriptException; }
Modified Macro's toString to use snapshot
warp10/src/main/java/io/warp10/script/WarpScriptStack.java
Modified Macro's toString to use snapshot
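The commit recorded above replaces ad-hoc string building in Macro's toString with the snapshot-based form. The sketch below only illustrates that delegation pattern in isolation: the names Snapshotable and SimpleMacro are invented for the example, and the real element escaping done by SNAPSHOT.addElement is not reproduced.

import java.util.ArrayList;
import java.util.List;

// Generic sketch of a toString() that delegates to a snapshot-style serializer.
// Snapshotable and SimpleMacro are illustrative names, not Warp 10 classes.
interface Snapshotable {
    String snapshot();
}

public class SimpleMacro implements Snapshotable {

    private final List<Object> statements = new ArrayList<>();

    public void add(Object statement) { statements.add(statement); }

    @Override
    public String snapshot() {
        StringBuilder sb = new StringBuilder("<% ");
        for (Object o : statements) {
            // A real implementation would escape/encode each element here.
            sb.append(o).append(' ');
        }
        return sb.append("%>").toString();
    }

    // Keeping toString in sync with the snapshot form avoids two diverging
    // textual representations of the same macro.
    @Override
    public String toString() { return snapshot(); }

    public static void main(String[] args) {
        SimpleMacro m = new SimpleMacro();
        m.add(1);
        m.add("'hello'");
        m.add("DUP");
        System.out.println(m); // <% 1 'hello' DUP %>
    }
}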
Java
apache-2.0
a76616689b4011170a7b29d65975fd7da8c4abcf
0
RyanSkraba/beam,markflyhigh/incubator-beam,lukecwik/incubator-beam,iemejia/incubator-beam,chamikaramj/beam,apache/beam,robertwb/incubator-beam
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow.worker.fn.control; import java.io.Closeable; import java.io.IOException; import java.util.*; import java.util.Map; import org.apache.beam.model.fnexecution.v1.BeamFnApi; import org.apache.beam.model.pipeline.v1.RunnerApi; import org.apache.beam.runners.core.StateNamespace; import org.apache.beam.runners.core.StateNamespaces; import org.apache.beam.runners.core.TimerInternals; import org.apache.beam.runners.core.construction.CoderTranslation; import org.apache.beam.runners.core.construction.RehydratedComponents; import org.apache.beam.runners.core.construction.Timer; import org.apache.beam.runners.core.construction.graph.ExecutableStage; import org.apache.beam.runners.dataflow.worker.DataflowExecutionContext; import org.apache.beam.runners.dataflow.worker.DataflowOperationContext; import org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver; import org.apache.beam.runners.dataflow.worker.util.common.worker.ReceivingOperation; import org.apache.beam.runners.fnexecution.control.*; import org.apache.beam.runners.fnexecution.state.StateRequestHandler; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.fn.data.FnDataReceiver; import org.apache.beam.sdk.state.TimeDomain; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This {@link org.apache.beam.runners.dataflow.worker.util.common.worker.Operation} is responsible * for communicating with the SDK harness and asking it to process a bundle of work. This operation * requests a {@link org.apache.beam.runners.fnexecution.control.RemoteBundle}, sends elements to * SDK and receive processed results from SDK, passing these elements downstream. 
*/ public class ProcessRemoteBundleOperation<InputT> extends ReceivingOperation { private static final Logger LOG = LoggerFactory.getLogger(ProcessRemoteBundleOperation.class); private final StageBundleFactory stageBundleFactory; private static final OutputReceiver[] EMPTY_RECEIVER_ARRAY = new OutputReceiver[0]; private final Map<String, OutputReceiver> outputReceiverMap; private final OutputReceiverFactory receiverFactory = new OutputReceiverFactory() { @Override public FnDataReceiver<?> create(String pCollectionId) { return receivedElement -> receive(pCollectionId, receivedElement); } }; private final StateRequestHandler stateRequestHandler; private final BundleProgressHandler progressHandler; private RemoteBundle remoteBundle; private RemoteBundle timerRemoteBundle; private final DataflowExecutionContext<?> executionContext; private final Map<String, ProcessBundleDescriptors.TimerSpec> timerOutputIdToSpecMap; private final Map<String, Coder<BoundedWindow>> timerWindowCodersMap; private final Map<String, ProcessBundleDescriptors.TimerSpec> timerIdToTimerSpecMap; private final Map<String, Object> timerIdToKey; private final Map<String, Object> timerIdToPayload; private ExecutableStage executableStage; public ProcessRemoteBundleOperation( ExecutableStage executableStage, DataflowExecutionContext<?> executionContext, DataflowOperationContext operationContext, StageBundleFactory stageBundleFactory, Map<String, OutputReceiver> outputReceiverMap) { super(EMPTY_RECEIVER_ARRAY, operationContext); this.stageBundleFactory = stageBundleFactory; this.stateRequestHandler = StateRequestHandler.unsupported(); this.progressHandler = BundleProgressHandler.ignored(); this.executionContext = executionContext; this.timerOutputIdToSpecMap = new HashMap<>(); this.timerWindowCodersMap = new HashMap<>(); this.executableStage = executableStage; this.timerIdToKey = new HashMap<>(); this.timerIdToPayload = new HashMap<>(); this.outputReceiverMap = outputReceiverMap; this.timerIdToTimerSpecMap = new HashMap<>(); ProcessBundleDescriptors.ExecutableProcessBundleDescriptor executableProcessBundleDescriptor = stageBundleFactory.getProcessBundleDescriptor(); BeamFnApi.ProcessBundleDescriptor processBundleDescriptor = executableProcessBundleDescriptor.getProcessBundleDescriptor(); executableProcessBundleDescriptor .getTimerSpecs() .values() .forEach( transformTimerMap -> { for (ProcessBundleDescriptors.TimerSpec timerSpec : transformTimerMap.values()) { timerIdToTimerSpecMap.put(timerSpec.timerId(), timerSpec); timerOutputIdToSpecMap.put(timerSpec.outputCollectionId(), timerSpec); } }); for (RunnerApi.PTransform pTransform : processBundleDescriptor.getTransformsMap().values()) { for (String timerId : timerIdToTimerSpecMap.keySet()) { if (!pTransform.getInputsMap().containsKey(timerId)) { continue; } String timerPCollectionId = pTransform.getInputsMap().get(timerId); RunnerApi.PCollection timerPCollection = processBundleDescriptor.getPcollectionsMap().get(timerPCollectionId); String windowingStrategyId = timerPCollection.getWindowingStrategyId(); RunnerApi.WindowingStrategy windowingStrategy = processBundleDescriptor.getWindowingStrategiesMap().get(windowingStrategyId); String windowingCoderId = windowingStrategy.getWindowCoderId(); RunnerApi.Coder windowingCoder = processBundleDescriptor.getCodersMap().get(windowingCoderId); RehydratedComponents components = RehydratedComponents.forComponents(executableStage.getComponents()); try { timerWindowCodersMap.put( timerId, (Coder<BoundedWindow>) 
CoderTranslation.fromProto(windowingCoder, components)); } catch (IOException e) { LOG.error("Could not retrieve coder for timerId {}. Failed with error: {}", timerId, e.getMessage()); } } } } @Override public void start() throws Exception { try (Closeable scope = context.enterStart()) { super.start(); try { remoteBundle = stageBundleFactory.getBundle(receiverFactory, stateRequestHandler, progressHandler); } catch (Exception e) { throw new RuntimeException("Failed to start remote bundle", e); } try { timerRemoteBundle = stageBundleFactory.getBundle(receiverFactory, stateRequestHandler, progressHandler); } catch (Exception e) { throw new RuntimeException("Failed to start timer remote bundle", e); } } } @Override public void process(Object inputElement) throws Exception { LOG.debug("Sending element: {}", inputElement); String mainInputPCollectionId = executableStage.getInputPCollection().getId(); FnDataReceiver<WindowedValue<?>> mainInputReceiver = remoteBundle.getInputReceivers().get(mainInputPCollectionId); // TODO(BEAM-6274): Is this always true? Do we always send the input element to the main input // receiver? try (Closeable scope = context.enterProcess()) { mainInputReceiver.accept((WindowedValue<?>) inputElement); } catch (Exception e) { LOG.error( "Could not process element {} to receiver {} for pcollection {} with error {}", inputElement, mainInputReceiver, mainInputPCollectionId, e.getMessage()); } } @Override public void finish() throws Exception { try (Closeable scope = context.enterFinish()) { try { // close blocks until all results are received remoteBundle.close(); } catch (Exception e) { throw new RuntimeException("Failed to finish remote bundle", e); } try { // close blocks until all results are received timerRemoteBundle.close(); } catch (Exception e) { throw new RuntimeException("Failed to finish remote bundle", e); } } } private void fireTimers() throws Exception { // TODO(BEAM-6274): Why do we need to namespace this to "user"? 
DataflowExecutionContext.DataflowStepContext stepContext = executionContext.getStepContext((DataflowOperationContext) this.context).namespacedToUser(); // TODO(BEAM-6274): investigate if this is the correct window TimerInternals.TimerData timerData = stepContext.getNextFiredTimer(GlobalWindow.Coder.INSTANCE); while (timerData != null) { // TODO(BEAM-6274): get the correct payload and payload coder StateNamespaces.WindowNamespace windowNamespace = (StateNamespaces.WindowNamespace) timerData.getNamespace(); BoundedWindow window = windowNamespace.getWindow(); WindowedValue<KV<Object, Timer>> timerValue = WindowedValue.of( KV.of( timerIdToKey.get(timerData.getTimerId()), Timer.of(timerData.getTimestamp(), timerIdToPayload.get(timerData.getTimerId()))), timerData.getTimestamp(), Collections.singleton(window), PaneInfo.NO_FIRING); String mainInputId = timerIdToTimerSpecMap.get(timerData.getTimerId()).inputCollectionId(); timerRemoteBundle.getInputReceivers().get(mainInputId).accept(timerValue); // TODO(BEAM-6274): investigate if this is the correct window timerData = stepContext.getNextFiredTimer(GlobalWindow.Coder.INSTANCE); } } private void receive(String pCollectionId, Object receivedElement) throws Exception { LOG.debug("Received element {} for pcollection {}", receivedElement, pCollectionId); // TODO(BEAM-6274): move this out into its own receiver class if (timerOutputIdToSpecMap.containsKey(pCollectionId)) { WindowedValue<KV<Object, Timer>> windowedValue = (WindowedValue<KV<Object, Timer>>) receivedElement; ProcessBundleDescriptors.TimerSpec timerSpec = timerOutputIdToSpecMap.get(pCollectionId); Timer timer = windowedValue.getValue().getValue(); for (BoundedWindow window : windowedValue.getWindows()) { Coder<BoundedWindow> windowCoder = timerWindowCodersMap.get(timerSpec.timerId()); StateNamespace namespace = StateNamespaces.window(windowCoder, window); TimeDomain timeDomain = timerSpec.getTimerSpec().getTimeDomain(); String timerId = timerSpec.timerId(); DataflowExecutionContext.DataflowStepContext stepContext = executionContext.getStepContext((DataflowOperationContext) this.context); TimerInternals timerData = stepContext.namespacedToUser().timerInternals(); timerData.setTimer(namespace, timerId, timer.getTimestamp(), timeDomain); timerIdToKey.put(timerId, windowedValue.getValue().getKey()); timerIdToPayload.put(timerId, timer.getPayload()); fireTimers(); } } else { outputReceiverMap.get(pCollectionId).process((WindowedValue<?>) receivedElement); } } }
runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/control/ProcessRemoteBundleOperation.java
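The receive() method above routes every element returned by the SDK harness on its PCollection id: elements on timer output collections are turned into timers to fire, everything else is forwarded to the matching downstream OutputReceiver. The following is a self-contained sketch of that routing idea only, using plain Java types; the class name and collection ids are invented and no Beam APIs are involved.

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;

// Self-contained sketch of dispatching received elements by collection id.
// Plain Java types only; this is not Beam's OutputReceiver/TimerSpec API.
public class ReceiverDispatch {

    private final Map<String, Consumer<Object>> downstreamReceivers = new HashMap<>();
    private final Set<String> timerOutputCollectionIds;
    private final Consumer<Object> timerHandler;

    public ReceiverDispatch(Set<String> timerOutputCollectionIds, Consumer<Object> timerHandler) {
        this.timerOutputCollectionIds = timerOutputCollectionIds;
        this.timerHandler = timerHandler;
    }

    public void registerReceiver(String pCollectionId, Consumer<Object> receiver) {
        downstreamReceivers.put(pCollectionId, receiver);
    }

    // Elements arriving on a timer output collection go to the timer handler;
    // everything else goes to the receiver registered for that collection id.
    public void receive(String pCollectionId, Object element) {
        if (timerOutputCollectionIds.contains(pCollectionId)) {
            timerHandler.accept(element);
        } else {
            Consumer<Object> receiver = downstreamReceivers.get(pCollectionId);
            if (receiver == null) {
                throw new IllegalStateException("No receiver registered for " + pCollectionId);
            }
            receiver.accept(element);
        }
    }

    public static void main(String[] args) {
        ReceiverDispatch dispatch = new ReceiverDispatch(
                Set.of("timers.out"), t -> System.out.println("timer: " + t));
        dispatch.registerReceiver("main.out", e -> System.out.println("element: " + e));
        dispatch.receive("main.out", "hello");
        dispatch.receive("timers.out", "fire@12:00");
    }
}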
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow.worker.fn.control; import java.io.Closeable; import java.io.IOException; import java.util.*; import java.util.Map; import org.apache.beam.model.fnexecution.v1.BeamFnApi; import org.apache.beam.model.pipeline.v1.RunnerApi; import org.apache.beam.runners.core.StateNamespace; import org.apache.beam.runners.core.StateNamespaces; import org.apache.beam.runners.core.TimerInternals; import org.apache.beam.runners.core.construction.CoderTranslation; import org.apache.beam.runners.core.construction.RehydratedComponents; import org.apache.beam.runners.core.construction.Timer; import org.apache.beam.runners.core.construction.graph.ExecutableStage; import org.apache.beam.runners.dataflow.worker.DataflowExecutionContext; import org.apache.beam.runners.dataflow.worker.DataflowOperationContext; import org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver; import org.apache.beam.runners.dataflow.worker.util.common.worker.ReceivingOperation; import org.apache.beam.runners.fnexecution.control.*; import org.apache.beam.runners.fnexecution.state.StateRequestHandler; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.fn.data.FnDataReceiver; import org.apache.beam.sdk.state.TimeDomain; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This {@link org.apache.beam.runners.dataflow.worker.util.common.worker.Operation} is responsible * for communicating with the SDK harness and asking it to process a bundle of work. This operation * requests a {@link org.apache.beam.runners.fnexecution.control.RemoteBundle}, sends elements to * SDK and receive processed results from SDK, passing these elements downstream. 
*/ public class ProcessRemoteBundleOperation<InputT> extends ReceivingOperation { private static final Logger LOG = LoggerFactory.getLogger(ProcessRemoteBundleOperation.class); private final StageBundleFactory stageBundleFactory; private static final OutputReceiver[] EMPTY_RECEIVER_ARRAY = new OutputReceiver[0]; private final Map<String, OutputReceiver> outputReceiverMap; private final OutputReceiverFactory receiverFactory = new OutputReceiverFactory() { @Override public FnDataReceiver<?> create(String pCollectionId) { return receivedElement -> receive(pCollectionId, receivedElement); } }; private final StateRequestHandler stateRequestHandler; private final BundleProgressHandler progressHandler; private RemoteBundle remoteBundle; private RemoteBundle timerRemoteBundle; private final DataflowExecutionContext<?> executionContext; private final Map<String, ProcessBundleDescriptors.TimerSpec> timerOutputIdToSpecMap; private final Map<String, Coder<BoundedWindow>> timerWindowCodersMap; private final Map<String, ProcessBundleDescriptors.TimerSpec> timerIdToTimerSpecMap; private final Map<String, Object> timerIdToKey; private final Map<String, Object> timerIdToPayload; private ExecutableStage executableStage; private String loggingName; public ProcessRemoteBundleOperation( ExecutableStage executableStage, DataflowExecutionContext<?> executionContext, DataflowOperationContext operationContext, StageBundleFactory stageBundleFactory, Map<String, OutputReceiver> outputReceiverMap) { super(EMPTY_RECEIVER_ARRAY, operationContext); // TODO: Remove this loggingName = executionContext.getStepContext(operationContext).getNameContext().toString(); this.stageBundleFactory = stageBundleFactory; this.stateRequestHandler = StateRequestHandler.unsupported(); this.progressHandler = BundleProgressHandler.ignored(); this.executionContext = executionContext; this.timerOutputIdToSpecMap = new HashMap<>(); this.timerWindowCodersMap = new HashMap<>(); this.executableStage = executableStage; this.timerIdToKey = new HashMap<>(); this.timerIdToPayload = new HashMap<>(); this.outputReceiverMap = outputReceiverMap; this.timerIdToTimerSpecMap = new HashMap<>(); ProcessBundleDescriptors.ExecutableProcessBundleDescriptor executableProcessBundleDescriptor = stageBundleFactory.getProcessBundleDescriptor(); BeamFnApi.ProcessBundleDescriptor processBundleDescriptor = executableProcessBundleDescriptor.getProcessBundleDescriptor(); executableProcessBundleDescriptor .getTimerSpecs() .values() .forEach( transformTimerMap -> { for (ProcessBundleDescriptors.TimerSpec timerSpec : transformTimerMap.values()) { timerIdToTimerSpecMap.put(timerSpec.timerId(), timerSpec); timerOutputIdToSpecMap.put(timerSpec.outputCollectionId(), timerSpec); } }); for (RunnerApi.PTransform pTransform : processBundleDescriptor.getTransformsMap().values()) { for (String timerId : timerIdToTimerSpecMap.keySet()) { if (!pTransform.getInputsMap().containsKey(timerId)) { continue; } String timerPCollectionId = pTransform.getInputsMap().get(timerId); RunnerApi.PCollection timerPCollection = processBundleDescriptor.getPcollectionsMap().get(timerPCollectionId); String windowingStrategyId = timerPCollection.getWindowingStrategyId(); RunnerApi.WindowingStrategy windowingStrategy = processBundleDescriptor.getWindowingStrategiesMap().get(windowingStrategyId); String windowingCoderId = windowingStrategy.getWindowCoderId(); RunnerApi.Coder windowingCoder = processBundleDescriptor.getCodersMap().get(windowingCoderId); RehydratedComponents components = 
RehydratedComponents.forComponents(executableStage.getComponents()); try { timerWindowCodersMap.put( timerId, (Coder<BoundedWindow>) CoderTranslation.fromProto(windowingCoder, components)); } catch (IOException e) { LOG.error(e.getMessage()); } } } } @Override public void start() throws Exception { try (Closeable scope = context.enterStart()) { super.start(); try { remoteBundle = stageBundleFactory.getBundle(receiverFactory, stateRequestHandler, progressHandler); } catch (Exception e) { throw new RuntimeException("Failed to start remote bundle", e); } try { timerRemoteBundle = stageBundleFactory.getBundle(receiverFactory, stateRequestHandler, progressHandler); } catch (Exception e) { throw new RuntimeException("Failed to start timer remote bundle", e); } } } @Override public void process(Object inputElement) throws Exception { LOG.error("[{}] Sending element: {}", loggingName, inputElement); String mainInputPCollectionId = executableStage.getInputPCollection().getId(); FnDataReceiver<WindowedValue<?>> mainInputReceiver = remoteBundle.getInputReceivers().get(mainInputPCollectionId); // TODO(BEAM-6274): Is this always true? Do we always send the input element to the main input // receiver? try (Closeable scope = context.enterProcess()) { mainInputReceiver.accept((WindowedValue<?>) inputElement); } catch (Exception e) { LOG.error( "[{}] Could not process element {} to receiver {} for pcollection {} with error {}", loggingName, inputElement, mainInputReceiver, mainInputPCollectionId, e.getMessage()); } } @Override public void finish() throws Exception { try (Closeable scope = context.enterFinish()) { try { // close blocks until all results are received remoteBundle.close(); } catch (Exception e) { throw new RuntimeException("Failed to finish remote bundle", e); } try { // close blocks until all results are received timerRemoteBundle.close(); } catch (Exception e) { throw new RuntimeException("Failed to finish remote bundle", e); } /* // TODO(BEAM-6274): do we have to put this in the "start" method as well? // The ProcessRemoteBundleOperation has to wait until it has received all elements from the // SDK in case the SDK generated a timer. try (RemoteBundle bundle = stageBundleFactory.getBundle(receiverFactory, stateRequestHandler, progressHandler)) { // TODO(BEAM-6274): Why do we need to namespace this to "user"? 
DataflowExecutionContext.DataflowStepContext stepContext = executionContext .getStepContext((DataflowOperationContext) this.context) .namespacedToUser(); // TODO(BEAM-6274): investigate if this is the correct window TimerInternals.TimerData timerData = stepContext.getNextFiredTimer(GlobalWindow.Coder.INSTANCE); while (timerData != null) { LOG.debug("Found fired timer in start {}", timerData); // TODO(BEAM-6274): get the correct payload and payload coder StateNamespaces.WindowNamespace windowNamespace = (StateNamespaces.WindowNamespace) timerData.getNamespace(); BoundedWindow window = windowNamespace.getWindow(); WindowedValue<KV<Object, Timer>> timerValue = WindowedValue.of( KV.of( timerIdToKey.get(timerData.getTimerId()), Timer.of(timerData.getTimestamp(), new byte[0])), timerData.getTimestamp(), Collections.singleton(window), PaneInfo.NO_FIRING); String mainInputId = timerIdToTimerSpecMap.get(timerData.getTimerId()).inputCollectionId(); bundle.getInputReceivers().get(mainInputId).accept(timerValue); // TODO(BEAM-6274): investigate if this is the correct window timerData = stepContext.getNextFiredTimer(GlobalWindow.Coder.INSTANCE); } }*/ } } private void fireTimers() throws Exception { // TODO(BEAM-6274): Why do we need to namespace this to "user"? DataflowExecutionContext.DataflowStepContext stepContext = executionContext.getStepContext((DataflowOperationContext) this.context).namespacedToUser(); // TODO(BEAM-6274): investigate if this is the correct window TimerInternals.TimerData timerData = stepContext.getNextFiredTimer(GlobalWindow.Coder.INSTANCE); while (timerData != null) { LOG.error("[{}] Found fired timer in 'receive' {}", loggingName, timerData); // TODO(BEAM-6274): get the correct payload and payload coder StateNamespaces.WindowNamespace windowNamespace = (StateNamespaces.WindowNamespace) timerData.getNamespace(); BoundedWindow window = windowNamespace.getWindow(); WindowedValue<KV<Object, Timer>> timerValue = WindowedValue.of( KV.of( timerIdToKey.get(timerData.getTimerId()), Timer.of(timerData.getTimestamp(), timerIdToPayload.get(timerData.getTimerId()))), timerData.getTimestamp(), Collections.singleton(window), PaneInfo.NO_FIRING); String mainInputId = timerIdToTimerSpecMap.get(timerData.getTimerId()).inputCollectionId(); timerRemoteBundle.getInputReceivers().get(mainInputId).accept(timerValue); // TODO(BEAM-6274): investigate if this is the correct window timerData = stepContext.getNextFiredTimer(GlobalWindow.Coder.INSTANCE); } } private void receive(String pCollectionId, Object receivedElement) throws Exception { LOG.error( "[{}] Received element {} for pcollection {}", loggingName, receivedElement, pCollectionId); // TODO(BEAM-6274): move this out into its own receiver class if (timerOutputIdToSpecMap.containsKey(pCollectionId)) { WindowedValue<KV<Object, Timer>> windowedValue = (WindowedValue<KV<Object, Timer>>) receivedElement; ProcessBundleDescriptors.TimerSpec timerSpec = timerOutputIdToSpecMap.get(pCollectionId); Timer timer = windowedValue.getValue().getValue(); for (BoundedWindow window : windowedValue.getWindows()) { Coder<BoundedWindow> windowCoder = timerWindowCodersMap.get(timerSpec.timerId()); StateNamespace namespace = StateNamespaces.window(windowCoder, window); TimeDomain timeDomain = timerSpec.getTimerSpec().getTimeDomain(); String timerId = timerSpec.timerId(); DataflowExecutionContext.DataflowStepContext stepContext = executionContext.getStepContext((DataflowOperationContext) this.context); TimerInternals timerData = 
stepContext.namespacedToUser().timerInternals(); timerData.setTimer(namespace, timerId, timer.getTimestamp(), timeDomain); timerIdToKey.put(timerId, windowedValue.getValue().getKey()); timerIdToPayload.put(timerId, timer.getPayload()); fireTimers(); } } else { outputReceiverMap.get(pCollectionId).process((WindowedValue<?>) receivedElement); } } }
clean up
runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/control/ProcessRemoteBundleOperation.java
clean up
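Comparing the two versions above, the "clean up" commit removes the temporary loggingName field and the commented-out block in finish(), and moves the per-element trace logging from error level back to debug level. The class below is only an illustrative SLF4J pattern for that kind of tracing, assuming SLF4J is on the classpath; the class name and messages are invented.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative only: per-element tracing sits at debug level so routine runs
// are not flooded, while genuine failures still surface at error level.
public class ElementTracer {

    private static final Logger LOG = LoggerFactory.getLogger(ElementTracer.class);

    public void trace(Object element) {
        // Parameterized logging avoids building the message when debug is disabled.
        LOG.debug("Sending element: {}", element);
    }

    public void failed(Object element, Exception cause) {
        // Passing the Throwable as the last argument lets SLF4J attach the stack trace.
        LOG.error("Could not process element {}", element, cause);
    }
}

Using parameterized messages rather than string concatenation keeps the debug path essentially free when the debug level is disabled, which is why per-element traces are usually safe to leave in at that level.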
Java
apache-2.0
a68af98c17fe571cf16f28dcc9f80edad750a7fa
0
Chanven/CommonPullToRefresh
package com.chanven.lib.cptr; import android.content.Context; import android.content.res.TypedArray; import android.support.v7.widget.RecyclerView; import android.util.AttributeSet; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.GridView; import android.widget.Scroller; import android.widget.TextView; import com.chanven.lib.cptr.indicator.PtrIndicator; import com.chanven.lib.cptr.loadmore.DefaultLoadMoreViewFooter; import com.chanven.lib.cptr.loadmore.GridViewHandler; import com.chanven.lib.cptr.loadmore.ILoadMoreViewFactory; import com.chanven.lib.cptr.loadmore.ILoadMoreViewFactory.ILoadMoreView; import com.chanven.lib.cptr.loadmore.ListViewHandler; import com.chanven.lib.cptr.loadmore.LoadMoreHandler; import com.chanven.lib.cptr.loadmore.OnLoadMoreListener; import com.chanven.lib.cptr.loadmore.OnScrollBottomListener; import com.chanven.lib.cptr.loadmore.RecyclerViewHandler; import com.chanven.lib.cptr.utils.PtrCLog; /** * This layout view for "Pull to Refresh(Ptr)" support all of the view, you can contain everything you want. * support: pull to refresh / release to refresh / auto refresh / keep header view while refreshing / hide header view while refreshing * It defines {@link PtrUIHandler}, which allows you customize the UI easily. */ public class PtrFrameLayout extends ViewGroup { // status enum public final static byte PTR_STATUS_INIT = 1; public final static byte PTR_STATUS_PREPARE = 2; public final static byte PTR_STATUS_LOADING = 3; public final static byte PTR_STATUS_COMPLETE = 4; private static final boolean DEBUG_LAYOUT = true; public static boolean DEBUG = false; private static int ID = 1; // auto refresh status private static byte FLAG_AUTO_REFRESH_AT_ONCE = 0x01; private static byte FLAG_AUTO_REFRESH_BUT_LATER = 0x01 << 1; private static byte FLAG_ENABLE_NEXT_PTR_AT_ONCE = 0x01 << 2; private static byte FLAG_PIN_CONTENT = 0x01 << 3; private static byte MASK_AUTO_REFRESH = 0x03; protected final String LOG_TAG = "ptr-frame-" + ++ID; protected View mContent; // optional config for define header and content in xml file private int mHeaderId = 0; private int mContainerId = 0; // config private int mDurationToClose = 200; private int mDurationToCloseHeader = 1000; private boolean mKeepHeaderWhenRefresh = true; private boolean mPullToRefresh = false; private View mHeaderView; private PtrUIHandlerHolder mPtrUIHandlerHolder = PtrUIHandlerHolder.create(); private PtrHandler mPtrHandler; // working parameters private ScrollChecker mScrollChecker; private int mPagingTouchSlop; private int mHeaderHeight; private byte mStatus = PTR_STATUS_INIT; private boolean mDisableWhenHorizontalMove = false; private int mFlag = 0x00; // disable when detect moving horizontally private boolean mPreventForHorizontal = false; private MotionEvent mLastMoveEvent; private PtrUIHandlerHook mRefreshCompleteHook; private int mLoadingMinTime = 500; private long mLoadingStartTime = 0; private PtrIndicator mPtrIndicator; private boolean mHasSendCancelEvent = false; public PtrFrameLayout(Context context) { this(context, null); } public PtrFrameLayout(Context context, AttributeSet attrs) { this(context, attrs, 0); } public PtrFrameLayout(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); mPtrIndicator = new PtrIndicator(); TypedArray arr = context.obtainStyledAttributes(attrs, 
com.chanven.lib.cptr.R.styleable.PtrFrameLayout, 0, 0); if (arr != null) { mHeaderId = arr.getResourceId(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_header, mHeaderId); mContainerId = arr.getResourceId(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_content, mContainerId); mPtrIndicator.setResistance( arr.getFloat(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_resistance, mPtrIndicator.getResistance())); mDurationToClose = arr.getInt(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_duration_to_close, mDurationToClose); mDurationToCloseHeader = arr.getInt(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_duration_to_close_header, mDurationToCloseHeader); float ratio = mPtrIndicator.getRatioOfHeaderToHeightRefresh(); ratio = arr.getFloat(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_ratio_of_header_height_to_refresh, ratio); mPtrIndicator.setRatioOfHeaderHeightToRefresh(ratio); mKeepHeaderWhenRefresh = arr.getBoolean(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_keep_header_when_refresh, mKeepHeaderWhenRefresh); mPullToRefresh = arr.getBoolean(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_pull_to_fresh, mPullToRefresh); arr.recycle(); } mScrollChecker = new ScrollChecker(); final ViewConfiguration conf = ViewConfiguration.get(getContext()); mPagingTouchSlop = conf.getScaledTouchSlop() * 2; } @Override protected void onFinishInflate() { final int childCount = getChildCount(); if (childCount > 2) { throw new IllegalStateException("PtrFrameLayout only can host 2 elements"); } else if (childCount == 2) { if (mHeaderId != 0 && mHeaderView == null) { mHeaderView = findViewById(mHeaderId); } if (mContainerId != 0 && mContent == null) { mContent = findViewById(mContainerId); } // not specify header or content if (mContent == null || mHeaderView == null) { View child1 = getChildAt(0); View child2 = getChildAt(1); if (child1 instanceof PtrUIHandler) { mHeaderView = child1; mContent = child2; } else if (child2 instanceof PtrUIHandler) { mHeaderView = child2; mContent = child1; } else { // both are not specified if (mContent == null && mHeaderView == null) { mHeaderView = child1; mContent = child2; } // only one is specified else { if (mHeaderView == null) { mHeaderView = mContent == child1 ? child2 : child1; } else { mContent = mHeaderView == child1 ? child2 : child1; } } } } } else if (childCount == 1) { mContent = getChildAt(0); } else { TextView errorView = new TextView(getContext()); errorView.setClickable(true); errorView.setTextColor(0xffff6600); errorView.setGravity(Gravity.CENTER); errorView.setTextSize(20); errorView.setText("The content view in PtrFrameLayout is empty. 
Do you forget to specify its id in xml layout file?"); mContent = errorView; addView(mContent); } if (mHeaderView != null) { mHeaderView.bringToFront(); } super.onFinishInflate(); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); if (DEBUG && DEBUG_LAYOUT) { PtrCLog.d(LOG_TAG, "onMeasure frame: width: %s, height: %s, padding: %s %s %s %s", getMeasuredHeight(), getMeasuredWidth(), getPaddingLeft(), getPaddingRight(), getPaddingTop(), getPaddingBottom()); } if (mHeaderView != null) { measureChildWithMargins(mHeaderView, widthMeasureSpec, 0, heightMeasureSpec, 0); MarginLayoutParams lp = (MarginLayoutParams) mHeaderView.getLayoutParams(); mHeaderHeight = mHeaderView.getMeasuredHeight() + lp.topMargin + lp.bottomMargin; mPtrIndicator.setHeaderHeight(mHeaderHeight); } if (mContent != null) { measureContentView(mContent, widthMeasureSpec, heightMeasureSpec); if (DEBUG && DEBUG_LAYOUT) { ViewGroup.MarginLayoutParams lp = (MarginLayoutParams) mContent.getLayoutParams(); PtrCLog.d(LOG_TAG, "onMeasure content, width: %s, height: %s, margin: %s %s %s %s", getMeasuredWidth(), getMeasuredHeight(), lp.leftMargin, lp.topMargin, lp.rightMargin, lp.bottomMargin); PtrCLog.d(LOG_TAG, "onMeasure, currentPos: %s, lastPos: %s, top: %s", mPtrIndicator.getCurrentPosY(), mPtrIndicator.getLastPosY(), mContent.getTop()); } } } private void measureContentView(View child, int parentWidthMeasureSpec, int parentHeightMeasureSpec) { final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams(); final int childWidthMeasureSpec = getChildMeasureSpec(parentWidthMeasureSpec, getPaddingLeft() + getPaddingRight() + lp.leftMargin + lp.rightMargin, lp.width); final int childHeightMeasureSpec = getChildMeasureSpec(parentHeightMeasureSpec, getPaddingTop() + getPaddingBottom() + lp.topMargin, lp.height); child.measure(childWidthMeasureSpec, childHeightMeasureSpec); } @Override protected void onLayout(boolean flag, int i, int j, int k, int l) { layoutChildren(); } private void layoutChildren() { int offsetX = mPtrIndicator.getCurrentPosY(); int paddingLeft = getPaddingLeft(); int paddingTop = getPaddingTop(); if (mHeaderView != null) { MarginLayoutParams lp = (MarginLayoutParams) mHeaderView.getLayoutParams(); final int left = paddingLeft + lp.leftMargin; final int top = paddingTop + lp.topMargin + offsetX - mHeaderHeight; final int right = left + mHeaderView.getMeasuredWidth(); final int bottom = top + mHeaderView.getMeasuredHeight(); mHeaderView.layout(left, top, right, bottom); if (DEBUG && DEBUG_LAYOUT) { PtrCLog.d(LOG_TAG, "onLayout header: %s %s %s %s", left, top, right, bottom); } } if (mContent != null) { if (isPinContent()) { offsetX = 0; } MarginLayoutParams lp = (MarginLayoutParams) mContent.getLayoutParams(); final int left = paddingLeft + lp.leftMargin; final int top = paddingTop + lp.topMargin + offsetX; final int right = left + mContent.getMeasuredWidth(); final int bottom = top + mContent.getMeasuredHeight(); if (DEBUG && DEBUG_LAYOUT) { PtrCLog.d(LOG_TAG, "onLayout content: %s %s %s %s", left, top, right, bottom); } mContent.layout(left, top, right, bottom); } } public boolean dispatchTouchEventSupper(MotionEvent e) { return super.dispatchTouchEvent(e); } @Override public boolean dispatchTouchEvent(MotionEvent e) { if (!isEnabled() || mContent == null || mHeaderView == null) { return dispatchTouchEventSupper(e); } int action = e.getAction(); switch (action) { case MotionEvent.ACTION_UP: case 
MotionEvent.ACTION_CANCEL: mPtrIndicator.onRelease(); if (mPtrIndicator.hasLeftStartPosition()) { if (DEBUG) { PtrCLog.d(LOG_TAG, "call onRelease when user release"); } onRelease(false); if (mPtrIndicator.hasMovedAfterPressedDown()) { sendCancelEvent(); return true; } return dispatchTouchEventSupper(e); } else { return dispatchTouchEventSupper(e); } case MotionEvent.ACTION_DOWN: mHasSendCancelEvent = false; mPtrIndicator.onPressDown(e.getX(), e.getY()); mScrollChecker.abortIfWorking(); mPreventForHorizontal = false; // The cancel event will be sent once the position is moved. // So let the event pass to children. // fix #93, #102 dispatchTouchEventSupper(e); return true; case MotionEvent.ACTION_MOVE: mLastMoveEvent = e; mPtrIndicator.onMove(e.getX(), e.getY()); float offsetX = mPtrIndicator.getOffsetX(); float offsetY = mPtrIndicator.getOffsetY(); if (mDisableWhenHorizontalMove && !mPreventForHorizontal && (Math.abs(offsetX) > mPagingTouchSlop && Math.abs(offsetX) > Math.abs(offsetY))) { if (mPtrIndicator.isInStartPosition()) { mPreventForHorizontal = true; } } if (mPreventForHorizontal) { return dispatchTouchEventSupper(e); } boolean moveDown = offsetY > 0; boolean moveUp = !moveDown; boolean canMoveUp = mPtrIndicator.hasLeftStartPosition(); if (DEBUG) { boolean canMoveDown = mPtrHandler != null && mPtrHandler.checkCanDoRefresh(this, mContent, mHeaderView); PtrCLog.v(LOG_TAG, "ACTION_MOVE: offsetY:%s, currentPos: %s, moveUp: %s, canMoveUp: %s, moveDown: %s: canMoveDown: %s", offsetY, mPtrIndicator.getCurrentPosY(), moveUp, canMoveUp, moveDown, canMoveDown); } // disable move when header not reach top if (moveDown && mPtrHandler != null && !mPtrHandler.checkCanDoRefresh(this, mContent, mHeaderView)) { return dispatchTouchEventSupper(e); } if ((moveUp && canMoveUp) || moveDown) { movePos(offsetY); return true; } } return dispatchTouchEventSupper(e); } /** * if deltaY > 0, move the content down * * @param deltaY */ private void movePos(float deltaY) { // has reached the top if ((deltaY < 0 && mPtrIndicator.isInStartPosition())) { if (DEBUG) { PtrCLog.e(LOG_TAG, String.format("has reached the top")); } return; } int to = mPtrIndicator.getCurrentPosY() + (int) deltaY; // over top if (mPtrIndicator.willOverTop(to)) { if (DEBUG) { PtrCLog.e(LOG_TAG, String.format("over top")); } to = PtrIndicator.POS_START; } mPtrIndicator.setCurrentPos(to); int change = to - mPtrIndicator.getLastPosY(); updatePos(change); } private void updatePos(int change) { if (change == 0) { return; } boolean isUnderTouch = mPtrIndicator.isUnderTouch(); // once moved, cancel event will be sent to child if (isUnderTouch && !mHasSendCancelEvent && mPtrIndicator.hasMovedAfterPressedDown()) { mHasSendCancelEvent = true; sendCancelEvent(); } // leave initiated position or just refresh complete if ((mPtrIndicator.hasJustLeftStartPosition() && mStatus == PTR_STATUS_INIT) || (mPtrIndicator.goDownCrossFinishPosition() && mStatus == PTR_STATUS_COMPLETE && isEnabledNextPtrAtOnce())) { mStatus = PTR_STATUS_PREPARE; mPtrUIHandlerHolder.onUIRefreshPrepare(this); if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshPrepare, mFlag %s", mFlag); } } // back to initiated position if (mPtrIndicator.hasJustBackToStartPosition()) { tryToNotifyReset(); // recover event to children if (isUnderTouch) { sendDownEvent(); } } // Pull to Refresh if (mStatus == PTR_STATUS_PREPARE) { // reach fresh height while moving from top to bottom if (isUnderTouch && !isAutoRefresh() && mPullToRefresh && mPtrIndicator.crossRefreshLineFromTopToBottom()) { 
tryToPerformRefresh(); } // reach header height while auto refresh if (performAutoRefreshButLater() && mPtrIndicator.hasJustReachedHeaderHeightFromTopToBottom()) { tryToPerformRefresh(); } } if (DEBUG) { PtrCLog.v(LOG_TAG, "updatePos: change: %s, current: %s last: %s, top: %s, headerHeight: %s", change, mPtrIndicator.getCurrentPosY(), mPtrIndicator.getLastPosY(), mContent.getTop(), mHeaderHeight); } mHeaderView.offsetTopAndBottom(change); if (!isPinContent()) { mContent.offsetTopAndBottom(change); } invalidate(); if (mPtrUIHandlerHolder.hasHandler()) { mPtrUIHandlerHolder.onUIPositionChange(this, isUnderTouch, mStatus, mPtrIndicator); } onPositionChange(isUnderTouch, mStatus, mPtrIndicator); } protected void onPositionChange(boolean isInTouching, byte status, PtrIndicator mPtrIndicator) { } @SuppressWarnings("unused") public int getHeaderHeight() { return mHeaderHeight; } private void onRelease(boolean stayForLoading) { tryToPerformRefresh(); if (mStatus == PTR_STATUS_LOADING) { // keep header for fresh if (mKeepHeaderWhenRefresh) { // scroll header back if (mPtrIndicator.isOverOffsetToKeepHeaderWhileLoading() && !stayForLoading) { mScrollChecker.tryToScrollTo(mPtrIndicator.getOffsetToKeepHeaderWhileLoading(), mDurationToClose); } else { // do nothing } } else { tryScrollBackToTopWhileLoading(); } } else { if (mStatus == PTR_STATUS_COMPLETE) { notifyUIRefreshComplete(false); } else { tryScrollBackToTopAbortRefresh(); } } } /** * please DO REMEMBER resume the hook * * @param hook */ public void setRefreshCompleteHook(PtrUIHandlerHook hook) { mRefreshCompleteHook = hook; hook.setResumeAction(new Runnable() { @Override public void run() { if (DEBUG) { PtrCLog.d(LOG_TAG, "mRefreshCompleteHook resume."); } notifyUIRefreshComplete(true); } }); } /** * Scroll back to to if is not under touch */ private void tryScrollBackToTop() { if (!mPtrIndicator.isUnderTouch()) { mScrollChecker.tryToScrollTo(PtrIndicator.POS_START, mDurationToCloseHeader); } } /** * just make easier to understand */ private void tryScrollBackToTopWhileLoading() { tryScrollBackToTop(); } /** * just make easier to understand */ private void tryScrollBackToTopAfterComplete() { tryScrollBackToTop(); } /** * just make easier to understand */ private void tryScrollBackToTopAbortRefresh() { tryScrollBackToTop(); } private boolean tryToPerformRefresh() { if (mStatus != PTR_STATUS_PREPARE) { return false; } // if ((mPtrIndicator.isOverOffsetToKeepHeaderWhileLoading() && isAutoRefresh()) || mPtrIndicator.isOverOffsetToRefresh()) { mStatus = PTR_STATUS_LOADING; performRefresh(); } return false; } private void performRefresh() { mLoadingStartTime = System.currentTimeMillis(); if (mPtrUIHandlerHolder.hasHandler()) { mPtrUIHandlerHolder.onUIRefreshBegin(this); if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshBegin"); } } if (mPtrHandler != null) { mPtrHandler.onRefreshBegin(this); } } /** * If at the top and not in loading, reset */ private boolean tryToNotifyReset() { if ((mStatus == PTR_STATUS_COMPLETE || mStatus == PTR_STATUS_PREPARE) && mPtrIndicator.isInStartPosition()) { if (mPtrUIHandlerHolder.hasHandler()) { mPtrUIHandlerHolder.onUIReset(this); if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIReset"); } } mStatus = PTR_STATUS_INIT; clearFlag(); return true; } return false; } protected void onPtrScrollAbort() { if (mPtrIndicator.hasLeftStartPosition() && isAutoRefresh()) { if (DEBUG) { PtrCLog.d(LOG_TAG, "call onRelease after scroll abort"); } onRelease(true); } } protected void onPtrScrollFinish() { if 
(mPtrIndicator.hasLeftStartPosition() && isAutoRefresh()) { if (DEBUG) { PtrCLog.d(LOG_TAG, "call onRelease after scroll finish"); } onRelease(true); } } /** * Detect whether is refreshing. * * @return */ public boolean isRefreshing() { return mStatus == PTR_STATUS_LOADING; } /** * Call this when data is loaded. * The UI will perform complete at once or after a delay, depends on the time elapsed is greater then {@link #mLoadingMinTime} or not. */ final public void refreshComplete() { if (DEBUG) { PtrCLog.i(LOG_TAG, "refreshComplete"); } if (mRefreshCompleteHook != null) { mRefreshCompleteHook.reset(); } int delay = (int) (mLoadingMinTime - (System.currentTimeMillis() - mLoadingStartTime)); if (delay <= 0) { if (DEBUG) { PtrCLog.d(LOG_TAG, "performRefreshComplete at once"); } performRefreshComplete(); } else { postDelayed(new Runnable() { @Override public void run() { performRefreshComplete(); } }, delay); if (DEBUG) { PtrCLog.d(LOG_TAG, "performRefreshComplete after delay: %s", delay); } } } /** * Do refresh complete work when time elapsed is greater than {@link #mLoadingMinTime} */ private void performRefreshComplete() { mStatus = PTR_STATUS_COMPLETE; // if is auto refresh do nothing, wait scroller stop if (mScrollChecker.mIsRunning && isAutoRefresh()) { // do nothing if (DEBUG) { PtrCLog.d(LOG_TAG, "performRefreshComplete do nothing, scrolling: %s, auto refresh: %s", mScrollChecker.mIsRunning, mFlag); } return; } notifyUIRefreshComplete(false); } /** * Do real refresh work. If there is a hook, execute the hook first. * * @param ignoreHook */ private void notifyUIRefreshComplete(boolean ignoreHook) { /** * After hook operation is done, {@link #notifyUIRefreshComplete} will be call in resume action to ignore hook. */ if (mPtrIndicator.hasLeftStartPosition() && !ignoreHook && mRefreshCompleteHook != null) { if (DEBUG) { PtrCLog.d(LOG_TAG, "notifyUIRefreshComplete mRefreshCompleteHook run."); } mRefreshCompleteHook.takeOver(); return; } if (mPtrUIHandlerHolder.hasHandler()) { if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshComplete"); } mPtrUIHandlerHolder.onUIRefreshComplete(this); } mPtrIndicator.onUIRefreshComplete(); tryScrollBackToTopAfterComplete(); tryToNotifyReset(); } public void autoRefresh() { autoRefresh(true, mDurationToCloseHeader); } public void autoRefresh(boolean atOnce) { autoRefresh(atOnce, mDurationToCloseHeader); } private void clearFlag() { // remove auto fresh flag mFlag = mFlag & ~MASK_AUTO_REFRESH; } public void autoRefresh(boolean atOnce, int duration) { if (mStatus != PTR_STATUS_INIT) { return; } mFlag |= atOnce ? FLAG_AUTO_REFRESH_AT_ONCE : FLAG_AUTO_REFRESH_BUT_LATER; mStatus = PTR_STATUS_PREPARE; if (mPtrUIHandlerHolder.hasHandler()) { mPtrUIHandlerHolder.onUIRefreshPrepare(this); if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshPrepare, mFlag %s", mFlag); } } mScrollChecker.tryToScrollTo(mPtrIndicator.getOffsetToRefresh(), duration); if (atOnce) { mStatus = PTR_STATUS_LOADING; performRefresh(); } } public boolean isAutoRefresh() { return (mFlag & MASK_AUTO_REFRESH) > 0; } private boolean performAutoRefreshButLater() { return (mFlag & MASK_AUTO_REFRESH) == FLAG_AUTO_REFRESH_BUT_LATER; } /** * If @param enable has been set to true. The user can perform next PTR at once. 
* * @param enable */ public void setEnabledNextPtrAtOnce(boolean enable) { if (enable) { mFlag = mFlag | FLAG_ENABLE_NEXT_PTR_AT_ONCE; } else { mFlag = mFlag & ~FLAG_ENABLE_NEXT_PTR_AT_ONCE; } } public boolean isEnabledNextPtrAtOnce() { return (mFlag & FLAG_ENABLE_NEXT_PTR_AT_ONCE) > 0; } /** * The content view will now move when {@param pinContent} set to true. * * @param pinContent */ public void setPinContent(boolean pinContent) { if (pinContent) { mFlag = mFlag | FLAG_PIN_CONTENT; } else { mFlag = mFlag & ~FLAG_PIN_CONTENT; } } public boolean isPinContent() { return (mFlag & FLAG_PIN_CONTENT) > 0; } /** * It's useful when working with viewpager. * * @param disable */ public void disableWhenHorizontalMove(boolean disable) { mDisableWhenHorizontalMove = disable; } /** * loading will last at least for so long * * @param time */ public void setLoadingMinTime(int time) { mLoadingMinTime = time; } /** * Not necessary any longer. Once moved, cancel event will be sent to child. * * @param yes */ @Deprecated public void setInterceptEventWhileWorking(boolean yes) { } @SuppressWarnings({"unused"}) public View getContentView() { return mContent; } public void setPtrHandler(PtrHandler ptrHandler) { mPtrHandler = ptrHandler; } public void addPtrUIHandler(PtrUIHandler ptrUIHandler) { PtrUIHandlerHolder.addHandler(mPtrUIHandlerHolder, ptrUIHandler); } @SuppressWarnings({"unused"}) public void removePtrUIHandler(PtrUIHandler ptrUIHandler) { mPtrUIHandlerHolder = PtrUIHandlerHolder.removeHandler(mPtrUIHandlerHolder, ptrUIHandler); } public void setPtrIndicator(PtrIndicator slider) { if (mPtrIndicator != null && mPtrIndicator != slider) { slider.convertFrom(mPtrIndicator); } mPtrIndicator = slider; } @SuppressWarnings({"unused"}) public float getResistance() { return mPtrIndicator.getResistance(); } public void setResistance(float resistance) { mPtrIndicator.setResistance(resistance); } @SuppressWarnings({"unused"}) public float getDurationToClose() { return mDurationToClose; } /** * The duration to return back to the refresh position * * @param duration */ public void setDurationToClose(int duration) { mDurationToClose = duration; } @SuppressWarnings({"unused"}) public long getDurationToCloseHeader() { return mDurationToCloseHeader; } /** * The duration to close time * * @param duration */ public void setDurationToCloseHeader(int duration) { mDurationToCloseHeader = duration; } public void setRatioOfHeaderHeightToRefresh(float ratio) { mPtrIndicator.setRatioOfHeaderHeightToRefresh(ratio); } public int getOffsetToRefresh() { return mPtrIndicator.getOffsetToRefresh(); } @SuppressWarnings({"unused"}) public void setOffsetToRefresh(int offset) { mPtrIndicator.setOffsetToRefresh(offset); } @SuppressWarnings({"unused"}) public float getRatioOfHeaderToHeightRefresh() { return mPtrIndicator.getRatioOfHeaderToHeightRefresh(); } @SuppressWarnings({"unused"}) public void setOffsetToKeepHeaderWhileLoading(int offset) { mPtrIndicator.setOffsetToKeepHeaderWhileLoading(offset); } @SuppressWarnings({"unused"}) public int getOffsetToKeepHeaderWhileLoading() { return mPtrIndicator.getOffsetToKeepHeaderWhileLoading(); } @SuppressWarnings({"unused"}) public boolean isKeepHeaderWhenRefresh() { return mKeepHeaderWhenRefresh; } public void setKeepHeaderWhenRefresh(boolean keepOrNot) { mKeepHeaderWhenRefresh = keepOrNot; } public boolean isPullToRefresh() { return mPullToRefresh; } public void setPullToRefresh(boolean pullToRefresh) { mPullToRefresh = pullToRefresh; } @SuppressWarnings({"unused"}) public View getHeaderView() { 
return mHeaderView; } public void setHeaderView(View header) { if (mHeaderView != null && header != null && mHeaderView != header) { removeView(mHeaderView); } ViewGroup.LayoutParams lp = header.getLayoutParams(); if (lp == null) { lp = new LayoutParams(-1, -2); header.setLayoutParams(lp); } mHeaderView = header; addView(header); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p != null && p instanceof LayoutParams; } @Override protected ViewGroup.LayoutParams generateDefaultLayoutParams() { return new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT); } @Override protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return new LayoutParams(p); } @Override public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } private void sendCancelEvent() { if (DEBUG) { PtrCLog.d(LOG_TAG, "send cancel event"); } // The ScrollChecker will update position and lead to send cancel event when mLastMoveEvent is null. // fix #104, #80, #92 if (mLastMoveEvent == null) { return; } MotionEvent last = mLastMoveEvent; MotionEvent e = MotionEvent.obtain(last.getDownTime(), last.getEventTime() + ViewConfiguration.getLongPressTimeout(), MotionEvent.ACTION_CANCEL, last.getX(), last.getY(), last.getMetaState()); dispatchTouchEventSupper(e); } private void sendDownEvent() { if (DEBUG) { PtrCLog.d(LOG_TAG, "send down event"); } final MotionEvent last = mLastMoveEvent; MotionEvent e = MotionEvent.obtain(last.getDownTime(), last.getEventTime(), MotionEvent.ACTION_DOWN, last.getX(), last.getY(), last.getMetaState()); dispatchTouchEventSupper(e); } public static class LayoutParams extends MarginLayoutParams { public LayoutParams(Context c, AttributeSet attrs) { super(c, attrs); } public LayoutParams(int width, int height) { super(width, height); } @SuppressWarnings({"unused"}) public LayoutParams(MarginLayoutParams source) { super(source); } public LayoutParams(ViewGroup.LayoutParams source) { super(source); } } class ScrollChecker implements Runnable { private int mLastFlingY; private Scroller mScroller; private boolean mIsRunning = false; private int mStart; private int mTo; public ScrollChecker() { mScroller = new Scroller(getContext()); } public void run() { boolean finish = !mScroller.computeScrollOffset() || mScroller.isFinished(); int curY = mScroller.getCurrY(); int deltaY = curY - mLastFlingY; if (DEBUG) { if (deltaY != 0) { PtrCLog.v(LOG_TAG, "scroll: %s, start: %s, to: %s, currentPos: %s, current :%s, last: %s, delta: %s", finish, mStart, mTo, mPtrIndicator.getCurrentPosY(), curY, mLastFlingY, deltaY); } } if (!finish) { mLastFlingY = curY; movePos(deltaY); post(this); } else { finish(); } } private void finish() { if (DEBUG) { PtrCLog.v(LOG_TAG, "finish, currentPos:%s", mPtrIndicator.getCurrentPosY()); } reset(); onPtrScrollFinish(); } private void reset() { mIsRunning = false; mLastFlingY = 0; removeCallbacks(this); } public void abortIfWorking() { if (mIsRunning) { if (!mScroller.isFinished()) { mScroller.forceFinished(true); } onPtrScrollAbort(); reset(); } } public void tryToScrollTo(int to, int duration) { if (mPtrIndicator.isAlreadyHere(to)) { return; } mStart = mPtrIndicator.getCurrentPosY(); mTo = to; int distance = to - mStart; if (DEBUG) { PtrCLog.d(LOG_TAG, "tryToScrollTo: start: %s, distance:%s, to:%s", mStart, distance, to); } removeCallbacks(this); mLastFlingY = 0; // fix #47: Scroller should be reused, 
https://github.com/liaohuqiu/android-Ultra-Pull-To-Refresh/issues/47 if (!mScroller.isFinished()) { mScroller.forceFinished(true); } mScroller.startScroll(0, 0, 0, distance, duration); post(this); mIsRunning = true; } } private boolean isLoadingMore = false; private boolean isAutoLoadMoreEnable = true; private boolean isLoadMoreEnable = false; private boolean hasInitLoadMoreView = false; private ILoadMoreViewFactory loadMoreViewFactory; private ILoadMoreView mLoadMoreView; private LoadMoreHandler mLoadMoreHandler; private View mContentView; public void setAutoLoadMoreEnable(boolean isAutoLoadMoreEnable) { this.isAutoLoadMoreEnable = isAutoLoadMoreEnable; } public void setFooterView(ILoadMoreViewFactory factory) { if (null == factory || (null != loadMoreViewFactory && loadMoreViewFactory == factory)) { return; } loadMoreViewFactory = factory; if (hasInitLoadMoreView) { mLoadMoreHandler.removeFooter(); mLoadMoreView = loadMoreViewFactory.madeLoadMoreView(); hasInitLoadMoreView = mLoadMoreHandler.handleSetAdapter(mContentView, mLoadMoreView, onClickLoadMoreListener); if (!isLoadMoreEnable) { mLoadMoreHandler.removeFooter(); } } } public void setLoadMoreEnable(boolean loadMoreEnable) { if (this.isLoadMoreEnable == loadMoreEnable) { return; } this.isLoadMoreEnable = loadMoreEnable; if (!hasInitLoadMoreView && isLoadMoreEnable) { mContentView = getContentView(); if (null == loadMoreViewFactory) { loadMoreViewFactory = new DefaultLoadMoreViewFooter(); } mLoadMoreView = loadMoreViewFactory.madeLoadMoreView(); if (null == mLoadMoreHandler) { if (mContentView instanceof GridView) { mLoadMoreHandler = new GridViewHandler(); } else if (mContentView instanceof AbsListView) { mLoadMoreHandler = new ListViewHandler(); } else if (mContentView instanceof RecyclerView) { mLoadMoreHandler = new RecyclerViewHandler(); } } if (null == mLoadMoreHandler) { throw new IllegalStateException("unSupported contentView !"); } hasInitLoadMoreView = mLoadMoreHandler.handleSetAdapter(mContentView, mLoadMoreView, onClickLoadMoreListener); mLoadMoreHandler.setOnScrollBottomListener(mContentView, onScrollBottomListener); return; } if (hasInitLoadMoreView) { if (isLoadMoreEnable) { mLoadMoreHandler.addFooter(); } else { mLoadMoreHandler.removeFooter(); } } } public boolean isLoadMoreEnable() { return isLoadMoreEnable; } private OnScrollBottomListener onScrollBottomListener = new OnScrollBottomListener() { @Override public void onScorllBootom() { if (isAutoLoadMoreEnable && isLoadMoreEnable && !isLoadingMore()) { // can check network here loadMore(); } } }; private OnClickListener onClickLoadMoreListener = new OnClickListener() { @Override public void onClick(View v) { if (isLoadMoreEnable && !isLoadingMore()) { loadMore(); } } }; void loadMore() { isLoadingMore = true; mLoadMoreView.showLoading(); mOnLoadMoreListener.loadMore(); } public void loadMoreComplete(boolean hasMore) { isLoadingMore = false; isLoadMoreEnable = hasMore; if (hasMore) { mLoadMoreView.showNormal(); } else { setNoMoreData(); } } public void setNoMoreData() { mLoadMoreView.showNomore(); } public boolean isLoadingMore() { return isLoadingMore; } OnLoadMoreListener mOnLoadMoreListener; public void setOnLoadMoreListener(OnLoadMoreListener loadMoreListener) { this.mOnLoadMoreListener = loadMoreListener; } }
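Editor's note (not part of the dataset record): the PtrFrameLayout file above exposes its public API inline, so a minimal usage sketch may help when reading it. The snippet below is an illustration, not code from the repository; ptrFrame, reloadFirstPage() and loadNextPage() are hypothetical names, and only the methods and callback signatures actually visible in the source above (setPtrHandler, checkCanDoRefresh, onRefreshBegin, refreshComplete, setLoadMoreEnable, setOnLoadMoreListener, loadMoreComplete) are relied on.

    // Sketch: wiring a PtrFrameLayout that wraps a ListView/GridView/RecyclerView.
    ptrFrame.setPtrHandler(new PtrHandler() {
        @Override
        public boolean checkCanDoRefresh(PtrFrameLayout frame, View content, View header) {
            // only allow the pull gesture when the content is already scrolled to its top
            return !content.canScrollVertically(-1);
        }

        @Override
        public void onRefreshBegin(PtrFrameLayout frame) {
            reloadFirstPage();        // hypothetical app callback that fetches page 1
            frame.refreshComplete();  // must be called once the data has arrived
        }
    });

    ptrFrame.setLoadMoreEnable(true); // installs the footer through the matching LoadMoreHandler
    ptrFrame.setOnLoadMoreListener(new OnLoadMoreListener() {
        @Override
        public void loadMore() {
            boolean hasMore = loadNextPage();   // hypothetical app callback for the next page
            ptrFrame.loadMoreComplete(hasMore); // false switches the footer to "no more data"
        }
    });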
cptr/src/com/chanven/lib/cptr/PtrFrameLayout.java
package com.chanven.lib.cptr; import android.content.Context; import android.content.res.TypedArray; import android.support.v7.widget.RecyclerView; import android.util.AttributeSet; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.GridView; import android.widget.Scroller; import android.widget.TextView; import com.chanven.lib.cptr.indicator.PtrIndicator; import com.chanven.lib.cptr.loadmore.DefaultLoadMoreViewFooter; import com.chanven.lib.cptr.loadmore.GridViewHandler; import com.chanven.lib.cptr.loadmore.ILoadMoreViewFactory; import com.chanven.lib.cptr.loadmore.ILoadMoreViewFactory.ILoadMoreView; import com.chanven.lib.cptr.loadmore.ListViewHandler; import com.chanven.lib.cptr.loadmore.LoadMoreHandler; import com.chanven.lib.cptr.loadmore.OnLoadMoreListener; import com.chanven.lib.cptr.loadmore.OnScrollBottomListener; import com.chanven.lib.cptr.loadmore.RecyclerViewHandler; import com.chanven.lib.cptr.utils.PtrCLog; /** * This layout view for "Pull to Refresh(Ptr)" support all of the view, you can contain everything you want. * support: pull to refresh / release to refresh / auto refresh / keep header view while refreshing / hide header view while refreshing * It defines {@link PtrUIHandler}, which allows you customize the UI easily. */ public class PtrFrameLayout extends ViewGroup { // status enum public final static byte PTR_STATUS_INIT = 1; public final static byte PTR_STATUS_PREPARE = 2; public final static byte PTR_STATUS_LOADING = 3; public final static byte PTR_STATUS_COMPLETE = 4; private static final boolean DEBUG_LAYOUT = true; public static boolean DEBUG = false; private static int ID = 1; // auto refresh status private static byte FLAG_AUTO_REFRESH_AT_ONCE = 0x01; private static byte FLAG_AUTO_REFRESH_BUT_LATER = 0x01 << 1; private static byte FLAG_ENABLE_NEXT_PTR_AT_ONCE = 0x01 << 2; private static byte FLAG_PIN_CONTENT = 0x01 << 3; private static byte MASK_AUTO_REFRESH = 0x03; protected final String LOG_TAG = "ptr-frame-" + ++ID; protected View mContent; // optional config for define header and content in xml file private int mHeaderId = 0; private int mContainerId = 0; // config private int mDurationToClose = 200; private int mDurationToCloseHeader = 1000; private boolean mKeepHeaderWhenRefresh = true; private boolean mPullToRefresh = false; private View mHeaderView; private PtrUIHandlerHolder mPtrUIHandlerHolder = PtrUIHandlerHolder.create(); private PtrHandler mPtrHandler; // working parameters private ScrollChecker mScrollChecker; private int mPagingTouchSlop; private int mHeaderHeight; private byte mStatus = PTR_STATUS_INIT; private boolean mDisableWhenHorizontalMove = false; private int mFlag = 0x00; // disable when detect moving horizontally private boolean mPreventForHorizontal = false; private MotionEvent mLastMoveEvent; private PtrUIHandlerHook mRefreshCompleteHook; private int mLoadingMinTime = 500; private long mLoadingStartTime = 0; private PtrIndicator mPtrIndicator; private boolean mHasSendCancelEvent = false; public PtrFrameLayout(Context context) { this(context, null); } public PtrFrameLayout(Context context, AttributeSet attrs) { this(context, attrs, 0); } public PtrFrameLayout(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); mPtrIndicator = new PtrIndicator(); TypedArray arr = context.obtainStyledAttributes(attrs, 
com.chanven.lib.cptr.R.styleable.PtrFrameLayout, 0, 0); if (arr != null) { mHeaderId = arr.getResourceId(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_header, mHeaderId); mContainerId = arr.getResourceId(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_content, mContainerId); mPtrIndicator.setResistance( arr.getFloat(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_resistance, mPtrIndicator.getResistance())); mDurationToClose = arr.getInt(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_duration_to_close, mDurationToClose); mDurationToCloseHeader = arr.getInt(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_duration_to_close_header, mDurationToCloseHeader); float ratio = mPtrIndicator.getRatioOfHeaderToHeightRefresh(); ratio = arr.getFloat(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_ratio_of_header_height_to_refresh, ratio); mPtrIndicator.setRatioOfHeaderHeightToRefresh(ratio); mKeepHeaderWhenRefresh = arr.getBoolean(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_keep_header_when_refresh, mKeepHeaderWhenRefresh); mPullToRefresh = arr.getBoolean(com.chanven.lib.cptr.R.styleable.PtrFrameLayout_ptr_pull_to_fresh, mPullToRefresh); arr.recycle(); } mScrollChecker = new ScrollChecker(); final ViewConfiguration conf = ViewConfiguration.get(getContext()); mPagingTouchSlop = conf.getScaledTouchSlop() * 2; } @Override protected void onFinishInflate() { final int childCount = getChildCount(); if (childCount > 2) { throw new IllegalStateException("PtrFrameLayout only can host 2 elements"); } else if (childCount == 2) { if (mHeaderId != 0 && mHeaderView == null) { mHeaderView = findViewById(mHeaderId); } if (mContainerId != 0 && mContent == null) { mContent = findViewById(mContainerId); } // not specify header or content if (mContent == null || mHeaderView == null) { View child1 = getChildAt(0); View child2 = getChildAt(1); if (child1 instanceof PtrUIHandler) { mHeaderView = child1; mContent = child2; } else if (child2 instanceof PtrUIHandler) { mHeaderView = child2; mContent = child1; } else { // both are not specified if (mContent == null && mHeaderView == null) { mHeaderView = child1; mContent = child2; } // only one is specified else { if (mHeaderView == null) { mHeaderView = mContent == child1 ? child2 : child1; } else { mContent = mHeaderView == child1 ? child2 : child1; } } } } } else if (childCount == 1) { mContent = getChildAt(0); } else { TextView errorView = new TextView(getContext()); errorView.setClickable(true); errorView.setTextColor(0xffff6600); errorView.setGravity(Gravity.CENTER); errorView.setTextSize(20); errorView.setText("The content view in PtrFrameLayout is empty. 
Do you forget to specify its id in xml layout file?"); mContent = errorView; addView(mContent); } if (mHeaderView != null) { mHeaderView.bringToFront(); } super.onFinishInflate(); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); if (DEBUG && DEBUG_LAYOUT) { PtrCLog.d(LOG_TAG, "onMeasure frame: width: %s, height: %s, padding: %s %s %s %s", getMeasuredHeight(), getMeasuredWidth(), getPaddingLeft(), getPaddingRight(), getPaddingTop(), getPaddingBottom()); } if (mHeaderView != null) { measureChildWithMargins(mHeaderView, widthMeasureSpec, 0, heightMeasureSpec, 0); MarginLayoutParams lp = (MarginLayoutParams) mHeaderView.getLayoutParams(); mHeaderHeight = mHeaderView.getMeasuredHeight() + lp.topMargin + lp.bottomMargin; mPtrIndicator.setHeaderHeight(mHeaderHeight); } if (mContent != null) { measureContentView(mContent, widthMeasureSpec, heightMeasureSpec); if (DEBUG && DEBUG_LAYOUT) { ViewGroup.MarginLayoutParams lp = (MarginLayoutParams) mContent.getLayoutParams(); PtrCLog.d(LOG_TAG, "onMeasure content, width: %s, height: %s, margin: %s %s %s %s", getMeasuredWidth(), getMeasuredHeight(), lp.leftMargin, lp.topMargin, lp.rightMargin, lp.bottomMargin); PtrCLog.d(LOG_TAG, "onMeasure, currentPos: %s, lastPos: %s, top: %s", mPtrIndicator.getCurrentPosY(), mPtrIndicator.getLastPosY(), mContent.getTop()); } } } private void measureContentView(View child, int parentWidthMeasureSpec, int parentHeightMeasureSpec) { final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams(); final int childWidthMeasureSpec = getChildMeasureSpec(parentWidthMeasureSpec, getPaddingLeft() + getPaddingRight() + lp.leftMargin + lp.rightMargin, lp.width); final int childHeightMeasureSpec = getChildMeasureSpec(parentHeightMeasureSpec, getPaddingTop() + getPaddingBottom() + lp.topMargin, lp.height); child.measure(childWidthMeasureSpec, childHeightMeasureSpec); } @Override protected void onLayout(boolean flag, int i, int j, int k, int l) { layoutChildren(); } private void layoutChildren() { int offsetX = mPtrIndicator.getCurrentPosY(); int paddingLeft = getPaddingLeft(); int paddingTop = getPaddingTop(); if (mHeaderView != null) { MarginLayoutParams lp = (MarginLayoutParams) mHeaderView.getLayoutParams(); final int left = paddingLeft + lp.leftMargin; final int top = paddingTop + lp.topMargin + offsetX - mHeaderHeight; final int right = left + mHeaderView.getMeasuredWidth(); final int bottom = top + mHeaderView.getMeasuredHeight(); mHeaderView.layout(left, top, right, bottom); if (DEBUG && DEBUG_LAYOUT) { PtrCLog.d(LOG_TAG, "onLayout header: %s %s %s %s", left, top, right, bottom); } } if (mContent != null) { if (isPinContent()) { offsetX = 0; } MarginLayoutParams lp = (MarginLayoutParams) mContent.getLayoutParams(); final int left = paddingLeft + lp.leftMargin; final int top = paddingTop + lp.topMargin + offsetX; final int right = left + mContent.getMeasuredWidth(); final int bottom = top + mContent.getMeasuredHeight(); if (DEBUG && DEBUG_LAYOUT) { PtrCLog.d(LOG_TAG, "onLayout content: %s %s %s %s", left, top, right, bottom); } mContent.layout(left, top, right, bottom); } } public boolean dispatchTouchEventSupper(MotionEvent e) { return super.dispatchTouchEvent(e); } @Override public boolean dispatchTouchEvent(MotionEvent e) { if (!isEnabled() || mContent == null || mHeaderView == null) { return dispatchTouchEventSupper(e); } int action = e.getAction(); switch (action) { case MotionEvent.ACTION_UP: case 
MotionEvent.ACTION_CANCEL: mPtrIndicator.onRelease(); if (mPtrIndicator.hasLeftStartPosition()) { if (DEBUG) { PtrCLog.d(LOG_TAG, "call onRelease when user release"); } onRelease(false); if (mPtrIndicator.hasMovedAfterPressedDown()) { sendCancelEvent(); return true; } return dispatchTouchEventSupper(e); } else { return dispatchTouchEventSupper(e); } case MotionEvent.ACTION_DOWN: mHasSendCancelEvent = false; mPtrIndicator.onPressDown(e.getX(), e.getY()); mScrollChecker.abortIfWorking(); mPreventForHorizontal = false; // The cancel event will be sent once the position is moved. // So let the event pass to children. // fix #93, #102 dispatchTouchEventSupper(e); return true; case MotionEvent.ACTION_MOVE: mLastMoveEvent = e; mPtrIndicator.onMove(e.getX(), e.getY()); float offsetX = mPtrIndicator.getOffsetX(); float offsetY = mPtrIndicator.getOffsetY(); if (mDisableWhenHorizontalMove && !mPreventForHorizontal && (Math.abs(offsetX) > mPagingTouchSlop && Math.abs(offsetX) > Math.abs(offsetY))) { if (mPtrIndicator.isInStartPosition()) { mPreventForHorizontal = true; } } if (mPreventForHorizontal) { return dispatchTouchEventSupper(e); } boolean moveDown = offsetY > 0; boolean moveUp = !moveDown; boolean canMoveUp = mPtrIndicator.hasLeftStartPosition(); if (DEBUG) { boolean canMoveDown = mPtrHandler != null && mPtrHandler.checkCanDoRefresh(this, mContent, mHeaderView); PtrCLog.v(LOG_TAG, "ACTION_MOVE: offsetY:%s, currentPos: %s, moveUp: %s, canMoveUp: %s, moveDown: %s: canMoveDown: %s", offsetY, mPtrIndicator.getCurrentPosY(), moveUp, canMoveUp, moveDown, canMoveDown); } // disable move when header not reach top if (moveDown && mPtrHandler != null && !mPtrHandler.checkCanDoRefresh(this, mContent, mHeaderView)) { return dispatchTouchEventSupper(e); } if ((moveUp && canMoveUp) || moveDown) { movePos(offsetY); return true; } } return dispatchTouchEventSupper(e); } /** * if deltaY > 0, move the content down * * @param deltaY */ private void movePos(float deltaY) { // has reached the top if ((deltaY < 0 && mPtrIndicator.isInStartPosition())) { if (DEBUG) { PtrCLog.e(LOG_TAG, String.format("has reached the top")); } return; } int to = mPtrIndicator.getCurrentPosY() + (int) deltaY; // over top if (mPtrIndicator.willOverTop(to)) { if (DEBUG) { PtrCLog.e(LOG_TAG, String.format("over top")); } to = PtrIndicator.POS_START; } mPtrIndicator.setCurrentPos(to); int change = to - mPtrIndicator.getLastPosY(); updatePos(change); } private void updatePos(int change) { if (change == 0) { return; } boolean isUnderTouch = mPtrIndicator.isUnderTouch(); // once moved, cancel event will be sent to child if (isUnderTouch && !mHasSendCancelEvent && mPtrIndicator.hasMovedAfterPressedDown()) { mHasSendCancelEvent = true; sendCancelEvent(); } // leave initiated position or just refresh complete if ((mPtrIndicator.hasJustLeftStartPosition() && mStatus == PTR_STATUS_INIT) || (mPtrIndicator.goDownCrossFinishPosition() && mStatus == PTR_STATUS_COMPLETE && isEnabledNextPtrAtOnce())) { mStatus = PTR_STATUS_PREPARE; mPtrUIHandlerHolder.onUIRefreshPrepare(this); if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshPrepare, mFlag %s", mFlag); } } // back to initiated position if (mPtrIndicator.hasJustBackToStartPosition()) { tryToNotifyReset(); // recover event to children if (isUnderTouch) { sendDownEvent(); } } // Pull to Refresh if (mStatus == PTR_STATUS_PREPARE) { // reach fresh height while moving from top to bottom if (isUnderTouch && !isAutoRefresh() && mPullToRefresh && mPtrIndicator.crossRefreshLineFromTopToBottom()) { 
tryToPerformRefresh(); } // reach header height while auto refresh if (performAutoRefreshButLater() && mPtrIndicator.hasJustReachedHeaderHeightFromTopToBottom()) { tryToPerformRefresh(); } } if (DEBUG) { PtrCLog.v(LOG_TAG, "updatePos: change: %s, current: %s last: %s, top: %s, headerHeight: %s", change, mPtrIndicator.getCurrentPosY(), mPtrIndicator.getLastPosY(), mContent.getTop(), mHeaderHeight); } mHeaderView.offsetTopAndBottom(change); if (!isPinContent()) { mContent.offsetTopAndBottom(change); } invalidate(); if (mPtrUIHandlerHolder.hasHandler()) { mPtrUIHandlerHolder.onUIPositionChange(this, isUnderTouch, mStatus, mPtrIndicator); } onPositionChange(isUnderTouch, mStatus, mPtrIndicator); } protected void onPositionChange(boolean isInTouching, byte status, PtrIndicator mPtrIndicator) { } @SuppressWarnings("unused") public int getHeaderHeight() { return mHeaderHeight; } private void onRelease(boolean stayForLoading) { tryToPerformRefresh(); if (mStatus == PTR_STATUS_LOADING) { // keep header for fresh if (mKeepHeaderWhenRefresh) { // scroll header back if (mPtrIndicator.isOverOffsetToKeepHeaderWhileLoading() && !stayForLoading) { mScrollChecker.tryToScrollTo(mPtrIndicator.getOffsetToKeepHeaderWhileLoading(), mDurationToClose); } else { // do nothing } } else { tryScrollBackToTopWhileLoading(); } } else { if (mStatus == PTR_STATUS_COMPLETE) { notifyUIRefreshComplete(false); } else { tryScrollBackToTopAbortRefresh(); } } } /** * please DO REMEMBER resume the hook * * @param hook */ public void setRefreshCompleteHook(PtrUIHandlerHook hook) { mRefreshCompleteHook = hook; hook.setResumeAction(new Runnable() { @Override public void run() { if (DEBUG) { PtrCLog.d(LOG_TAG, "mRefreshCompleteHook resume."); } notifyUIRefreshComplete(true); } }); } /** * Scroll back to to if is not under touch */ private void tryScrollBackToTop() { if (!mPtrIndicator.isUnderTouch()) { mScrollChecker.tryToScrollTo(PtrIndicator.POS_START, mDurationToCloseHeader); } } /** * just make easier to understand */ private void tryScrollBackToTopWhileLoading() { tryScrollBackToTop(); } /** * just make easier to understand */ private void tryScrollBackToTopAfterComplete() { tryScrollBackToTop(); } /** * just make easier to understand */ private void tryScrollBackToTopAbortRefresh() { tryScrollBackToTop(); } private boolean tryToPerformRefresh() { if (mStatus != PTR_STATUS_PREPARE) { return false; } // if ((mPtrIndicator.isOverOffsetToKeepHeaderWhileLoading() && isAutoRefresh()) || mPtrIndicator.isOverOffsetToRefresh()) { mStatus = PTR_STATUS_LOADING; performRefresh(); } return false; } private void performRefresh() { mLoadingStartTime = System.currentTimeMillis(); if (mPtrUIHandlerHolder.hasHandler()) { mPtrUIHandlerHolder.onUIRefreshBegin(this); if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshBegin"); } } if (mPtrHandler != null) { mPtrHandler.onRefreshBegin(this); } } /** * If at the top and not in loading, reset */ private boolean tryToNotifyReset() { if ((mStatus == PTR_STATUS_COMPLETE || mStatus == PTR_STATUS_PREPARE) && mPtrIndicator.isInStartPosition()) { if (mPtrUIHandlerHolder.hasHandler()) { mPtrUIHandlerHolder.onUIReset(this); if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIReset"); } } mStatus = PTR_STATUS_INIT; clearFlag(); return true; } return false; } protected void onPtrScrollAbort() { if (mPtrIndicator.hasLeftStartPosition() && isAutoRefresh()) { if (DEBUG) { PtrCLog.d(LOG_TAG, "call onRelease after scroll abort"); } onRelease(true); } } protected void onPtrScrollFinish() { if 
(mPtrIndicator.hasLeftStartPosition() && isAutoRefresh()) { if (DEBUG) { PtrCLog.d(LOG_TAG, "call onRelease after scroll finish"); } onRelease(true); } } /** * Detect whether is refreshing. * * @return */ public boolean isRefreshing() { return mStatus == PTR_STATUS_LOADING; } /** * Call this when data is loaded. * The UI will perform complete at once or after a delay, depends on the time elapsed is greater then {@link #mLoadingMinTime} or not. */ final public void refreshComplete() { if (DEBUG) { PtrCLog.i(LOG_TAG, "refreshComplete"); } if (mRefreshCompleteHook != null) { mRefreshCompleteHook.reset(); } int delay = (int) (mLoadingMinTime - (System.currentTimeMillis() - mLoadingStartTime)); if (delay <= 0) { if (DEBUG) { PtrCLog.d(LOG_TAG, "performRefreshComplete at once"); } performRefreshComplete(); } else { postDelayed(new Runnable() { @Override public void run() { performRefreshComplete(); } }, delay); if (DEBUG) { PtrCLog.d(LOG_TAG, "performRefreshComplete after delay: %s", delay); } } } /** * Do refresh complete work when time elapsed is greater than {@link #mLoadingMinTime} */ private void performRefreshComplete() { mStatus = PTR_STATUS_COMPLETE; // if is auto refresh do nothing, wait scroller stop if (mScrollChecker.mIsRunning && isAutoRefresh()) { // do nothing if (DEBUG) { PtrCLog.d(LOG_TAG, "performRefreshComplete do nothing, scrolling: %s, auto refresh: %s", mScrollChecker.mIsRunning, mFlag); } return; } notifyUIRefreshComplete(false); } /** * Do real refresh work. If there is a hook, execute the hook first. * * @param ignoreHook */ private void notifyUIRefreshComplete(boolean ignoreHook) { /** * After hook operation is done, {@link #notifyUIRefreshComplete} will be call in resume action to ignore hook. */ if (mPtrIndicator.hasLeftStartPosition() && !ignoreHook && mRefreshCompleteHook != null) { if (DEBUG) { PtrCLog.d(LOG_TAG, "notifyUIRefreshComplete mRefreshCompleteHook run."); } mRefreshCompleteHook.takeOver(); return; } if (mPtrUIHandlerHolder.hasHandler()) { if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshComplete"); } mPtrUIHandlerHolder.onUIRefreshComplete(this); } mPtrIndicator.onUIRefreshComplete(); tryScrollBackToTopAfterComplete(); tryToNotifyReset(); } public void autoRefresh() { autoRefresh(true, mDurationToCloseHeader); } public void autoRefresh(boolean atOnce) { autoRefresh(atOnce, mDurationToCloseHeader); } private void clearFlag() { // remove auto fresh flag mFlag = mFlag & ~MASK_AUTO_REFRESH; } public void autoRefresh(boolean atOnce, int duration) { if (mStatus != PTR_STATUS_INIT) { return; } mFlag |= atOnce ? FLAG_AUTO_REFRESH_AT_ONCE : FLAG_AUTO_REFRESH_BUT_LATER; mStatus = PTR_STATUS_PREPARE; if (mPtrUIHandlerHolder.hasHandler()) { mPtrUIHandlerHolder.onUIRefreshPrepare(this); if (DEBUG) { PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshPrepare, mFlag %s", mFlag); } } mScrollChecker.tryToScrollTo(mPtrIndicator.getOffsetToRefresh(), duration); if (atOnce) { mStatus = PTR_STATUS_LOADING; performRefresh(); } } public boolean isAutoRefresh() { return (mFlag & MASK_AUTO_REFRESH) > 0; } private boolean performAutoRefreshButLater() { return (mFlag & MASK_AUTO_REFRESH) == FLAG_AUTO_REFRESH_BUT_LATER; } /** * If @param enable has been set to true. The user can perform next PTR at once. 
* * @param enable */ public void setEnabledNextPtrAtOnce(boolean enable) { if (enable) { mFlag = mFlag | FLAG_ENABLE_NEXT_PTR_AT_ONCE; } else { mFlag = mFlag & ~FLAG_ENABLE_NEXT_PTR_AT_ONCE; } } public boolean isEnabledNextPtrAtOnce() { return (mFlag & FLAG_ENABLE_NEXT_PTR_AT_ONCE) > 0; } /** * The content view will now move when {@param pinContent} set to true. * * @param pinContent */ public void setPinContent(boolean pinContent) { if (pinContent) { mFlag = mFlag | FLAG_PIN_CONTENT; } else { mFlag = mFlag & ~FLAG_PIN_CONTENT; } } public boolean isPinContent() { return (mFlag & FLAG_PIN_CONTENT) > 0; } /** * It's useful when working with viewpager. * * @param disable */ public void disableWhenHorizontalMove(boolean disable) { mDisableWhenHorizontalMove = disable; } /** * loading will last at least for so long * * @param time */ public void setLoadingMinTime(int time) { mLoadingMinTime = time; } /** * Not necessary any longer. Once moved, cancel event will be sent to child. * * @param yes */ @Deprecated public void setInterceptEventWhileWorking(boolean yes) { } @SuppressWarnings({"unused"}) public View getContentView() { return mContent; } public void setPtrHandler(PtrHandler ptrHandler) { mPtrHandler = ptrHandler; } public void addPtrUIHandler(PtrUIHandler ptrUIHandler) { PtrUIHandlerHolder.addHandler(mPtrUIHandlerHolder, ptrUIHandler); } @SuppressWarnings({"unused"}) public void removePtrUIHandler(PtrUIHandler ptrUIHandler) { mPtrUIHandlerHolder = PtrUIHandlerHolder.removeHandler(mPtrUIHandlerHolder, ptrUIHandler); } public void setPtrIndicator(PtrIndicator slider) { if (mPtrIndicator != null && mPtrIndicator != slider) { slider.convertFrom(mPtrIndicator); } mPtrIndicator = slider; } @SuppressWarnings({"unused"}) public float getResistance() { return mPtrIndicator.getResistance(); } public void setResistance(float resistance) { mPtrIndicator.setResistance(resistance); } @SuppressWarnings({"unused"}) public float getDurationToClose() { return mDurationToClose; } /** * The duration to return back to the refresh position * * @param duration */ public void setDurationToClose(int duration) { mDurationToClose = duration; } @SuppressWarnings({"unused"}) public long getDurationToCloseHeader() { return mDurationToCloseHeader; } /** * The duration to close time * * @param duration */ public void setDurationToCloseHeader(int duration) { mDurationToCloseHeader = duration; } public void setRatioOfHeaderHeightToRefresh(float ratio) { mPtrIndicator.setRatioOfHeaderHeightToRefresh(ratio); } public int getOffsetToRefresh() { return mPtrIndicator.getOffsetToRefresh(); } @SuppressWarnings({"unused"}) public void setOffsetToRefresh(int offset) { mPtrIndicator.setOffsetToRefresh(offset); } @SuppressWarnings({"unused"}) public float getRatioOfHeaderToHeightRefresh() { return mPtrIndicator.getRatioOfHeaderToHeightRefresh(); } @SuppressWarnings({"unused"}) public void setOffsetToKeepHeaderWhileLoading(int offset) { mPtrIndicator.setOffsetToKeepHeaderWhileLoading(offset); } @SuppressWarnings({"unused"}) public int getOffsetToKeepHeaderWhileLoading() { return mPtrIndicator.getOffsetToKeepHeaderWhileLoading(); } @SuppressWarnings({"unused"}) public boolean isKeepHeaderWhenRefresh() { return mKeepHeaderWhenRefresh; } public void setKeepHeaderWhenRefresh(boolean keepOrNot) { mKeepHeaderWhenRefresh = keepOrNot; } public boolean isPullToRefresh() { return mPullToRefresh; } public void setPullToRefresh(boolean pullToRefresh) { mPullToRefresh = pullToRefresh; } @SuppressWarnings({"unused"}) public View getHeaderView() { 
return mHeaderView; } public void setHeaderView(View header) { if (mHeaderView != null && header != null && mHeaderView != header) { removeView(mHeaderView); } ViewGroup.LayoutParams lp = header.getLayoutParams(); if (lp == null) { lp = new LayoutParams(-1, -2); header.setLayoutParams(lp); } mHeaderView = header; addView(header); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p != null && p instanceof LayoutParams; } @Override protected ViewGroup.LayoutParams generateDefaultLayoutParams() { return new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT); } @Override protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return new LayoutParams(p); } @Override public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } private void sendCancelEvent() { if (DEBUG) { PtrCLog.d(LOG_TAG, "send cancel event"); } // The ScrollChecker will update position and lead to send cancel event when mLastMoveEvent is null. // fix #104, #80, #92 if (mLastMoveEvent == null) { return; } MotionEvent last = mLastMoveEvent; MotionEvent e = MotionEvent.obtain(last.getDownTime(), last.getEventTime() + ViewConfiguration.getLongPressTimeout(), MotionEvent.ACTION_CANCEL, last.getX(), last.getY(), last.getMetaState()); dispatchTouchEventSupper(e); } private void sendDownEvent() { if (DEBUG) { PtrCLog.d(LOG_TAG, "send down event"); } final MotionEvent last = mLastMoveEvent; MotionEvent e = MotionEvent.obtain(last.getDownTime(), last.getEventTime(), MotionEvent.ACTION_DOWN, last.getX(), last.getY(), last.getMetaState()); dispatchTouchEventSupper(e); } public static class LayoutParams extends MarginLayoutParams { public LayoutParams(Context c, AttributeSet attrs) { super(c, attrs); } public LayoutParams(int width, int height) { super(width, height); } @SuppressWarnings({"unused"}) public LayoutParams(MarginLayoutParams source) { super(source); } public LayoutParams(ViewGroup.LayoutParams source) { super(source); } } class ScrollChecker implements Runnable { private int mLastFlingY; private Scroller mScroller; private boolean mIsRunning = false; private int mStart; private int mTo; public ScrollChecker() { mScroller = new Scroller(getContext()); } public void run() { boolean finish = !mScroller.computeScrollOffset() || mScroller.isFinished(); int curY = mScroller.getCurrY(); int deltaY = curY - mLastFlingY; if (DEBUG) { if (deltaY != 0) { PtrCLog.v(LOG_TAG, "scroll: %s, start: %s, to: %s, currentPos: %s, current :%s, last: %s, delta: %s", finish, mStart, mTo, mPtrIndicator.getCurrentPosY(), curY, mLastFlingY, deltaY); } } if (!finish) { mLastFlingY = curY; movePos(deltaY); post(this); } else { finish(); } } private void finish() { if (DEBUG) { PtrCLog.v(LOG_TAG, "finish, currentPos:%s", mPtrIndicator.getCurrentPosY()); } reset(); onPtrScrollFinish(); } private void reset() { mIsRunning = false; mLastFlingY = 0; removeCallbacks(this); } public void abortIfWorking() { if (mIsRunning) { if (!mScroller.isFinished()) { mScroller.forceFinished(true); } onPtrScrollAbort(); reset(); } } public void tryToScrollTo(int to, int duration) { if (mPtrIndicator.isAlreadyHere(to)) { return; } mStart = mPtrIndicator.getCurrentPosY(); mTo = to; int distance = to - mStart; if (DEBUG) { PtrCLog.d(LOG_TAG, "tryToScrollTo: start: %s, distance:%s, to:%s", mStart, distance, to); } removeCallbacks(this); mLastFlingY = 0; // fix #47: Scroller should be reused, 
https://github.com/liaohuqiu/android-Ultra-Pull-To-Refresh/issues/47 if (!mScroller.isFinished()) { mScroller.forceFinished(true); } mScroller.startScroll(0, 0, 0, distance, duration); post(this); mIsRunning = true; } } private boolean isLoadingMore = false; private boolean isAutoLoadMoreEnable = true; private boolean isLoadMoreEnable = false; private boolean hasInitLoadMoreView = false; private ILoadMoreViewFactory loadMoreViewFactory; private ILoadMoreView mLoadMoreView; private LoadMoreHandler mLoadMoreHandler; private View mContentView; public void setAutoLoadMoreEnable(boolean isAutoLoadMoreEnable) { this.isAutoLoadMoreEnable = isAutoLoadMoreEnable; } public void setFooterView(ILoadMoreViewFactory factory) { if (null == factory || (null != loadMoreViewFactory && loadMoreViewFactory == factory)) { return; } loadMoreViewFactory = factory; if (hasInitLoadMoreView) { mLoadMoreHandler.removeFooter(); mLoadMoreView = loadMoreViewFactory.madeLoadMoreView(); hasInitLoadMoreView = mLoadMoreHandler.handleSetAdapter(mContentView, mLoadMoreView, onClickLoadMoreListener); if (!isLoadMoreEnable) { mLoadMoreHandler.removeFooter(); } } } public void setLoadMoreEnable(boolean loadMoreEnable) { if (this.isLoadMoreEnable == loadMoreEnable) { return; } this.isLoadMoreEnable = loadMoreEnable; if (!hasInitLoadMoreView && isLoadMoreEnable) { mContentView = getContentView(); if (null == loadMoreViewFactory) { loadMoreViewFactory = new DefaultLoadMoreViewFooter(); } mLoadMoreView = loadMoreViewFactory.madeLoadMoreView(); if (null == mLoadMoreHandler) { if (mContentView instanceof GridView) { mLoadMoreHandler = new GridViewHandler(); } else if (mContentView instanceof AbsListView) { mLoadMoreHandler = new ListViewHandler(); } else if (mContentView instanceof RecyclerView) { mLoadMoreHandler = new RecyclerViewHandler(); } } if (null == mLoadMoreHandler) { throw new IllegalStateException("unSupported contentView !"); } hasInitLoadMoreView = mLoadMoreHandler.handleSetAdapter(mContentView, mLoadMoreView, onClickLoadMoreListener); mLoadMoreHandler.setOnScrollBottomListener(mContentView, onScrollBottomListener); return; } if (hasInitLoadMoreView) { if (isLoadMoreEnable) { mLoadMoreHandler.addFooter(); } else { mLoadMoreHandler.removeFooter(); } } } public boolean isLoadMoreEnable() { return isLoadMoreEnable; } private OnScrollBottomListener onScrollBottomListener = new OnScrollBottomListener() { @Override public void onScorllBootom() { if (isAutoLoadMoreEnable && isLoadMoreEnable && !isLoadingMore()) { // can check network here loadMore(); } } }; private OnClickListener onClickLoadMoreListener = new OnClickListener() { @Override public void onClick(View v) { if (isLoadMoreEnable && !isLoadingMore()) { loadMore(); } } }; void loadMore() { isLoadingMore = true; mLoadMoreView.showLoading(); mOnLoadMoreListener.loadMore(); } public void loadMoreComplete(boolean hasMore) { isLoadingMore = false; isLoadMoreEnable = hasMore; if (hasMore) { mLoadMoreView.showNormal(); } else { mLoadMoreView.showNomore(); } } public boolean isLoadingMore() { return isLoadingMore; } OnLoadMoreListener mOnLoadMoreListener; public void setOnLoadMoreListener(OnLoadMoreListener loadMoreListener) { this.mOnLoadMoreListener = loadMoreListener; } }
fix #44
cptr/src/com/chanven/lib/cptr/PtrFrameLayout.java
fix #44
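Editor's note (not part of the dataset record): diffing new_contents against old_contents for this "fix #44" commit, the only change is that the mLoadMoreView.showNomore() call inside loadMoreComplete() has been extracted into a new public setNoMoreData() method. A hedged sketch of what that enables for a caller (ptrFrame and response are illustrative names):

    // With the extracted method, the footer can be forced into its terminal
    // "no more data" state right after the initial refresh, without faking a
    // loadMoreComplete(false) round trip.
    ptrFrame.refreshComplete();
    if (!response.hasMore()) {
        ptrFrame.setNoMoreData();  // public method introduced by this commit
    }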
Java
apache-2.0
f40ef905b1db1aadabbd4496331755fb984d88c5
0
jssenyange/traccar,orcoliver/traccar,jssenyange/traccar,orcoliver/traccar,jssenyange/traccar,orcoliver/traccar
/* * Copyright 2015 - 2020 Anton Tananaev ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.traccar.helper; import java.util.ArrayList; import java.util.regex.Pattern; public class PatternBuilder { private final ArrayList<String> fragments = new ArrayList<>(); public PatternBuilder optional() { return optional(1); } public PatternBuilder optional(int count) { fragments.add(fragments.size() - count, "(?:"); fragments.add(")?"); return this; } public PatternBuilder expression(String s) { s = s.replaceAll("\\|$", "\\\\|"); // special case for delimiter fragments.add(s); return this; } public PatternBuilder text(String s) { fragments.add(s.replaceAll("([\\\\.\\[{()*+?^$|])", "\\\\$1")); return this; } public PatternBuilder number(String s) { s = s.replace("dddd", "d{4}").replace("ddd", "d{3}").replace("dd", "d{2}"); s = s.replace("xxxx", "x{4}").replace("xxx", "x{3}").replace("xx", "x{2}"); s = s.replace("d", "\\d").replace("x", "[0-9a-fA-F]").replaceAll("([.])", "\\\\$1"); s = s.replaceAll("\\|$", "\\\\|").replaceAll("^\\|", "\\\\|"); // special case for delimiter fragments.add(s); return this; } public PatternBuilder any() { fragments.add(".*"); return this; } public PatternBuilder binary(String s) { fragments.add(s.replaceAll("(\\p{XDigit}{2})", "\\\\$1")); return this; } public PatternBuilder or() { fragments.add("|"); return this; } public PatternBuilder groupBegin() { return expression("(?:"); } public PatternBuilder groupEnd() { return expression(")"); } public PatternBuilder groupEnd(String s) { return expression(")" + s); } public Pattern compile() { return Pattern.compile(toString(), Pattern.DOTALL); } @Override public String toString() { StringBuilder builder = new StringBuilder(); for (String fragment : fragments) { builder.append(fragment); } return builder.toString(); } }
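Editor's note (not part of the dataset record): PatternBuilder composes protocol-parsing regexes from small fragments, so a worked example of what the builder emits can make the escaping rules easier to follow. The demo below is an assumption: the $POS sentence format is invented for illustration, only the builder methods shown in the source above are used, and the expected expansions are derived from the replace rules in number() and text().

    import java.util.regex.Pattern;
    import org.traccar.helper.PatternBuilder;

    public class PatternBuilderDemo {
        private static final Pattern PATTERN = new PatternBuilder()
                .text("$POS,")           // literal prefix; '$' is escaped -> \$POS,
                .number("(dd)")          // two digits, captured -> (\d{2})
                .expression("[NS],")     // raw regex fragment, inserted as-is
                .number("(d+.d+)")       // decimal number -> (\d+\.\d+)
                .any()                   // swallow the rest of the line -> .*
                .compile();

        public static void main(String[] args) {
            // prints "true": the composed pattern is \$POS,(\d{2})[NS],(\d+\.\d+).*
            System.out.println(PATTERN.matcher("$POS,45N,12.5,junk").matches());
        }
    }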
src/main/java/org/traccar/helper/PatternBuilder.java
/* * Copyright 2015 Anton Tananaev ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.traccar.helper; import java.util.ArrayList; import java.util.regex.Pattern; public class PatternBuilder { private final ArrayList<String> fragments = new ArrayList<>(); public PatternBuilder optional() { return optional(1); } public PatternBuilder optional(int count) { fragments.add(fragments.size() - count, "(?:"); fragments.add(")?"); return this; } public PatternBuilder expression(String s) { s = s.replaceAll("\\|$", "\\\\|"); // special case for delimiter fragments.add(s); return this; } public PatternBuilder text(String s) { fragments.add(s.replaceAll("([\\\\\\.\\[\\{\\(\\)\\*\\+\\?\\^\\$\\|])", "\\\\$1")); return this; } public PatternBuilder number(String s) { s = s.replace("dddd", "d{4}").replace("ddd", "d{3}").replace("dd", "d{2}"); s = s.replace("xxxx", "x{4}").replace("xxx", "x{3}").replace("xx", "x{2}"); s = s.replace("d", "\\d").replace("x", "[0-9a-fA-F]").replaceAll("([\\.])", "\\\\$1"); s = s.replaceAll("\\|$", "\\\\|").replaceAll("^\\|", "\\\\|"); // special case for delimiter fragments.add(s); return this; } public PatternBuilder any() { fragments.add(".*"); return this; } public PatternBuilder binary(String s) { fragments.add(s.replaceAll("(\\p{XDigit}{2})", "\\\\$1")); return this; } public PatternBuilder or() { fragments.add("|"); return this; } public PatternBuilder groupBegin() { return expression("(?:"); } public PatternBuilder groupEnd() { return expression(")"); } public PatternBuilder groupEnd(String s) { return expression(")" + s); } public Pattern compile() { return Pattern.compile(toString(), Pattern.DOTALL); } @Override public String toString() { StringBuilder builder = new StringBuilder(); for (String fragment : fragments) { builder.append(fragment); } return builder.toString(); } }
Simplify regex
src/main/java/org/traccar/helper/PatternBuilder.java
Simplify regex
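Editor's note (not part of the dataset record): comparing new_contents with old_contents, this "Simplify regex" commit only drops redundant backslashes inside the character classes used by text() and number(); inside [...] most metacharacters are already literal, so the set of escaped characters is unchanged. A small equivalence check, written as a standalone sketch rather than project code:

    public class EscapeEquivalenceCheck {
        public static void main(String[] args) {
            String s = "a.b[c{d(e)f*g+h?i^j$k|l\\m";
            // escaping as performed by text() before and after the commit
            String before = s.replaceAll("([\\\\\\.\\[\\{\\(\\)\\*\\+\\?\\^\\$\\|])", "\\\\$1");
            String after  = s.replaceAll("([\\\\.\\[{()*+?^$|])", "\\\\$1");
            System.out.println(before.equals(after)); // true: both escape the same characters
        }
    }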
Java
bsd-3-clause
e5f8b302ec7acbe936690ae195a57edeea69d4ef
0
asamgir/openspecimen,krishagni/openspecimen,asamgir/openspecimen,krishagni/openspecimen,krishagni/openspecimen,asamgir/openspecimen
/** * <p>Title: StorageContainerHDAO Class> * <p>Description: StorageContainerHDAO is used to add Storage Container information into the database using Hibernate.</p> * Copyright: Copyright (c) year * Company: Washington University, School of Medicine, St. Louis. * @author Aniruddha Phadnis * @version 1.00 * Created on Jul 23, 2005 */ package edu.wustl.catissuecore.bizlogic; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.Vector; import org.hibernate.Session; import net.sf.ehcache.CacheException; import edu.wustl.catissuecore.domain.Capacity; import edu.wustl.catissuecore.domain.CollectionProtocol; import edu.wustl.catissuecore.domain.Container; import edu.wustl.catissuecore.domain.ContainerPosition; import edu.wustl.catissuecore.domain.Site; import edu.wustl.catissuecore.domain.Specimen; import edu.wustl.catissuecore.domain.SpecimenArray; import edu.wustl.catissuecore.domain.SpecimenArrayType; import edu.wustl.catissuecore.domain.SpecimenPosition; import edu.wustl.catissuecore.domain.StorageContainer; import edu.wustl.catissuecore.domain.StorageType; import edu.wustl.catissuecore.namegenerator.BarcodeGenerator; import edu.wustl.catissuecore.namegenerator.BarcodeGeneratorFactory; import edu.wustl.catissuecore.namegenerator.LabelGenerator; import edu.wustl.catissuecore.namegenerator.LabelGeneratorFactory; import edu.wustl.catissuecore.namegenerator.NameGeneratorException; import edu.wustl.catissuecore.util.ApiSearchUtil; import edu.wustl.catissuecore.util.CatissueCoreCacheManager; import edu.wustl.catissuecore.util.StorageContainerUtil; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.catissuecore.util.global.Utility; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.beans.SessionDataBean; import edu.wustl.common.bizlogic.DefaultBizLogic; import edu.wustl.common.dao.AbstractDAO; import edu.wustl.common.dao.DAO; import edu.wustl.common.dao.DAOFactory; import edu.wustl.common.dao.HibernateDAO; import edu.wustl.common.dao.JDBCDAO; import edu.wustl.common.domain.AbstractDomainObject; import edu.wustl.common.exception.BizLogicException; import edu.wustl.common.security.PrivilegeCache; import edu.wustl.common.security.PrivilegeManager; import edu.wustl.common.security.SecurityManager; import edu.wustl.common.security.exceptions.SMException; import edu.wustl.common.security.exceptions.UserNotAuthorizedException; import edu.wustl.common.tree.StorageContainerTreeNode; import edu.wustl.common.tree.TreeDataInterface; import edu.wustl.common.tree.TreeNode; import edu.wustl.common.tree.TreeNodeImpl; import edu.wustl.common.util.NameValueBeanRelevanceComparator; import edu.wustl.common.util.NameValueBeanValueComparator; import edu.wustl.common.util.XMLPropertyHandler; import edu.wustl.common.util.dbManager.DAOException; import edu.wustl.common.util.dbManager.DBUtil; import edu.wustl.common.util.dbManager.HibernateMetaData; import edu.wustl.common.util.global.ApplicationProperties; import edu.wustl.common.util.global.Validator; import edu.wustl.common.util.logger.Logger; /** * StorageContainerHDAO is used to add Storage Container information into the * database using Hibernate. 
* * @author vaishali_khandelwal */ public class StorageContainerBizLogic extends DefaultBizLogic implements TreeDataInterface { // Getting containersMaxLimit from the xml file in static variable private static final int containersMaxLimit = Integer .parseInt(XMLPropertyHandler .getValue(Constants.CONTAINERS_MAX_LIMIT)); /** * Saves the storageContainer object in the database. * * @param obj * The storageType object to be saved. * @param session * The session in which the object is saved. * @throws DAOException */ protected void insert(Object obj, DAO dao, SessionDataBean sessionDataBean) throws DAOException, UserNotAuthorizedException { StorageContainer container = (StorageContainer) obj; container.setActivityStatus(Constants.ACTIVITY_STATUS_ACTIVE); // Setting the Parent Container if applicable int posOneCapacity = 1, posTwoCapacity = 1; int positionDimensionOne = Constants.STORAGE_CONTAINER_FIRST_ROW, positionDimensionTwo = Constants.STORAGE_CONTAINER_FIRST_COLUMN; boolean fullStatus[][] = null; int noOfContainers = container.getNoOfContainers().intValue(); if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { Object object = dao.retrieve(StorageContainer.class.getName(), container.getLocatedAtPosition().getParentContainer() .getId()); if (object != null) { StorageContainer parentContainer = (StorageContainer) object; // check for closed ParentContainer checkStatus(dao, parentContainer, "Parent Container"); int totalCapacity = parentContainer.getCapacity() .getOneDimensionCapacity().intValue() * parentContainer.getCapacity() .getTwoDimensionCapacity().intValue(); Collection children = StorageContainerUtil.getChildren(dao, parentContainer.getId()); if ((noOfContainers + children.size()) > totalCapacity) { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.overflow")); } else { // Check if position specified is within the parent // container's // capacity if (false == validatePosition(parentContainer, container)) { throw new DAOException( ApplicationProperties .getValue("errors.storageContainer.dimensionOverflow")); } try { // check for all validations on the storage container. 
checkContainer(dao, container.getLocatedAtPosition() .getParentContainer().getId().toString(), container.getLocatedAtPosition() .getPositionDimensionOne().toString(), container.getLocatedAtPosition() .getPositionDimensionTwo().toString(), sessionDataBean, false); } catch (SMException sme) { sme.printStackTrace(); throw handleSMException(sme); } // check for availability of position /* * boolean canUse = isContainerAvailableForPositions(dao, * container); * * if (!canUse) { throw new * DAOException(ApplicationProperties.getValue("errors.storageContainer.inUse")); } */ // Check weather parent container is valid container to use boolean parentContainerValidToUSe = isParentContainerValidToUSe( container, parentContainer); if (!parentContainerValidToUSe) { throw new DAOException( "Parent Container is not valid for this container type"); } ContainerPosition cntPos = container.getLocatedAtPosition(); cntPos.setParentContainer(parentContainer); container.setSite(parentContainer.getSite()); posOneCapacity = parentContainer.getCapacity() .getOneDimensionCapacity().intValue(); posTwoCapacity = parentContainer.getCapacity() .getTwoDimensionCapacity().intValue(); fullStatus = getStorageContainerFullStatus(dao, parentContainer, children); positionDimensionOne = cntPos.getPositionDimensionOne() .intValue(); positionDimensionTwo = cntPos.getPositionDimensionTwo() .intValue(); container.setLocatedAtPosition(cntPos); } } else { throw new DAOException(ApplicationProperties .getValue("errors.storageContainerExist")); } } else { loadSite(dao, container); } loadStorageType(dao, container); for (int i = 0; i < noOfContainers; i++) { StorageContainer cont = new StorageContainer(container); if (cont.getLocatedAtPosition() != null && cont.getLocatedAtPosition().getParentContainer() != null) { ContainerPosition cntPos = cont.getLocatedAtPosition(); cntPos .setPositionDimensionOne(new Integer( positionDimensionOne)); cntPos .setPositionDimensionTwo(new Integer( positionDimensionTwo)); cntPos.setOccupiedContainer(cont); cont.setLocatedAtPosition(cntPos); } Logger.out.debug("Collection protocol size:" + container.getCollectionProtocolCollection().size()); // by falguni // Call Storage container label generator if its specified to use // automatic label generator if (edu.wustl.catissuecore.util.global.Variables.isStorageContainerLabelGeneratorAvl) { LabelGenerator storagecontLblGenerator; try { storagecontLblGenerator = LabelGeneratorFactory .getInstance(Constants.STORAGECONTAINER_LABEL_GENERATOR_PROPERTY_NAME); storagecontLblGenerator.setLabel(cont); container.setName(cont.getName()); } catch (NameGeneratorException e) { throw new DAOException(e.getMessage()); } } if (edu.wustl.catissuecore.util.global.Variables.isStorageContainerBarcodeGeneratorAvl) { BarcodeGenerator storagecontBarcodeGenerator; try { storagecontBarcodeGenerator = BarcodeGeneratorFactory .getInstance(Constants.STORAGECONTAINER_BARCODE_GENERATOR_PROPERTY_NAME); // storagecontBarcodeGenerator.setBarcode(cont); } catch (NameGeneratorException e) { throw new DAOException(e.getMessage()); } } dao.insert(cont.getCapacity(), sessionDataBean, true, true); dao.insert(cont, sessionDataBean, true, true); // Used for showing the success message after insert and using it // for edit. 
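// The generated id and capacity of the container just persisted are copied back onto the request
// object so the caller can display the success message and reuse the object for edit.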
container.setId(cont.getId()); container.setCapacity(cont.getCapacity()); if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { Logger.out.debug("In if: "); do { if (positionDimensionTwo == posTwoCapacity) { if (positionDimensionOne == posOneCapacity) positionDimensionOne = Constants.STORAGE_CONTAINER_FIRST_ROW; else positionDimensionOne = (positionDimensionOne + 1) % (posOneCapacity + 1); positionDimensionTwo = Constants.STORAGE_CONTAINER_FIRST_COLUMN; } else { positionDimensionTwo = positionDimensionTwo + 1; } Logger.out.debug("positionDimensionTwo: " + positionDimensionTwo); Logger.out.debug("positionDimensionOne: " + positionDimensionOne); } while (fullStatus[positionDimensionOne][positionDimensionTwo] != false); } // Inserting authorization data Set protectionObjects = new HashSet(); protectionObjects.add(cont); try { // SecurityManager.getInstance(this.getClass()).insertAuthorizationData(null, // protectionObjects, getDynamicGroups(cont)); PrivilegeManager privilegeManager = PrivilegeManager .getInstance(); privilegeManager.insertAuthorizationData(null, protectionObjects, getDynamicGroups(cont), cont .getObjectId()); } catch (SMException e) { throw handleSMException(e); } } } /** * Name : Pathik Sheth Reviewer Name :Vishvesh Mulay * Description:Retrive only repository sites which are not closed. */ public List getRepositorySiteList(String sourceObjectName, String[] displayNameFields, String valueField, String activityStatusArr[], boolean isToExcludeDisabled) throws DAOException { String[] whereColumnName = null; String[] whereColumnCondition = null; String joinCondition = null; String separatorBetweenFields = ", "; whereColumnName = new String[] { "activityStatus","type"}; whereColumnCondition = new String[] { "not in","=" }; // whereColumnCondition = new String[]{"in"}; Object[] whereColumnValue = { activityStatusArr,Constants.REPOSITORY}; return getList(sourceObjectName, displayNameFields, valueField, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition, separatorBetweenFields, isToExcludeDisabled); } public List getSiteList(String[] displayNameFields, String valueField, String activityStatusArr[], Long userId) throws DAOException { List siteResultList = getRepositorySiteList(Site.class.getName(), displayNameFields, valueField, activityStatusArr, false); List userList = null; Set<Long> idSet = new UserBizLogic().getRelatedSiteIds(userId); userList = new ArrayList(); Iterator siteListIterator = siteResultList.iterator(); while (siteListIterator.hasNext()) { NameValueBean nameValBean = (NameValueBean) siteListIterator .next(); Long siteId = new Long(nameValBean.getValue()); if (hasPrivilegeonSite(idSet, siteId)) { userList.add(nameValBean); } } return userList; } private boolean hasPrivilegeonSite(Set<Long> siteidSet, Long siteId) { boolean hasPrivilege = true; if (siteidSet != null) { if (!siteidSet.contains(siteId)) { hasPrivilege = false; } } return hasPrivilege; } /** * this function checks weather parent of the container is valid or not * according to restriction provided for the containers * * @param container - * Container * @param parent - * Parent Container * @return boolean true indicating valid to use , false indicating not valid * to use. 
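* A parent holding the special storage type "All" (id 1) accepts any container type; otherwise the parent's holdsStorageTypeCollection must contain this container's storage type.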
* @throws DAOException */ protected boolean isParentContainerValidToUSe(StorageContainer container, StorageContainer parent) throws DAOException { StorageType storageTypeAny = new StorageType(); storageTypeAny.setId(new Long("1")); storageTypeAny.setName("All"); if (parent.getHoldsStorageTypeCollection().contains(storageTypeAny)) { return true; } if (!parent.getHoldsStorageTypeCollection().contains( container.getStorageType())) { return false; } return true; } // This method sets the collection Storage Types. protected String[] getDynamicGroups(AbstractDomainObject obj) throws SMException { String[] dynamicGroups = null; StorageContainer storageContainer = (StorageContainer) obj; if (storageContainer.getLocatedAtPosition() != null && storageContainer.getLocatedAtPosition().getParentContainer() != null) { dynamicGroups = SecurityManager.getInstance(this.getClass()) .getProtectionGroupByName( storageContainer.getLocatedAtPosition() .getParentContainer()); } else { dynamicGroups = SecurityManager.getInstance(this.getClass()) .getProtectionGroupByName(storageContainer.getSite()); } return dynamicGroups; } public void postInsert(Object obj, DAO dao, SessionDataBean sessionDataBean) throws DAOException, UserNotAuthorizedException { StorageContainer container = (StorageContainer) obj; try { Map containerMap = StorageContainerUtil.getContainerMapFromCache(); StorageContainerUtil.addStorageContainerInContainerMap(container, containerMap); } catch (Exception e) { } } /** * Updates the persistent object in the database. * * @param obj * The object to be updated. * @param session * The session in which the object is saved. * @throws DAOException */ protected void update(DAO dao, Object obj, Object oldObj, SessionDataBean sessionDataBean) throws DAOException, UserNotAuthorizedException { StorageContainer container = (StorageContainer) obj; StorageContainer oldContainer = (StorageContainer) oldObj; // lazy change StorageContainer persistentOldContainerForChange = null; Object object = dao.retrieve(StorageContainer.class.getName(), oldContainer.getId()); persistentOldContainerForChange = (StorageContainer) object; // retrive parent container if (container.getLocatedAtPosition() != null) { StorageContainer parentStorageContainer = (StorageContainer) dao .retrieve(StorageContainer.class.getName(), container .getLocatedAtPosition().getParentContainer() .getId()); container.getLocatedAtPosition().setParentContainer( parentStorageContainer); } Logger.out.debug("container.isParentChanged() : " + container.isParentChanged()); if (container.isParentChanged()) { if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { // Check whether continer is moved to one of its sub container. if (isUnderSubContainer(container, container .getLocatedAtPosition().getParentContainer().getId(), dao)) { throw new DAOException(ApplicationProperties .getValue("errors.container.under.subcontainer")); } Logger.out.debug("Loading ParentContainer: " + container.getLocatedAtPosition().getParentContainer() .getId()); /** * Name : Vijay_Pande Reviewer : Sntosh_Chandak Bug ID: 4038 * Patch ID: 4038_1 See also: 1-3 Description: In the edit mode * while updating parent container there was a hibernet session * error Since we were retrieving parent container it was * retriving all child containers as well. Hence only required * filed of parent containcer is retrieved. 
*/ // StorageContainer pc = (StorageContainer) // dao.retrieve(StorageContainer.class.getName(), // container.getParent().getId()); /* * Check if position specified is within the parent container's * capacity */ if (false == validatePosition(dao, container)) { throw new DAOException( ApplicationProperties .getValue("errors.storageContainer.dimensionOverflow")); } // Mandar : code added for validation bug id 666. 24-11-2005 // start boolean canUse = isContainerAvailableForPositions(dao, container); Logger.out.debug("canUse : " + canUse); if (!canUse) { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.inUse")); } // Mandar : code added for validation bug id 666. 24-11-2005 end // check for closed ParentContainer checkStatus(dao, container.getLocatedAtPosition() .getParentContainer(), "Parent Container"); // container.setParent(pc); Site site = getSite(dao, container.getLocatedAtPosition() .getParentContainer().getId()); // Site // site=((StorageContainer)container.getParent()).getSite(); // check for closed Site checkStatus(dao, site, "Parent Container Site"); container.setSite(site); /** -- patch ends here -- */ } } // Mandar : code added for validation 25-11-05----------- else // if parent container is not changed only the position is changed. { if (container.isPositionChanged()) { // ----------------- String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "id", "capacity.oneDimensionCapacity", "capacity.twoDimensionCapacity" }; String[] whereColumnName = { "id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { container.getLocatedAtPosition() .getParentContainer().getId() }; String joinCondition = null; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); if (!list.isEmpty()) { Object[] obj1 = (Object[]) list.get(0); Logger.out .debug("**************PC obj::::::: --------------- " + obj1); Logger.out.debug((Long) obj1[0]); Logger.out.debug((Integer) obj1[1]); Logger.out.debug((Integer) obj1[2]); Integer pcCapacityOne = (Integer) obj1[1]; Integer pcCapacityTwo = (Integer) obj1[2]; if (!validatePosition(pcCapacityOne.intValue(), pcCapacityTwo.intValue(), container)) { throw new DAOException( ApplicationProperties .getValue("errors.storageContainer.dimensionOverflow")); } } else { } // ----------------- // StorageContainer pc = (StorageContainer) // dao.retrieve(StorageContainer.class.getName(), // container.getParentContainer().getId()); // if(!validatePosition(container.getParentContainer().getStorageContainerCapacity().getOneDimensionCapacity().intValue(), // container.getParentContainer().getStorageContainerCapacity().getTwoDimensionCapacity().intValue(), // container)) // /*Check if position specified is within the parent // container's capacity*/ // // if(!validatePosition(pc,container)) // { // throw new // DAOException(ApplicationProperties.getValue("errors.storageContainer.dimensionOverflow")); // } // /** * Only if parentContainerID, positionOne or positionTwo is * changed check for availability of position */ if (oldContainer.getLocatedAtPosition() != null && oldContainer.getLocatedAtPosition() .getPositionDimensionOne() != null && oldContainer.getLocatedAtPosition() .getPositionDimensionOne().intValue() != container .getLocatedAtPosition() .getPositionDimensionOne().intValue() || oldContainer.getLocatedAtPosition() .getPositionDimensionTwo().intValue() != 
container .getLocatedAtPosition() .getPositionDimensionTwo().intValue()) { boolean canUse = isContainerAvailableForPositions(dao, container); Logger.out.debug("canUse : " + canUse); if (!canUse) { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.inUse")); } } } } // Mandar : --------- end 25-11-05 ----------------- boolean flag = true; if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null && oldContainer.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() .getId().longValue() == oldContainer .getLocatedAtPosition().getParentContainer().getId() .longValue() && container.getLocatedAtPosition().getPositionDimensionOne() .longValue() == oldContainer.getLocatedAtPosition() .getPositionDimensionOne().longValue() && container.getLocatedAtPosition().getPositionDimensionTwo() .longValue() == oldContainer.getLocatedAtPosition() .getPositionDimensionTwo().longValue()) { flag = false; } if (flag) { try { // check for all validations on the storage container. if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getParentContainer() != null) { checkContainer(dao, container.getLocatedAtPosition() .getParentContainer().getId().toString(), container .getLocatedAtPosition().getPositionDimensionOne() .toString(), container.getLocatedAtPosition() .getPositionDimensionTwo().toString(), sessionDataBean, false); } } catch (SMException sme) { sme.printStackTrace(); throw handleSMException(sme); } } // Check whether size has been reduced // Sri: fix for bug #355 (Storage capacity: Reducing capacity should be // handled) Integer oldContainerDimOne = oldContainer.getCapacity() .getOneDimensionCapacity(); Integer oldContainerDimTwo = oldContainer.getCapacity() .getTwoDimensionCapacity(); Integer newContainerDimOne = container.getCapacity() .getOneDimensionCapacity(); Integer newContainerDimTwo = container.getCapacity() .getTwoDimensionCapacity(); // If any size is reduced, object was present at any of the deleted // positions throw error if (oldContainerDimOne.intValue() > newContainerDimOne.intValue() || oldContainerDimTwo.intValue() > newContainerDimTwo .intValue()) { boolean canReduceDimension = StorageContainerUtil .checkCanReduceDimension(oldContainer, container); if (!canReduceDimension) { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.cannotReduce")); } } /** * Name : kalpana thakur Reviewer Name : Vaishali Bug ID: 4922 * Description:Storage container will not be added to closed site :check * for closed site */ if (container.getId() != null) { checkClosedSite(dao, container.getId(), "Container site"); } setSiteForSubContainers(container, container.getSite(), dao); boolean restrictionsCanChange = isContainerEmpty(dao, container); Logger.out.info("--------------container Available :" + restrictionsCanChange); if (!restrictionsCanChange) { boolean restrictionsChanged = checkForRestrictionsChanged( container, oldContainer); Logger.out.info("---------------restriction changed -:" + restrictionsChanged); if (restrictionsChanged) { throw new DAOException( ApplicationProperties .getValue("errros.storageContainer.restrictionCannotChanged")); } } Collection<SpecimenPosition> specimenPosColl = getSpecimenPositionCollForContainer( dao, container.getId()); container.setSpecimenPositionCollection(specimenPosColl); setValuesinPersistentObject(persistentOldContainerForChange, container, dao); dao.update(persistentOldContainerForChange, 
sessionDataBean, true, true, false); dao.update(persistentOldContainerForChange.getCapacity(), sessionDataBean, true, true, false); // Audit of update of storage container. dao.audit(obj, oldObj, sessionDataBean, true); dao.audit(container.getCapacity(), oldContainer.getCapacity(), sessionDataBean, true); Logger.out.debug("container.getActivityStatus() " + container.getActivityStatus()); // lazy change /* * if (container.getParent() != null) { * * StorageContainer pc = (StorageContainer) * dao.retrieve(StorageContainer.class.getName(), * container.getParent().getId()); container.setParent(pc); } */ if (container.getActivityStatus().equals( Constants.ACTIVITY_STATUS_DISABLED)) { Long containerIDArr[] = { container.getId() }; if (isContainerAvailableForDisabled(dao, containerIDArr)) { List disabledConts = new ArrayList(); /** * Preapare list of parent/child containers to disable * */ List<StorageContainer> disabledContainerList = new ArrayList<StorageContainer>(); disabledContainerList.add(persistentOldContainerForChange); persistentOldContainerForChange.setLocatedAtPosition(null); addEntriesInDisabledMap(persistentOldContainerForChange, disabledConts); // disabledConts.add(new StorageContainer(container)); setDisableToSubContainer(persistentOldContainerForChange, disabledConts, dao, disabledContainerList); persistentOldContainerForChange.getOccupiedPositions().clear(); Logger.out.debug("container.getActivityStatus() " + container.getActivityStatus()); disableSubStorageContainer(dao, sessionDataBean, disabledContainerList); persistentOldContainerForChange.setLocatedAtPosition(null); dao.update(persistentOldContainerForChange, sessionDataBean, true, true, false); try { CatissueCoreCacheManager catissueCoreCacheManager = CatissueCoreCacheManager .getInstance(); catissueCoreCacheManager.addObjectToCache( Constants.MAP_OF_DISABLED_CONTAINERS, (Serializable) disabledConts); } catch (CacheException e) { } } else { throw new DAOException(ApplicationProperties .getValue("errors.container.contains.specimen")); } } } public void setValuesinPersistentObject(StorageContainer persistentobject, StorageContainer newObject, DAO dao) throws DAOException { persistentobject.setActivityStatus(newObject.getActivityStatus()); persistentobject.setBarcode(newObject.getBarcode()); Capacity persistCapacity = persistentobject.getCapacity(); Capacity newCapacity = newObject.getCapacity(); persistCapacity.setOneDimensionCapacity(newCapacity .getOneDimensionCapacity()); persistCapacity.setTwoDimensionCapacity(newCapacity .getTwoDimensionCapacity()); Collection children = StorageContainerUtil.getChildren(dao, newObject .getId()); StorageContainerUtil.setChildren(children, dao, persistentobject .getId()); // persistentobject.setChildren(newObject.getChildren()); persistentobject.setCollectionProtocolCollection(newObject .getCollectionProtocolCollection()); persistentobject.setComment(newObject.getComment()); persistentobject.setFull(newObject.isFull()); persistentobject.setHoldsSpecimenArrayTypeCollection(newObject .getHoldsSpecimenArrayTypeCollection()); persistentobject.setHoldsSpecimenClassCollection(newObject .getHoldsSpecimenClassCollection()); persistentobject.setHoldsStorageTypeCollection(newObject .getHoldsStorageTypeCollection()); persistentobject.setName(newObject.getName()); persistentobject.setNoOfContainers(newObject.getNoOfContainers()); persistentobject.setParentChanged(newObject.isParentChanged()); persistentobject.setPositionChanged(newObject.isPositionChanged()); if (newObject.getLocatedAtPosition() != 
null) { ContainerPosition cntPos = persistentobject.getLocatedAtPosition(); if (cntPos == null) { cntPos = new ContainerPosition(); persistentobject.setLocatedAtPosition(cntPos); } cntPos.setPositionDimensionOne(newObject.getLocatedAtPosition() .getPositionDimensionOne()); cntPos.setPositionDimensionTwo(newObject.getLocatedAtPosition() .getPositionDimensionTwo()); cntPos.setParentContainer(newObject.getLocatedAtPosition() .getParentContainer()); cntPos.setOccupiedContainer(persistentobject); // persistentobject.setLocatedAtPosition(cntPos); } persistentobject.setSimilarContainerMap(newObject .getSimilarContainerMap()); persistentobject.setSite(newObject.getSite()); if (newObject.getSpecimenPositionCollection() != null) { Collection<SpecimenPosition> specPosColl = persistentobject .getSpecimenPositionCollection(); // if(specPosColl == null) // { // specPosColl = new HashSet<SpecimenPosition>(); // } specPosColl.addAll(newObject.getSpecimenPositionCollection()); // specPos.setSpecimen(newObject.getSpecimenPosition().getSpecimen()); // specPos.setStorageContainer(newObject); // persistentobject.setSpecimenPosition(specPos); } persistentobject.setStartNo(newObject.getStartNo()); persistentobject.setStorageType(newObject.getStorageType()); persistentobject.setTempratureInCentigrade(newObject .getTempratureInCentigrade()); } private void addEntriesInDisabledMap(StorageContainer container, List disabledConts) { String contNameKey = "StorageContName"; String contIdKey = "StorageContIdKey"; String parentContNameKey = "ParentContName"; String parentContIdKey = "ParentContId"; String pos1Key = "pos1"; String pos2Key = "pos2"; Map containerDetails = new TreeMap(); containerDetails.put(contNameKey, container.getName()); containerDetails.put(contIdKey, container.getId()); if (container != null && container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { containerDetails.put(parentContNameKey, container .getLocatedAtPosition().getParentContainer().getName()); containerDetails.put(parentContIdKey, container .getLocatedAtPosition().getParentContainer().getId()); containerDetails.put(pos1Key, container.getLocatedAtPosition() .getPositionDimensionOne()); containerDetails.put(pos2Key, container.getLocatedAtPosition() .getPositionDimensionTwo()); } disabledConts.add(containerDetails); } public void postUpdate(DAO dao, Object currentObj, Object oldObj, SessionDataBean sessionDataBean) throws BizLogicException, UserNotAuthorizedException { try { Map containerMap = StorageContainerUtil.getContainerMapFromCache(); StorageContainer currentContainer = (StorageContainer) currentObj; StorageContainer oldContainer = (StorageContainer) oldObj; // if name gets change then update the cache with new key if (!currentContainer.getName().equals(oldContainer.getName())) { StorageContainerUtil.updateNameInCache(containerMap, currentContainer, oldContainer); } // If capacity of container gets increased then insert all the new // positions in map .......... 
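// Compare the old and new capacity in both dimensions; if either dimension changed, the cached
// position entries for this container are refreshed via updateStoragePositions.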
int xOld = oldContainer.getCapacity().getOneDimensionCapacity() .intValue(); int xNew = currentContainer.getCapacity().getOneDimensionCapacity() .intValue(); int yOld = oldContainer.getCapacity().getTwoDimensionCapacity() .intValue(); int yNew = currentContainer.getCapacity().getTwoDimensionCapacity() .intValue(); if (xNew != xOld || yNew != yOld) { StorageContainerUtil.updateStoragePositions(containerMap, currentContainer, oldContainer); } // finish if (oldContainer != null && oldContainer.getLocatedAtPosition() != null && oldContainer.getLocatedAtPosition().getParentContainer() != null) { StorageContainer oldParentCont = (StorageContainer) HibernateMetaData .getProxyObjectImpl(oldContainer.getLocatedAtPosition() .getParentContainer()); StorageContainerUtil.insertSinglePositionInContainerMap( oldParentCont, containerMap, oldContainer .getLocatedAtPosition() .getPositionDimensionOne().intValue(), oldContainer.getLocatedAtPosition() .getPositionDimensionTwo().intValue()); } if (currentContainer != null && currentContainer.getLocatedAtPosition() != null && currentContainer.getLocatedAtPosition() .getParentContainer() != null) { StorageContainer currentParentCont = (StorageContainer) currentContainer .getLocatedAtPosition().getParentContainer(); StorageContainerUtil.deleteSinglePositionInContainerMap( currentParentCont, containerMap, currentContainer .getLocatedAtPosition() .getPositionDimensionOne().intValue(), currentContainer.getLocatedAtPosition() .getPositionDimensionTwo().intValue()); } if (currentContainer.getActivityStatus().equals( Constants.ACTIVITY_STATUS_DISABLED)) { List disabledConts = StorageContainerUtil .getListOfDisabledContainersFromCache(); List disabledContsAfterReverse = new ArrayList(); for (int i = disabledConts.size() - 1; i >= 0; i--) { disabledContsAfterReverse.add(disabledConts.get(i)); } Iterator itr = disabledContsAfterReverse.iterator(); while (itr.hasNext()) { Map disabledContDetails = (TreeMap) itr.next(); String contNameKey = "StorageContName"; String contIdKey = "StorageContIdKey"; String parentContNameKey = "ParentContName"; String parentContIdKey = "ParentContId"; String pos1Key = "pos1"; String pos2Key = "pos2"; StorageContainer cont = new StorageContainer(); cont.setId((Long) disabledContDetails.get(contIdKey)); cont.setName((String) disabledContDetails.get(contNameKey)); if (disabledContDetails.get(parentContIdKey) != null) { StorageContainer parent = new StorageContainer(); parent.setName((String) disabledContDetails .get(parentContNameKey)); parent.setId((Long) disabledContDetails .get(parentContIdKey)); // cont.setParent(parent); ContainerPosition cntPos = new ContainerPosition(); cntPos .setPositionDimensionOne((Integer) disabledContDetails .get(pos1Key)); cntPos .setPositionDimensionTwo((Integer) disabledContDetails .get(pos2Key)); cntPos.setParentContainer(parent); cntPos.setOccupiedContainer(cont); cont.setLocatedAtPosition(cntPos); } StorageContainerUtil.removeStorageContainerInContainerMap( cont, containerMap); } } } catch (Exception e) { Logger.out.error(e.getMessage(), e); throw new BizLogicException(e.getMessage(), e); } } /* * public boolean isContainerFull(String containerId, int dimX, int dimY) * throws DAOException { * * boolean availablePositions[][] = * getAvailablePositionsForContainer(containerId, dimX, dimY); * * dimX = availablePositions.length; for (int x = 1; x < dimX; x++) { dimY = * availablePositions[x].length; for (int y = 1; y < dimY; y++) { if * (availablePositions[x][y] == true) return false; } } return true; * } */ 
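/**
 * Returns true when a restriction present on the old container (collection protocol, holds-storage-type,
 * specimen class or specimen array type) is no longer covered by the new container. Per bug 3612, the
 * restrictions of a non-empty container may only be widened, so every old entry must still appear in the
 * new collection (the "All" storage/array type, id 1, covers everything).
 */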
private boolean checkForRestrictionsChanged(StorageContainer newContainer, StorageContainer oldContainer) { int flag = 0; Collection cpCollNew = newContainer.getCollectionProtocolCollection(); Collection cpCollOld = oldContainer.getCollectionProtocolCollection(); Collection storTypeCollNew = newContainer .getHoldsStorageTypeCollection(); Collection storTypeCollOld = oldContainer .getHoldsStorageTypeCollection(); Collection spClassCollNew = newContainer .getHoldsSpecimenClassCollection(); Collection spClassCollOld = oldContainer .getHoldsSpecimenClassCollection(); Collection spArrayTypeCollNew = newContainer .getHoldsSpecimenArrayTypeCollection(); Collection spArrayTypeCollOld = oldContainer .getHoldsSpecimenArrayTypeCollection(); /* * if (cpCollNew.size() != cpCollOld.size()) return true; */ /** * Bug 3612 - User should be able to change the restrictions if he * specifies the superset of the old restrictions if container is not * empty. */ Iterator itrOld = cpCollOld.iterator(); while (itrOld.hasNext()) { flag = 0; CollectionProtocol cpOld = (CollectionProtocol) itrOld.next(); Iterator itrNew = cpCollNew.iterator(); if (cpCollNew.size() == 0) { break; } while (itrNew.hasNext()) { CollectionProtocol cpNew = (CollectionProtocol) itrNew.next(); if (cpOld.getId().longValue() == cpNew.getId().longValue()) { flag = 1; break; } } if (flag != 1) return true; } /* * if (storTypeCollNew.size() != storTypeCollOld.size()) return true; */ itrOld = storTypeCollOld.iterator(); while (itrOld.hasNext()) { flag = 0; StorageType storOld = (StorageType) itrOld.next(); Iterator itrNew = storTypeCollNew.iterator(); while (itrNew.hasNext()) { StorageType storNew = (StorageType) itrNew.next(); if (storNew.getId().longValue() == storOld.getId().longValue() || storNew.getId().longValue() == 1) { flag = 1; break; } } if (flag != 1) return true; } /* * if (spClassCollNew.size() != spClassCollOld.size()) return true; */ itrOld = spClassCollOld.iterator(); while (itrOld.hasNext()) { flag = 0; String specimenOld = (String) itrOld.next(); Iterator itrNew = spClassCollNew.iterator(); while (itrNew.hasNext()) { String specimenNew = (String) itrNew.next(); if (specimenNew.equals(specimenOld)) { flag = 1; break; } } if (flag != 1) return true; } /* * if (spArrayTypeCollNew.size() != spArrayTypeCollOld.size()) return * true; */ itrOld = spArrayTypeCollOld.iterator(); while (itrOld.hasNext()) { flag = 0; SpecimenArrayType spArrayTypeOld = (SpecimenArrayType) itrOld .next(); Iterator itrNew = spArrayTypeCollNew.iterator(); while (itrNew.hasNext()) { SpecimenArrayType spArrayTypeNew = (SpecimenArrayType) itrNew .next(); if (spArrayTypeNew.getId().longValue() == spArrayTypeOld .getId().longValue() || spArrayTypeNew.getId().longValue() == 1) { flag = 1; break; } } if (flag != 1) return true; } return false; } protected void setPrivilege(DAO dao, String privilegeName, Class objectType, Long[] objectIds, Long userId, String roleId, boolean assignToUser, boolean assignOperation) throws SMException, DAOException { Logger.out.debug(" privilegeName:" + privilegeName + " objectType:" + objectType + " objectIds:" + edu.wustl.common.util.Utility.getArrayString(objectIds) + " userId:" + userId + " roleId:" + roleId + " assignToUser:" + assignToUser); // Aarti: Bug#1199 - We should be able to deassign // privilege on child even though user has privilege on the parent. // Thus commenting the check for privileges on parent. 
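// Apply the privilege change on the given containers, then propagate the same assign/deassign
// operation recursively to every sub-container.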
// if (assignOperation == Constants.PRIVILEGE_DEASSIGN) // { // isDeAssignable(dao, privilegeName, objectIds, userId, roleId, // assignToUser); // } super.setPrivilege(dao, privilegeName, objectType, objectIds, userId, roleId, assignToUser, assignOperation); assignPrivilegeToSubStorageContainer(dao, privilegeName, objectIds, userId, roleId, assignToUser, assignOperation); } /** * Checks whether the user/role has privilege on the parent * (Container/Site). If the user has privilege an exception is thrown * stating to deassign the privilege of parent first. * * @param dao * The dao object to get the related objects down the hierarchy. * @param objectIds * The objects ids of containerwhose parent is to be checked. * @param privilegeName * The privilege name. * @param userId * The user identifier. * @param roleId * The roleId in case privilege is assigned/deassigned to a role. * @param assignToUser * boolean which determines whether privilege is * assigned/deassigned to a user or role. * @throws Exception */ private void isDeAssignable(DAO dao, String privilegeName, Long[] objectIds, Long userId, String roleId, boolean assignToUser) throws Exception { // Aarti: Bug#2364 - Error while assigning privileges since attribute // parentContainer changed to parent String[] selectColumnNames = { "locatedAtPosition.parentContainer.id", "site.id" }; String[] whereColumnNames = { "id" }; List listOfSubElement = super.getRelatedObjects(dao, StorageContainer.class, selectColumnNames, whereColumnNames, objectIds); Logger.out.debug("Related Objects>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" + listOfSubElement.size()); String userName = new String(); if (assignToUser == true) { userName = SecurityManager.getInstance( StorageContainerBizLogic.class).getUserById( userId.toString()).getLoginName(); } // To get privilegeCache through // Singleton instance of PrivilegeManager, requires User LoginName PrivilegeManager privilegeManager = PrivilegeManager.getInstance(); PrivilegeCache privilegeCache = privilegeManager .getPrivilegeCache(userName); Iterator iterator = listOfSubElement.iterator(); while (iterator.hasNext()) { Object[] row = (Object[]) iterator.next(); // Parent storage container identifier. Object containerObject = (Object) row[0]; String className = StorageContainer.class.getName(); // Parent storage container identifier is null, the parent is a // site.. if ((row[0] == null) || (row[0].equals(""))) { containerObject = row[1]; className = Site.class.getName(); } Logger.out.debug("Container Object After ********************** : " + containerObject + "row[1] : " + row[1]); boolean permission = false; // Check the permission on the parent container or site. if (assignToUser == true)// If the privilege is // assigned/deassigned to a user. { // Call to SecurityManager.checkPermission bypassed & // instead, call redirected to privilegeCache.hasPrivilege permission = true; // Commented by Vishvesh & Ravindra for MSR for C1 // privilegeCache.hasPrivilege(className+"_"+containerObject.toString(), // privilegeName); // permission = // SecurityManager.getInstance(StorageContainerBizLogic.class).checkPermission(userName, // className, // containerObject.toString(), privilegeName); } else // If the privilege is assigned/deassigned to a user group. 
{ permission = privilegeManager.hasGroupPrivilege(roleId, className + "_" + containerObject.toString(), privilegeName); // permission = // SecurityManager.getInstance(StorageContainerBizLogic.class).checkPermission(roleId, // className, // containerObject.toString()); } // If the parent is a Site. if (permission == true && row[0] == null) { throw new DAOException( "Error : First de-assign privilege of the Parent Site with system identifier " + row[1].toString()); } else if (permission == true && row[0] != null)// If the parent is // a storage // container. { throw new DAOException( "Error : First de-assign privilege of the Parent Container with system identifier " + row[0].toString()); } } } /** * Assigns the privilege to all the sub-containers down the hierarchy. * * @param dao * The dao object to get the related objects down the hierarchy. * @param privilegeName * The privilege name. * @param storageContainerIDArr * The storage container id array. * @param userId * The user identifier. * @param roleId * The roleId in case privilege is assigned/deassigned to a role. * @param assignToUser * boolean which determines whether privilege is * assigned/deassigned to a user or role. * @param assignOperation * boolean which determines assign/deassign. * @throws SMException * @throws DAOException */ private void assignPrivilegeToSubStorageContainer(DAO dao, String privilegeName, Long[] storageContainerIDArr, Long userId, String roleId, boolean assignToUser, boolean assignOperation) throws SMException, DAOException { // Aarti: Bug#2364 - Error while assigning privileges since attribute // parentContainer changed to parent // Get list of sub container identifiers. List listOfSubStorageContainerId = super.getRelatedObjects(dao, StorageContainer.class, "locatedAtPosition.parentContainer", storageContainerIDArr); if (listOfSubStorageContainerId.isEmpty()) return; super.setPrivilege(dao, privilegeName, StorageContainer.class, Utility .toLongArray(listOfSubStorageContainerId), userId, roleId, assignToUser, assignOperation); assignPrivilegeToSubStorageContainer(dao, privilegeName, Utility .toLongArray(listOfSubStorageContainerId), userId, roleId, assignToUser, assignOperation); } /** * @param dao * @param objectIds * @param assignToUser * @param roleId * @throws DAOException * @throws SMException */ public void assignPrivilegeToRelatedObjectsForSite(DAO dao, String privilegeName, Long[] objectIds, Long userId, String roleId, boolean assignToUser, boolean assignOperation) throws SMException, DAOException { List listOfSubElement = super.getRelatedObjects(dao, StorageContainer.class, "site", objectIds); if (!listOfSubElement.isEmpty()) { super.setPrivilege(dao, privilegeName, StorageContainer.class, Utility.toLongArray(listOfSubElement), userId, roleId, assignToUser, assignOperation); } } // This method sets the Storage Type & Site (if applicable) of this // container. 
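// loadSite re-reads the selected site, rejects a closed site and pushes the site down to all
// sub-containers; loadStorageType resolves the storage type reference.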
protected void loadSite(DAO dao, StorageContainer container) throws DAOException { Site site = container.getSite(); // Setting the site if applicable if (site != null) { // Commenting dao.retrive() call as retrived object is not realy // required for further processing -Prafull Site siteObj = (Site) dao.retrieve(Site.class.getName(), container .getSite().getId()); if (siteObj != null) { // check for closed site checkStatus(dao, siteObj, "Site"); container.setSite(siteObj); setSiteForSubContainers(container, siteObj, dao); } } } protected void loadStorageType(DAO dao, StorageContainer container) throws DAOException { // Setting the Storage Type Object storageTypeObj = dao.retrieve(StorageType.class.getName(), container.getStorageType().getId()); if (storageTypeObj != null) { StorageType type = (StorageType) storageTypeObj; container.setStorageType(type); } } private void setSiteForSubContainers(StorageContainer storageContainer, Site site, DAO dao) throws DAOException { // Added storageContainer.getId()!=null check as this method fails in // case when it gets called from insert(). -PRafull if (storageContainer != null && storageContainer.getId() != null) { // Collection children = (Collection) // dao.retrieveAttribute(storageContainer.getClass().getName(), // storageContainer.getId(), "elements(children)"); Collection children = StorageContainerUtil.getChildren(dao, storageContainer.getId()); Logger.out .debug("storageContainer.getChildrenContainerCollection() " + children.size()); Iterator iterator = children.iterator(); while (iterator.hasNext()) { StorageContainer container = (StorageContainer) HibernateMetaData .getProxyObjectImpl(iterator.next()); container.setSite(site); setSiteForSubContainers(container, site, dao); } } } private boolean isUnderSubContainer(StorageContainer storageContainer, Long parentContainerID, DAO dao) throws DAOException { if (storageContainer != null) { // Ashish - 11/6/07 - Retriving children containers for performance // improvement. // Collection childrenColl = // (Collection)dao.retrieveAttribute(StorageContainer.class.getName(), // storageContainer.getId(),Constants.COLUMN_NAME_CHILDREN ); Collection childrenColl = StorageContainerUtil.getChildren(dao, storageContainer.getId()); Iterator iterator = childrenColl.iterator(); // storageContainer.getChildren() while (iterator.hasNext()) { StorageContainer container = (StorageContainer) iterator.next(); // Logger.out.debug("SUB CONTINER container // "+parentContainerID.longValue()+" // "+container.getId().longValue()+" // "+(parentContainerID.longValue()==container.getId().longValue())); if (parentContainerID.longValue() == container.getId() .longValue()) return true; if (isUnderSubContainer(container, parentContainerID, dao)) return true; } } return false; } // TODO TO BE REMOVED private void setDisableToSubContainer(StorageContainer storageContainer, List disabledConts, DAO dao, List disabledContainerList) throws DAOException { if (storageContainer != null) { // Ashish - 11/6/07 - Retriving children containers for performance // improvement. 
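// Recursively disable every child container: mark it disabled, record it in the disabled-containers
// map, clear its position and descend into its own children.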
// Collection childrenColl = // (Collection)dao.retrieveAttribute(StorageContainer.class.getName(), // storageContainer.getId(),Constants.COLUMN_NAME_CHILDREN ); Collection childrenColl = StorageContainerUtil.getChildren(dao, storageContainer.getId()); Iterator iterator = childrenColl.iterator(); while (iterator.hasNext()) { StorageContainer container = (StorageContainer) iterator.next(); container.setActivityStatus(Constants.ACTIVITY_STATUS_DISABLED); addEntriesInDisabledMap(container, disabledConts); /* whenever container is disabled free it's used positions */ container.setLocatedAtPosition(null); disabledContainerList.add(container); setDisableToSubContainer(container, disabledConts, dao, disabledContainerList); } } storageContainer.getOccupiedPositions().clear(); } // This method is called from labelgenerator. public long getNextContainerNumber() throws DAOException { String sourceObjectName = "CATISSUE_STORAGE_CONTAINER"; String[] selectColumnName = { "max(IDENTIFIER) as MAX_NAME" }; AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.JDBC_DAO); dao.openSession(null); List list = dao.retrieve(sourceObjectName, selectColumnName); dao.closeSession(); if (!list.isEmpty()) { List columnList = (List) list.get(0); if (!columnList.isEmpty()) { String str = (String) columnList.get(0); if (!str.equals("")) { long no = Long.parseLong(str); return no + 1; } } } return 1; } // what to do abt thi public String getContainerName(String siteName, String typeName, String operation, long Id) throws DAOException { String containerName = ""; if (typeName != null && siteName != null && !typeName.equals("") && !siteName.equals("")) { // Poornima:Max length of site name is 50 and Max length of // container type name is 100, in Oracle the name does not truncate // and it is giving error. So these fields are truncated in case it // is longer than 40. 
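// Example (illustrative values): site "SiteA" and type "Freezer" yield a default name of the form
// "SiteA_Freezer_<next container number>".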
// It also solves Bug 2829:System fails to create a default unique // storage container name String maxSiteName = siteName; String maxTypeName = typeName; if (siteName.length() > 40) { maxSiteName = siteName.substring(0, 39); } if (typeName.length() > 40) { maxTypeName = typeName.substring(0, 39); } if (operation.equals(Constants.ADD)) { containerName = maxSiteName + "_" + maxTypeName + "_" + String.valueOf(getNextContainerNumber()); } else { containerName = maxSiteName + "_" + maxTypeName + "_" + String.valueOf(Id); } } return containerName; } public int getNextContainerNumber(long parentID, long typeID, boolean isInSite) throws DAOException { String sourceObjectName = "CATISSUE_STORAGE_CONTAINER"; String[] selectColumnName = { "max(IDENTIFIER) as MAX_NAME" }; String[] whereColumnName = { "STORAGE_TYPE_ID", "PARENT_CONTAINER_ID" }; String[] whereColumnCondition = { "=", "=" }; Object[] whereColumnValue = { Long.valueOf(typeID), Long.valueOf(parentID) }; if (isInSite) { whereColumnName = new String[3]; whereColumnName[0] = "STORAGE_TYPE_ID"; whereColumnName[1] = "SITE_ID"; whereColumnName[2] = "PARENT_CONTAINER_ID"; whereColumnValue = new Object[3]; whereColumnValue[0] = Long.valueOf(typeID); whereColumnValue[1] = Long.valueOf(parentID); whereColumnValue[2] = "null"; whereColumnCondition = new String[3]; whereColumnCondition[0] = "="; whereColumnCondition[1] = "="; whereColumnCondition[2] = "is"; } String joinCondition = Constants.AND_JOIN_CONDITION; AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.JDBC_DAO); dao.openSession(null); List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); dao.closeSession(); if (!list.isEmpty()) { List columnList = (List) list.get(0); if (!columnList.isEmpty()) { String str = (String) columnList.get(0); Logger.out.info("str---------------:" + str); if (!str.equals("")) { int no = Integer.parseInt(str); return no + 1; } } } return 1; } private boolean isContainerEmpty(DAO dao, StorageContainer container) throws DAOException { // Retrieving all the occupied positions by child containers String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo" }; String[] whereColumnName = { "locatedAtPosition.parentContainer.id" }; String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { container.getId() }; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); if (!list.isEmpty()) { return false; } else { // Retrieving all the occupied positions by specimens sourceObjectName = Specimen.class.getName(); whereColumnName[0] = "specimenPosition.storageContainer.id"; selectColumnName[0] = "specimenPosition.positionDimensionOne"; selectColumnName[1] = "specimenPosition.positionDimensionTwo"; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); if (!list.isEmpty()) { return false; } else { // Retrieving all the occupied positions by specimens array type sourceObjectName = SpecimenArray.class.getName(); whereColumnName[0] = "locatedAtPosition.parentContainer.id"; selectColumnName[0] = "locatedAtPosition.positionDimensionOne"; selectColumnName[1] = "locatedAtPosition.positionDimensionTwo"; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); if (!list.isEmpty()) { 
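// At least one specimen array occupies a position in this container, so it is not empty.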
return false; } } } return true; } /** * Returns the data for generation of storage container tree view. * * @return the vector of tree nodes for the storage containers. */ public Vector getTreeViewData() throws DAOException { JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); dao.openSession(null); // String queryStr = " SELECT t8.IDENTIFIER, t8.CONTAINER_NAME, t5.TYPE, // t8.SITE_ID, " // + " t4.TYPE, t8.PARENT_IDENTIFIER, " // + " t8.PARENT_CONTAINER_NAME, t8.PARENT_CONTAINER_TYPE " // + " FROM (SELECT t7.IDENTIFIER, t7.CONTAINER_NAME, t7.SITE_ID, " // + " t7.STORAGE_TYPE_ID, t7.PARENT_IDENTIFIER, " // + " t7.PARENT_CONTAINER_NAME, t6.TYPE AS PARENT_CONTAINER_TYPE FROM " // + " (select t1.IDENTIFIER AS IDENTIFIER, t1.CONTAINER_NAME AS // CONTAINER_NAME, " // + " t1.SITE_ID AS SITE_ID, t1.STORAGE_TYPE_ID AS STORAGE_TYPE_ID, " // + " t2.IDENTIFIER AS PARENT_IDENTIFIER, t2.CONTAINER_NAME AS // PARENT_CONTAINER_NAME, " // + " t2.STORAGE_TYPE_ID AS PARENT_STORAGE_TYPE_ID " // + " from CATISSUE_STORAGE_CONTAINER t1 LEFT OUTER JOIN // CATISSUE_STORAGE_CONTAINER t2 " // + " on t1.PARENT_CONTAINER_ID = t2.IDENTIFIER) AS t7 LEFT OUTER JOIN // CATISSUE_STORAGE_TYPE t6 " // + " on t7.PARENT_STORAGE_TYPE_ID = t6.IDENTIFIER) AS t8, " // + " CATISSUE_SITE t4, CATISSUE_STORAGE_TYPE t5 " // + " WHERE t8.SITE_ID = t4.IDENTIFIER " + " AND t8.STORAGE_TYPE_ID = // t5.IDENTIFIER "; // String queryStr = "SELECT " + " t8.IDENTIFIER, t8.CONTAINER_NAME, // t5.NAME, t8.SITE_ID, t4.TYPE, t8.PARENT_IDENTIFIER, " // + " t8.PARENT_CONTAINER_NAME, t8.PARENT_CONTAINER_TYPE, // t8.ACTIVITY_STATUS, t8.PARENT_ACTIVITY_STATUS " + " FROM ( " + " // SELECT " // + " t7.IDENTIFIER, t7.CONTAINER_NAME, t7.SITE_ID, t7.STORAGE_TYPE_ID, // t7.ACTIVITY_STATUS, t7.PARENT_IDENTIFIER, " // + " t7.PARENT_CONTAINER_NAME, t6.NAME AS PARENT_CONTAINER_TYPE, // t7.PARENT_ACTIVITY_STATUS " + " FROM " + " ( " // + " select " // + " t1.IDENTIFIER AS IDENTIFIER, t1.NAME AS CONTAINER_NAME, // t11.SITE_ID AS SITE_ID, T1.ACTIVITY_STATUS AS ACTIVITY_STATUS," // + " t11.STORAGE_TYPE_ID AS STORAGE_TYPE_ID, t2.IDENTIFIER AS // PARENT_IDENTIFIER, " // + " t2.NAME AS PARENT_CONTAINER_NAME, t22.STORAGE_TYPE_ID AS // PARENT_STORAGE_TYPE_ID, T2.ACTIVITY_STATUS AS PARENT_ACTIVITY_STATUS" // + " from " + " CATISSUE_STORAGE_CONTAINER t11, // CATISSUE_STORAGE_CONTAINER t22, " // + " CATISSUE_CONTAINER t1 LEFT OUTER JOIN CATISSUE_CONTAINER t2 " + " // on t1.PARENT_CONTAINER_ID = t2.IDENTIFIER " // + " where " + " t1.identifier = t11.identifier and (t2.identifier is // null OR t2.identifier = t22.identifier)" + " ) " // + " t7 LEFT OUTER JOIN CATISSUE_CONTAINER_TYPE t6 on " + " // t7.PARENT_STORAGE_TYPE_ID = t6.IDENTIFIER " + " ) " // + " t8, CATISSUE_SITE t4, CATISSUE_CONTAINER_TYPE t5 WHERE t8.SITE_ID // = t4.IDENTIFIER " + " AND t8.STORAGE_TYPE_ID = t5.IDENTIFIER "; // Bug-2630: Added by jitendra String queryStr = "SELECT " + "t8.IDENTIFIER, t8.CONTAINER_NAME, t5.NAME, t8.SITE_ID, t4.TYPE, " + "t8. PARENT_IDENTIFIER, t8.PARENT_CONTAINER_NAME, t8.PARENT_CONTAINER_TYPE, " + "t8. ACTIVITY_STATUS, t8.PARENT_ACTIVITY_STATUS " + "FROM " + "( " + "SELECT " + "t7. IDENTIFIER, t7.CONTAINER_NAME, t7.SITE_ID, t7.STORAGE_TYPE_ID, " + "t7.ACTIVITY_STATUS, t7. PARENT_IDENTIFIER, " + "t7.PARENT_CONTAINER_NAME, t6.NAME AS PARENT_CONTAINER_TYPE, t7.PARENT_ACTIVITY_STATUS " + "FROM " + "( " + "select " + "t10. IDENTIFIER AS IDENTIFIER, t10.CONTAINER_NAME AS CONTAINER_NAME, t10.SITE_ID AS SITE_ID, " + "T10. 
ACTIVITY_STATUS AS ACTIVITY_STATUS, t10.STORAGE_TYPE_ID AS STORAGE_TYPE_ID, " + "t10.PARENT_IDENTIFIER AS PARENT_IDENTIFIER, t10.PARENT_CONTAINER_NAME AS PARENT_CONTAINER_NAME, " + "t22. STORAGE_TYPE_ID AS PARENT_STORAGE_TYPE_ID, T10.PARENT_ACTIVITY_STATUS AS PARENT_ACTIVITY_STATUS " + "from " + "( " + "select " + "t1. IDENTIFIER AS IDENTIFIER, t1.NAME AS CONTAINER_NAME, t11.SITE_ID AS SITE_ID, " + "T1. ACTIVITY_STATUS AS ACTIVITY_STATUS, t11.STORAGE_TYPE_ID AS STORAGE_TYPE_ID, " + "t2.IDENTIFIER AS PARENT_IDENTIFIER, t2.NAME AS PARENT_CONTAINER_NAME, " + "T2.ACTIVITY_STATUS AS PARENT_ACTIVITY_STATUS " + "from " + "CATISSUE_STORAGE_CONTAINER t11,CATISSUE_CONTAINER t1 LEFT OUTER JOIN " + "CATISSUE_CONTAINER t2 " + "on t1.PARENT_CONTAINER_ID = t2.IDENTIFIER " + "where t1.identifier = t11.identifier " + ")t10 " + "LEFT OUTER JOIN CATISSUE_STORAGE_CONTAINER t22 on t10.PARENT_IDENTIFIER = t22.identifier " + ")t7 " + "LEFT OUTER JOIN CATISSUE_CONTAINER_TYPE t6 on t7.PARENT_STORAGE_TYPE_ID = t6.IDENTIFIER " + ") t8, CATISSUE_SITE t4, CATISSUE_CONTAINER_TYPE t5 " + "WHERE " + "t8.SITE_ID = t4.IDENTIFIER AND t8.STORAGE_TYPE_ID = t5.IDENTIFIER "; Logger.out.debug("Storage Container query......................" + queryStr); List list = null; try { list = dao.executeQuery(queryStr, null, false, null); // printRecords(list); } catch (Exception ex) { throw new DAOException(ex.getMessage()); } dao.closeSession(); return getTreeNodeList(list); } /** * Returns the vector of tree node for the storage container list. * * @param resultList * the storage container list. * @return the vector of tree node for the storage container list. * @throws DAOException */ public Vector getTreeNodeList(List resultList) throws DAOException { Map containerRelationMap = new HashMap(); // Vector of Tree Nodes for all the storage containers. Vector treeNodeVector = new Vector(); Vector finalNodeVector = new Vector(); if (resultList.isEmpty() == false) { Iterator iterator = resultList.iterator(); while (iterator.hasNext()) { List rowList = (List) iterator.next(); // Bug-2630: Added by jitendra if ((String) rowList.get(8) != null && !((String) rowList.get(8)) .equals(Constants.ACTIVITY_STATUS_DISABLED)) { // Mandar : code for tooltip for the container String toolTip = getToolTipData((String) rowList.get(0)); // Create the tree node for the child node. TreeNode treeNodeImpl = new StorageContainerTreeNode(Long .valueOf((String) rowList.get(0)), (String) rowList .get(1), (String) rowList.get(1), toolTip, (String) rowList.get(8)); // Add the tree node in the Vector if it is not present. if (treeNodeVector.contains(treeNodeImpl) == false) { treeNodeVector.add(treeNodeImpl); } } if ((String) rowList.get(5) != "") // if parent container is // not null { List childIds = new ArrayList(); // Create the relationship map for parent container id and // the child container ids. // Check if the parent container already has an entry in the // Map and get it. if (containerRelationMap.containsKey(Long .valueOf((String) rowList.get(5)))) { childIds = (List) containerRelationMap.get(Long .valueOf((String) rowList.get(5))); } // Put the container in the child container list of the // parent container // and update the Map. childIds.add(Long.valueOf((String) rowList.get(0))); containerRelationMap.put(Long.valueOf((String) rowList .get(5)), childIds); // Create the tree node for the parent node and add it in // the vector if not present. 
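// Result columns: 5 = parent container id, 6 = parent container name, 9 = parent activity status.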
String toolTip = getToolTipData((String) rowList.get(5)); TreeNode treeNodeImpl = new StorageContainerTreeNode(Long .valueOf((String) rowList.get(5)), (String) rowList .get(6), (String) rowList.get(6), toolTip, (String) rowList.get(9)); if (treeNodeVector.contains(treeNodeImpl) == false) { treeNodeVector.add(treeNodeImpl); } } } // printVectorMap(treeNodeVector, containerRelationMap); finalNodeVector = createHierarchy(containerRelationMap, treeNodeVector); } return finalNodeVector; } /** * Creates the hierarchy of the tree nodes of the container according to the * container relationship map. * * @param containerRelationMap * the container relationship map. * @param treeNodeVector * the vector of tree nodes. * @return the hierarchy of the tree nodes of the container according to the * container relationship map. * @throws DAOException */ private Vector createHierarchy(Map containerRelationMap, Vector treeNodeVector) throws DAOException { // Get the ket set of the parent containers. Set keySet = containerRelationMap.keySet(); Iterator iterator = keySet.iterator(); while (iterator.hasNext()) { // Get the parent container id and create the tree node. Long parentId = (Long) iterator.next(); StorageContainerTreeNode parentTreeNodeImpl = new StorageContainerTreeNode( parentId, null, null); parentTreeNodeImpl = (StorageContainerTreeNode) treeNodeVector .get(treeNodeVector.indexOf(parentTreeNodeImpl)); // Get the child container ids and create the tree nodes. List childNodeList = (List) containerRelationMap.get(parentId); Iterator childIterator = childNodeList.iterator(); while (childIterator.hasNext()) { Long childId = (Long) childIterator.next(); StorageContainerTreeNode childTreeNodeImpl = new StorageContainerTreeNode( childId, null, null); childTreeNodeImpl = (StorageContainerTreeNode) treeNodeVector .get(treeNodeVector.indexOf(childTreeNodeImpl)); // Set the relationship between the parent and child tree nodes. childTreeNodeImpl.setParentNode(parentTreeNodeImpl); parentTreeNodeImpl.getChildNodes().add(childTreeNodeImpl); } // for sorting Vector tempChildNodeList = parentTreeNodeImpl.getChildNodes(); parentTreeNodeImpl.setChildNodes(tempChildNodeList); } // Get the container whose tree node has parent null // and get its site tree node and set it as its child. Vector parentNodeVector = new Vector(); iterator = treeNodeVector.iterator(); // System.out.println("\nNodes without Parent\n"); while (iterator.hasNext()) { StorageContainerTreeNode treeNodeImpl = (StorageContainerTreeNode) iterator .next(); if (treeNodeImpl.getParentNode() == null) { // System.out.print("\n" + treeNodeImpl); TreeNodeImpl siteNode = getSiteTreeNode(treeNodeImpl .getIdentifier()); // System.out.print("\tSiteNodecreated: " + siteNode); if (parentNodeVector.contains(siteNode)) { siteNode = (TreeNodeImpl) parentNodeVector .get(parentNodeVector.indexOf(siteNode)); // System.out.print("SiteNode Found"); } else { parentNodeVector.add(siteNode); // System.out.print("\tSiteNodeSet: " + siteNode); } treeNodeImpl.setParentNode(siteNode); siteNode.getChildNodes().add(treeNodeImpl); // for sorting Vector tempChildNodeList = siteNode.getChildNodes(); siteNode.setChildNodes(tempChildNodeList); } } // Get the containers under site. 
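// Append containers placed directly under a site, dropping the site nodes already collected above,
// then sort the final vector for display.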
Vector containersUnderSite = getContainersUnderSite(); containersUnderSite.removeAll(parentNodeVector); parentNodeVector.addAll(containersUnderSite); Utility.sortTreeVector(parentNodeVector); return parentNodeVector; } private Vector getContainersUnderSite() throws DAOException { // String sql = " SELECT sc.IDENTIFIER, sc.CONTAINER_NAME, scType.TYPE, // site.IDENTIFIER, site.NAME, site.TYPE " // + " from catissue_storage_container sc, catissue_site site, // catissue_storage_type scType " // + " where sc.SITE_ID = site.IDENTIFIER AND sc.STORAGE_TYPE_ID = // scType.IDENTIFIER " // + " and sc.PARENT_CONTAINER_ID is NULL"; String sql = "SELECT sc.IDENTIFIER, cn.NAME, scType.NAME, site.IDENTIFIER," + "site.NAME, site.TYPE from catissue_storage_container sc, " + "catissue_site site, catissue_container_type scType, " + "catissue_container cn where sc.SITE_ID = site.IDENTIFIER " + "AND sc.STORAGE_TYPE_ID = scType.IDENTIFIER " + "and sc.IDENTIFIER = cn.IDENTIFIER " + "and cn.IDENTIFIER not in (select pos.CONTAINER_ID from catissue_container_position pos)"; JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); List resultList = new ArrayList(); Vector containerNodeVector = new Vector(); try { dao.openSession(null); resultList = dao.executeQuery(sql, null, false, null); dao.closeSession(); // System.out.println("\nIn getContainersUnderSite()\n "); printRecords(resultList); } catch (Exception daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } Iterator iterator = resultList.iterator(); while (iterator.hasNext()) { List rowList = (List) iterator.next(); StorageContainerTreeNode containerNode = new StorageContainerTreeNode( Long.valueOf((String) rowList.get(0)), (String) rowList .get(1), (String) rowList.get(1)); StorageContainerTreeNode siteNode = new StorageContainerTreeNode( Long.valueOf((String) rowList.get(3)), (String) rowList .get(4), (String) rowList.get(4)); if (containerNodeVector.contains(siteNode)) { siteNode = (StorageContainerTreeNode) containerNodeVector .get(containerNodeVector.indexOf(siteNode)); } else containerNodeVector.add(siteNode); containerNode.setParentNode(siteNode); siteNode.getChildNodes().add(containerNode); } return containerNodeVector; } /** * Returns the site tree node of the container with the given identifier. * * @param identifier * the identifier of the container. * @return the site tree node of the container with the given identifier. * @throws DAOException */ private TreeNodeImpl getSiteTreeNode(Long identifier) throws DAOException { String sql = "SELECT site.IDENTIFIER, site.NAME, site.TYPE " + " from catissue_storage_container sc, catissue_site site " + " where sc.SITE_ID = site.IDENTIFIER AND sc.IDENTIFIER = " + identifier.longValue(); Logger.out.debug("Site Query........................." + sql); List resultList = executeSQL(sql); TreeNodeImpl siteTreeNode = null; if (resultList.isEmpty() == false) { List siteRecord = (List) resultList.get(0); siteTreeNode = new StorageContainerTreeNode(Long .valueOf((String) siteRecord.get(0)), (String) siteRecord .get(1), (String) siteRecord.get(1)); } return siteTreeNode; } /** * This method will add all the node into the vector that contains any * container node and add a dummy container node to show [+] sign on the UI, * so that on clicking expand sign ajax call will retrieve child container * node under the site node. 
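* Only sites on which the given user has privileges (checked via getRelatedSiteIds / hasPrivilegeonSite) are included.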
*/ public Vector getSiteWithDummyContainer(Long userId) throws DAOException { String sql = "SELECT site.IDENTIFIER, site.NAME,COUNT(site.NAME) FROM CATISSUE_SITE " + " site join CATISSUE_STORAGE_CONTAINER sc ON sc.site_id = site.identifier join " + "CATISSUE_CONTAINER con ON con.identifier = sc.identifier WHERE con.ACTIVITY_STATUS!='Disabled' " + "GROUP BY site.IDENTIFIER, site.NAME" +" order by upper(site.NAME)"; JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); List resultList = new ArrayList(); Long nodeIdentifier; String nodeName = null; String dummyNodeName = null; Vector containerNodeVector = new Vector(); try { dao.openSession(null); resultList = dao.executeQuery(sql, null, false, null); dao.closeSession(); } catch (Exception daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } Iterator iterator = resultList.iterator(); Set<Long> siteIdSet = new UserBizLogic().getRelatedSiteIds(userId); while (iterator.hasNext()) { List rowList = (List) iterator.next(); nodeIdentifier = Long.valueOf((String) rowList.get(0)); if (hasPrivilegeonSite(siteIdSet, nodeIdentifier)) { nodeName = (String) rowList.get(1); dummyNodeName = Constants.DUMMY_NODE_NAME; StorageContainerTreeNode siteNode = new StorageContainerTreeNode( nodeIdentifier, nodeName, nodeName); StorageContainerTreeNode dummyContainerNode = new StorageContainerTreeNode( nodeIdentifier, dummyNodeName, dummyNodeName); dummyContainerNode.setParentNode(siteNode); siteNode.getChildNodes().add(dummyContainerNode); containerNodeVector.add(siteNode); } } return containerNodeVector; } /** * @param identifier * Identifier of the container or site node * @param nodeName * Name of the site or container * @param parentId * parent identifier of the selected node * @return containerNodeVector This vector contains all the containers * @throws DAOException * @Description This method will retrieve all the containers under the * selected node */ public Vector<StorageContainerTreeNode> getStorageContainers( Long identifier, String nodeName, String parentId) throws DAOException { String sql = createSql(identifier, parentId); JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); String dummyNodeName = Constants.DUMMY_NODE_NAME; String containerName = null; Long nodeIdentifier; Long parentContainerId; Long childCount; List resultList = new ArrayList(); Vector<StorageContainerTreeNode> containerNodeVector = new Vector<StorageContainerTreeNode>(); try { dao.openSession(null); resultList = dao.executeQuery(sql, null, false, null); dao.closeSession(); } catch (Exception daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } Iterator iterator = resultList.iterator(); while (iterator.hasNext()) { List rowList = (List) iterator.next(); nodeIdentifier = Long.valueOf((String) rowList.get(0)); containerName = (String) rowList.get(1); parentContainerId = Long.valueOf((String) rowList.get(2)); childCount = Long.valueOf((String) rowList.get(3)); StorageContainerTreeNode containerNode = new StorageContainerTreeNode( nodeIdentifier, containerName, containerName); StorageContainerTreeNode parneContainerNode = new StorageContainerTreeNode( parentContainerId, nodeName, nodeName); if (childCount != null && childCount > 0) { StorageContainerTreeNode dummyContainerNode = new StorageContainerTreeNode( Long.valueOf((String) rowList.get(0)), dummyNodeName, dummyNodeName); dummyContainerNode.setParentNode(containerNode); containerNode.getChildNodes().add(dummyContainerNode); } if 
(containerNodeVector.contains(containerNode)) { containerNode = (StorageContainerTreeNode) containerNodeVector .get(containerNodeVector.indexOf(containerNode)); } else { containerNodeVector.add(containerNode); } containerNode.setParentNode(parneContainerNode); parneContainerNode.getChildNodes().add(containerNode); } if (containerNodeVector.isEmpty()) { StorageContainerTreeNode containerNode = new StorageContainerTreeNode( identifier, nodeName, nodeName); containerNodeVector.add(containerNode); } return containerNodeVector; } /** * @param identifier * Identifier of the container or site node * @param parentId * Parent identifier of the selected node * @return String sql This method with return the sql depending on the node * clicked (i.e parent Node or child node) */ private String createSql(Long identifier, String parentId) { String sql; if (!Constants.ZERO_ID.equals(parentId)) { sql = "SELECT cn.IDENTIFIER, cn.name, pos.PARENT_CONTAINER_ID,count(sc3.IDENTIFIER) " + "FROM CATISSUE_CONTAINER cn join CATISSUE_STORAGE_CONTAINER sc ON sc.IDENTIFIER=cn.IDENTIFIER " + "left outer join catissue_container_position pos on pos.CONTAINER_ID=cn.IDENTIFIER left outer join " + "catissue_container_position con_pos on con_pos.PARENT_CONTAINER_ID=cn.IDENTIFIER left outer join " + "CATISSUE_STORAGE_CONTAINER sc3 on con_pos.CONTAINER_ID=sc3.IDENTIFIER " + "WHERE pos.PARENT_CONTAINER_ID= " + identifier + " AND cn.ACTIVITY_STATUS!='Disabled' GROUP BY cn.IDENTIFIER, cn.NAME,pos.PARENT_CONTAINER_ID"; } else { sql = "SELECT cn.IDENTIFIER, cn.NAME,site.identifier,COUNT(sc3.IDENTIFIER) " + "FROM CATISSUE_CONTAINER cn join CATISSUE_STORAGE_CONTAINER sc " + "ON sc.IDENTIFIER=cn.IDENTIFIER join CATISSUE_SITE site " + "ON sc.site_id = site.identifier left outer join CATISSUE_CONTAINER_POSITION pos " + "ON pos.PARENT_CONTAINER_ID=cn.IDENTIFIER left outer join " + "CATISSUE_STORAGE_CONTAINER sc3 ON pos.CONTAINER_ID=sc3.IDENTIFIER " + "WHERE site.identifier=" + identifier + " AND cn.ACTIVITY_STATUS!='Disabled' AND cn.IDENTIFIER NOT IN (SELECT p2.CONTAINER_ID FROM CATISSUE_CONTAINER_POSITION p2) " + "GROUP BY cn.IDENTIFIER, cn.NAME,site.identifier "; } return sql; } private boolean[][] getStorageContainerFullStatus(DAO dao, StorageContainer parentContainer, Collection children) throws DAOException { // List list = dao.retrieve(StorageContainer.class.getName(), "id", id); boolean[][] fullStatus = null; Integer oneDimensionCapacity = parentContainer.getCapacity() .getOneDimensionCapacity(); Integer twoDimensionCapacity = parentContainer.getCapacity() .getTwoDimensionCapacity(); fullStatus = new boolean[oneDimensionCapacity.intValue() + 1][twoDimensionCapacity .intValue() + 1]; // Collection children = StorageContainerUtil.getChildren(dao, // storageContainer.getId()); if (children != null) { Iterator iterator = children.iterator(); Logger.out .debug("storageContainer.getChildrenContainerCollection().size(): " + children.size()); while (iterator.hasNext()) { StorageContainer childStorageContainer = (StorageContainer) iterator .next(); if (childStorageContainer.getLocatedAtPosition() != null) { Integer positionDimensionOne = childStorageContainer .getLocatedAtPosition().getPositionDimensionOne(); Integer positionDimensionTwo = childStorageContainer .getLocatedAtPosition().getPositionDimensionTwo(); Logger.out.debug("positionDimensionOne : " + positionDimensionOne.intValue()); Logger.out.debug("positionDimensionTwo : " + positionDimensionTwo.intValue()); fullStatus[positionDimensionOne.intValue()][positionDimensionTwo 
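                    // marks this child's 1-based (x, y) slot as occupied in the
                    // fullStatus grid; index 0 of each dimension is unused
                    // padding (descriptive note added for clarity)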
                            .intValue()] = true;
                }
            }
        }
        return fullStatus;
    }

    /**
     * @param containerId
     * @return
     * @throws DAOException
     */
    public Collection getContainerChildren(Long containerId)
            throws DAOException
    {
        AbstractDAO dao = DAOFactory.getInstance().getDAO(
                Constants.HIBERNATE_DAO);
        Collection<Container> children = null;
        try
        {
            dao.openSession(null);
            children = StorageContainerUtil.getChildren(dao, containerId);
        }
        catch (DAOException daoExp)
        {
            daoExp.printStackTrace();
            Logger.out.error(daoExp.getMessage(), daoExp);
        }
        finally
        {
            dao.closeSession();
        }
        return children;
    }

    private void disableSubStorageContainer(DAO dao,
            SessionDataBean sessionDataBean,
            List<StorageContainer> disabledContainerList) throws DAOException,
            UserNotAuthorizedException
    {
        // adding updated participantMap to cache
        // catissueCoreCacheManager.addObjectToCache(Constants.MAP_OF_PARTICIPANTS,
        // participantMap);
        int count = disabledContainerList.size();
        List containerIdList = new ArrayList();
        for (int i = 0; i < count; i++)
        {
            StorageContainer container = disabledContainerList.get(i);
            containerIdList.add(container.getId());
        }
        List listOfSpecimenIDs = getRelatedObjects(dao, Specimen.class,
                "specimenPosition.storageContainer", Utility
                        .toLongArray(containerIdList));
        if (!listOfSpecimenIDs.isEmpty())
        {
            throw new DAOException(ApplicationProperties
                    .getValue("errors.container.contains.specimen"));
        }
        // Update containers to disabled
        for (int i = 0; i < count; i++)
        {
            StorageContainer container = disabledContainerList.get(i);
            dao.update(container, sessionDataBean, true, true, false);
        }
        auditDisabledObjects(dao, "CATISSUE_CONTAINER", containerIdList);
    }

    private void disableSubStorageContainer(DAO dao,
            SessionDataBean sessionDataBean, Long storageContainerIDArr[])
            throws DAOException, UserNotAuthorizedException
    {
        // adding updated participantMap to cache
        // catissueCoreCacheManager.addObjectToCache(Constants.MAP_OF_PARTICIPANTS,
        // participantMap);
        List listOfSpecimenIDs = getRelatedObjects(dao, Specimen.class,
                "specimenPosition.storageContainer", storageContainerIDArr);
        if (!listOfSpecimenIDs.isEmpty())
        {
            throw new DAOException(ApplicationProperties
                    .getValue("errors.container.contains.specimen"));
        }
        List listOfSubStorageContainerId = super.disableObjects(dao,
                Container.class, "locatedAtPosition.parentContainer",
                "CATISSUE_CONTAINER", "PARENT_CONTAINER_ID",
                storageContainerIDArr);
        if (listOfSubStorageContainerId.isEmpty())
        {
            return;
        }
        else
        {
            Iterator itr = listOfSubStorageContainerId.iterator();
            while (itr.hasNext())
            {
                Long contId = (Long) itr.next();
                String sourceObjectName = StorageContainer.class.getName();
                Object object = dao.retrieve(sourceObjectName, contId);
                if (object != null)
                {
                    StorageContainer cont = (StorageContainer) object;
                    // cont.setParent(null);
                    cont.setLocatedAtPosition(null);
                    // dao.update(cont, sessionDataBean, true, true, false);
                }
            }
        }
        disableSubStorageContainer(dao, sessionDataBean, Utility
                .toLongArray(listOfSubStorageContainerId));
    }

    // Checks for whether the user is trying to use a container without
    // privilege to use it
    // This is needed since now users can enter the values in the edit box
    public boolean validateContainerAccess(DAO dao, StorageContainer container,
            SessionDataBean sessionDataBean) throws SMException
    {
        Logger.out.debug("validateContainerAccess..................");
        // Check the session bean before dereferencing it; admins may use any
        // container.
        if (sessionDataBean != null && sessionDataBean.isAdmin())
        {
            return true;
        }
        String userName = (sessionDataBean == null) ? null : sessionDataBean
                .getUserName();
        // To get privilegeCache through
        // Singleton instance of PrivilegeManager, requires User LoginName
        // PrivilegeManager
privilegeManager = PrivilegeManager.getInstance(); // PrivilegeCache privilegeCache = // privilegeManager.getPrivilegeCache(userName); // Implemented as per the requirements of MSR. User should use only // those sites for which he has access to. Long userId = sessionDataBean.getUserId(); Site site = null; Set loggedInUserSiteIdSet = null; try { site = getSite(dao, container.getId()); loggedInUserSiteIdSet = new UserBizLogic().getRelatedSiteIds(userId); if(dao instanceof HibernateDAO) { ((HibernateDAO)dao).openSession(null); } } catch (DAOException e) { return false; } finally { // try { // //dao.closeSession(); // } catch (DAOException e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } } if (loggedInUserSiteIdSet != null && loggedInUserSiteIdSet.contains(new Long(site.getId()))) { return true; } else { return false; } // if // (!SecurityManager.getInstance(this.getClass()).isAuthorized(userName, // StorageContainer.class.getName() + "_" + container.getId(), // Permissions.USE)) // Call to SecurityManager.isAuthorized bypassed & // instead, call redirected to privilegeCache.hasPrivilege // Commented by Ravindra and Vishvesh because this is not how // if (!privilegeCache.hasPrivilege(StorageContainer.class.getName() + // "_" + container.getId(), Permissions.USE)) // { // return false; // } // else // return true; } // Checks for whether the user is trying to place the container in a // position // outside the range of parent container // This is needed since now users can enter the values in the edit box protected boolean validatePosition(StorageContainer parent, StorageContainer current) { int posOneCapacity = parent.getCapacity().getOneDimensionCapacity() .intValue(); int posTwoCapacity = parent.getCapacity().getTwoDimensionCapacity() .intValue(); int positionDimensionOne = current.getLocatedAtPosition() .getPositionDimensionOne().intValue(); int positionDimensionTwo = current.getLocatedAtPosition() .getPositionDimensionTwo().intValue(); Logger.out.debug("validatePosition C : " + positionDimensionOne + " : " + positionDimensionTwo); Logger.out.debug("validatePosition P : " + posOneCapacity + " : " + posTwoCapacity); if ((positionDimensionOne > posOneCapacity) || (positionDimensionTwo > posTwoCapacity)) { Logger.out.debug("validatePosition false"); return false; } Logger.out.debug("validatePosition true"); return true; } private boolean validatePosition(int posOneCapacity, int posTwoCapacity, StorageContainer current) { int positionDimensionOne = current.getLocatedAtPosition() .getPositionDimensionOne().intValue(); int positionDimensionTwo = current.getLocatedAtPosition() .getPositionDimensionTwo().intValue(); Logger.out.debug("validatePosition C : " + positionDimensionOne + " : " + positionDimensionTwo); Logger.out.debug("validatePosition P : " + posOneCapacity + " : " + posTwoCapacity); if ((positionDimensionOne > posOneCapacity) || (positionDimensionTwo > posTwoCapacity)) { Logger.out.debug("validatePosition false"); return false; } Logger.out.debug("validatePosition true"); return true; } /** * Bug ID: 4038 Patch ID: 4038_2 See also: 1-3 */ /** * This method is to validae position based on parent container id * * @param dao * Object DAO * @param container * current container * @return boolean value based on validation * @throws DAOException * exception occured while DB handling */ private boolean validatePosition(DAO dao, StorageContainer container) throws DAOException { String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "id", 
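                // columns fetched for the parent container: its id plus its
                // one- and two-dimension capacities, used below to bound the
                // child's requested position (descriptive note)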
"capacity.oneDimensionCapacity", "capacity.twoDimensionCapacity" }; String[] whereColumnName = { "id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { container.getLocatedAtPosition() .getParentContainer().getId() }; String joinCondition = null; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); Integer pcCapacityOne = 0; Integer pcCapacityTwo = 0; if (!list.isEmpty()) { Object[] obj1 = (Object[]) list.get(0); pcCapacityOne = (Integer) obj1[1]; pcCapacityTwo = (Integer) obj1[2]; } int positionDimensionOne = container.getLocatedAtPosition() .getPositionDimensionOne().intValue(); int positionDimensionTwo = container.getLocatedAtPosition() .getPositionDimensionTwo().intValue(); Logger.out.debug("validatePosition C : " + positionDimensionOne + " : " + positionDimensionTwo); Logger.out.debug("validatePosition P : " + pcCapacityOne + " : " + pcCapacityTwo); if ((positionDimensionOne > pcCapacityOne) || (positionDimensionTwo > pcCapacityTwo)) { Logger.out.debug("validatePosition false"); return false; } Logger.out.debug("validatePosition true"); return true; } private boolean isContainerAvailableForDisabled(DAO dao, Long[] containerIds) { List containerList = new ArrayList(); if (containerIds.length != 0) { try { String sourceObjectName = Specimen.class.getName(); String[] selectColumnName = { "id" }; String[] whereColumnName1 = { "specimenPosition.storageContainer.id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition1 = { "in" }; Object[] whereColumnValue1 = { containerIds }; String joinCondition = Constants.AND_JOIN_CONDITION; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); return false; } else { sourceObjectName = SpecimenArray.class.getName(); whereColumnName1[0] = "locatedAtPosition.parentContainer.id"; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { return false; } /* * else { sourceObjectName = * StorageContainer.class.getName(); String[] * whereColumnName = {"parent.id"}; containerList = * dao.retrieve(sourceObjectName, selectColumnName, * whereColumnName, whereColumnCondition1, * whereColumnValue1, joinCondition); * } */ } } catch (Exception e) { Logger.out.debug("Error in isContainerAvailable : " + e); return false; } } else { return true; } return isContainerAvailableForDisabled(dao, Utility .toLongArray(containerList)); } // -- to check if storageContainer is available or used protected boolean isContainerAvailableForPositions(DAO dao, StorageContainer current) { try { String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "id" }; String[] whereColumnName = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo", "locatedAtPosition.parentContainer" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = { "=", "=", "=" }; Object[] whereColumnValue = { current.getLocatedAtPosition().getPositionDimensionOne(), current.getLocatedAtPosition().getPositionDimensionTwo(), 
current.getLocatedAtPosition().getParentContainer() }; String joinCondition = Constants.AND_JOIN_CONDITION; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); Logger.out.debug("current.getParentContainer() :" + current.getLocatedAtPosition().getParentContainer()); // check if StorageContainer exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**********IN isContainerAvailable : obj::::::: --------- " + obj); return false; } else { sourceObjectName = Specimen.class.getName(); String[] whereColumnName1 = { "specimenPosition.positionDimensionOne", "specimenPosition.positionDimensionTwo", "specimenPosition.storageContainer.id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition1 = { "=", "=", "=" }; Object[] whereColumnValue1 = { current.getLocatedAtPosition() .getPositionDimensionOne(), current.getLocatedAtPosition() .getPositionDimensionTwo(), current.getLocatedAtPosition().getParentContainer() .getId() }; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**************IN isPositionAvailable : obj::::::: --------------- " + obj); return false; } else { sourceObjectName = SpecimenArray.class.getName(); whereColumnName1[0] = "locatedAtPosition.positionDimensionOne"; whereColumnName1[1] = "locatedAtPosition.positionDimensionTwo"; whereColumnName1[2] = "locatedAtPosition.parentContainer.id"; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**************IN isPositionAvailable : obj::::::: --------------- " + obj); return false; } } } return true; } catch (Exception e) { Logger.out.debug("Error in isContainerAvailable : " + e); return false; } } // Will check only for valid range of the StorageContainer protected boolean validatePosition(StorageContainer storageContainer, String posOne, String posTwo) { try { Logger.out .debug("storageContainer.getCapacity().getOneDimensionCapacity() : " + storageContainer.getCapacity() .getOneDimensionCapacity()); Logger.out .debug("storageContainer.getCapacity().getTwoDimensionCapacity() : " + storageContainer.getCapacity() .getTwoDimensionCapacity()); int oneDimensionCapacity = (storageContainer.getCapacity() .getOneDimensionCapacity() != null ? storageContainer .getCapacity().getOneDimensionCapacity().intValue() : -1); int twoDimensionCapacity = (storageContainer.getCapacity() .getTwoDimensionCapacity() != null ? storageContainer .getCapacity().getTwoDimensionCapacity().intValue() : -1); if (((oneDimensionCapacity) < Integer.parseInt(posOne)) || ((twoDimensionCapacity) < Integer.parseInt(posTwo))) { return false; } return true; } catch (Exception e) { Logger.out.debug("Error in validatePosition : " + e); return false; } } // Will check only for Position is used or not. 
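    // Hedged usage sketch (the calling code is hypothetical): before assigning
    // a slot, a caller would typically combine the range check above with the
    // occupancy check defined below, e.g.
    //
    //   if (validatePosition(container, posOne, posTwo)
    //           && isPositionAvailable(dao, container, posOne, posTwo)) {
    //       // safe to place the specimen / child container at (posOne, posTwo)
    //   }
    //
    // validatePosition(...) only compares against the container capacity, while
    // isPositionAvailable(...) queries specimens, child containers and specimen
    // arrays occupying that exact position.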
protected boolean isPositionAvailable(DAO dao, StorageContainer storageContainer, String posOne, String posTwo) { try { String sourceObjectName = Specimen.class.getName(); String[] selectColumnName = { "id" }; String[] whereColumnName = { "specimenPosition.positionDimensionOne", "specimenPosition.positionDimensionTwo", "specimenPosition.storageContainer.id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = { "=", "=", "=" }; Object[] whereColumnValue = { Integer.valueOf(posOne), Integer.valueOf(posTwo), storageContainer.getId() }; String joinCondition = Constants.AND_JOIN_CONDITION; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); Logger.out.debug("storageContainer.getId() :" + storageContainer.getId()); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**************IN isPositionAvailable : obj::::::: --------------- " + obj); // Logger.out.debug((Long)obj[0] ); // Logger.out.debug((Integer)obj[1]); // Logger.out.debug((Integer )obj[2]); return false; } else { sourceObjectName = StorageContainer.class.getName(); String[] whereColumnName1 = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo", "locatedAtPosition.parentContainer.id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition1 = { "=", "=", "=" }; Object[] whereColumnValue1 = { Integer.valueOf(posOne), Integer.valueOf(posTwo), storageContainer.getId() }; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); Logger.out.debug("storageContainer.getId() :" + storageContainer.getId()); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**********IN isPositionAvailable : obj::::: --------- " + obj); return false; } else { sourceObjectName = SpecimenArray.class.getName(); String[] whereColumnName2 = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo", "locatedAtPosition.parentContainer.id" }; String[] whereColumnCondition2 = { "=", "=", "=" }; Object[] whereColumnValue2 = { Integer.valueOf(posOne), Integer.valueOf(posTwo), storageContainer.getId() }; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName2, whereColumnCondition2, whereColumnValue2, joinCondition); Logger.out.debug("storageContainer.getId() :" + storageContainer.getId()); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**********IN isPositionAvailable : obj::::: --------- " + obj); return false; } } } return true; } catch (Exception e) { Logger.out.debug("Error in isPositionAvailable : " + e); return false; } } // -- storage container validation for specimen public void checkContainer(DAO dao, String storageContainerID, String positionOne, String positionTwo, SessionDataBean sessionDataBean, boolean multipleSpecimen) throws DAOException, SMException { // List list = dao.retrieve(StorageContainer.class.getName(), // "id",storageContainerID ); String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { Constants.SYSTEM_IDENTIFIER, "capacity.oneDimensionCapacity", "capacity.twoDimensionCapacity", "name" }; String[] whereColumnName = { Constants.SYSTEM_IDENTIFIER 
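            // single where-column: the container's system identifier, matched
            // against storageContainerID below (descriptive note)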
}; String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { Long.valueOf(storageContainerID) }; String joinCondition = Constants.AND_JOIN_CONDITION; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); // check if StorageContainer exists with the given ID if (list.size() != 0) { Object[] obj = (Object[]) list.get(0); Logger.out .debug("**********SC found for given ID ****obj::::::: --------------- " + obj); Logger.out.debug((Long) obj[0]); Logger.out.debug((Integer) obj[1]); Logger.out.debug((Integer) obj[2]); Logger.out.debug((String) obj[3]); StorageContainer pc = new StorageContainer(); pc.setId((Long) obj[0]); pc.setName((String) obj[3]); Capacity cntPos = new Capacity(); if (obj[1] != null && obj[2] != null) { cntPos.setOneDimensionCapacity((Integer) obj[1]); cntPos.setTwoDimensionCapacity((Integer) obj[2]); pc.setCapacity(cntPos); } // check if user has privilege to use the container boolean hasAccess = validateContainerAccess(dao,pc, sessionDataBean); Logger.out.debug("hasAccess..............." + hasAccess); if (!hasAccess) { throw new DAOException(ApplicationProperties .getValue("access.use.object.denied")); } // check for closed Container checkStatus(dao, pc, "Storage Container"); /** * Name : kalpana thakur Reviewer Name : Vaishali Bug ID: 4922 * Description:Storage container will not be added to closed site * :check for closed site */ checkClosedSite(dao, pc.getId(), "Container Site"); // check for valid position boolean isValidPosition = validatePosition(pc, positionOne, positionTwo); Logger.out.debug("isValidPosition : " + isValidPosition); boolean canUsePosition = false; if (isValidPosition) // if position is valid { /* * try { */ canUsePosition = isPositionAvailable(dao, pc, positionOne, positionTwo); /* * } catch (Exception e) { * * e.printStackTrace(); } */ /* * try { canUsePosition = * StorageContainerUtil.isPostionAvaialble(pc.getId().toString(), * pc.getName(), positionOne, positionTwo); } catch * (CacheException e) { // TODO Auto-generated catch block * e.printStackTrace(); } */ Logger.out.debug("canUsePosition : " + canUsePosition); if (canUsePosition) // position empty. can be used { } else // position already in use { if (multipleSpecimen) { throw new DAOException( ApplicationProperties .getValue("errors.storageContainer.Multiple.inUse")); } else { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.inUse")); } } } else // position is invalid { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.dimensionOverflow")); } } else // storageContainer does not exist { throw new DAOException(ApplicationProperties .getValue("errors.storageContainerExist")); } } /* * (non-Javadoc) * * @see edu.wustl.catissuecore.bizlogic.TreeDataInterface#getTreeViewData(edu.wustl.common.beans.SessionDataBean, * java.util.Map) */ public Vector getTreeViewData(SessionDataBean sessionData, Map map, List list) throws DAOException { return null; } /** * Overriding the parent class's method to validate the enumerated attribute * values */ protected boolean validate(Object obj, DAO dao, String operation) throws DAOException { StorageContainer container = (StorageContainer) obj; /** * Start: Change for API Search --- Jitendra 06/10/2006 In Case of Api * Search, default values will not get set for the object since * setAllValues() method of domainObject will not get called. 
To avoid * null pointer exception, we are setting the default values same we * were setting in setAllValues() method of domainObject. */ ApiSearchUtil.setContainerDefault(container); // End:- Change for API Search String message = ""; if (container == null) throw new DAOException("domain.object.null.err.msg"); Validator validator = new Validator(); if (container.getStorageType() == null) { message = ApplicationProperties.getValue("storageContainer.type"); throw new DAOException(ApplicationProperties.getValue( "errors.item.required", message)); } if (container.getNoOfContainers() == null) { Integer conts = new Integer(1); container.setNoOfContainers(conts); } if (validator.isEmpty(container.getNoOfContainers().toString())) { message = ApplicationProperties .getValue("storageContainer.noOfContainers"); throw new DAOException(ApplicationProperties.getValue( "errors.item.required", message)); } if (!validator.isNumeric(container.getNoOfContainers().toString(), 1)) { message = ApplicationProperties .getValue("storageContainer.noOfContainers"); throw new DAOException(ApplicationProperties.getValue( "errors.item.format", message)); } if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() == null) { if (container.getSite() == null || container.getSite().getId() == null || container.getSite().getId() <= 0) { message = ApplicationProperties .getValue("storageContainer.site"); throw new DAOException(ApplicationProperties.getValue( "errors.item.invalid", message)); } } /* * if * (!validator.isNumeric(String.valueOf(container.getPositionDimensionOne()), * 1) || * !validator.isNumeric(String.valueOf(container.getPositionDimensionTwo()), * 1) || * !validator.isNumeric(String.valueOf(container.getParent().getId()), * 1)) { message = * ApplicationProperties.getValue("storageContainer.parentContainer"); * throw new * DAOException(ApplicationProperties.getValue("errors.item.format", * message)); } */ // validations for Container name // by falguni /* * if (validator.isEmpty(container.getName())) { message = * ApplicationProperties.getValue("storageContainer.name"); throw new * DAOException(ApplicationProperties.getValue("errors.item.required", * message)); } */ // validations for temperature if (container.getTempratureInCentigrade() != null && !validator.isEmpty(container.getTempratureInCentigrade() .toString()) && (!validator.isDouble(container.getTempratureInCentigrade() .toString(), false))) { message = ApplicationProperties .getValue("storageContainer.temperature"); throw new DAOException(ApplicationProperties.getValue( "errors.item.format", message)); } if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { if (container.getLocatedAtPosition().getParentContainer().getId() == null) { String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "id" }; String[] whereColumnName = { "name" }; String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { container.getLocatedAtPosition() .getParentContainer().getName() }; String joinCondition = null; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); if (!list.isEmpty()) { container.getLocatedAtPosition().getParentContainer() .setId((Long) list.get(0)); } else { String message1 = ApplicationProperties .getValue("specimen.storageContainer"); throw new DAOException(ApplicationProperties.getValue( "errors.invalid", message1)); } } // Long 
storageContainerId = specimen.getStorageContainer().getId(); Integer xPos = container.getLocatedAtPosition() .getPositionDimensionOne(); Integer yPos = container.getLocatedAtPosition() .getPositionDimensionTwo(); boolean isContainerFull = false; /** * Following code is added to set the x and y dimension in case only * storage container is given and x and y positions are not given */ if (xPos == null || yPos == null) { isContainerFull = true; Map containerMapFromCache = null; try { containerMapFromCache = (TreeMap) StorageContainerUtil .getContainerMapFromCache(); } catch (CacheException e) { e.printStackTrace(); } if (containerMapFromCache != null) { Iterator itr = containerMapFromCache.keySet().iterator(); while (itr.hasNext()) { NameValueBean nvb = (NameValueBean) itr.next(); if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getParentContainer() != null && nvb.getValue().toString().equals( container.getLocatedAtPosition() .getParentContainer().getId() .toString())) { Map tempMap = (Map) containerMapFromCache.get(nvb); Iterator tempIterator = tempMap.keySet().iterator(); ; NameValueBean nvb1 = (NameValueBean) tempIterator .next(); List list = (List) tempMap.get(nvb1); NameValueBean nvb2 = (NameValueBean) list.get(0); ContainerPosition cntPos = container .getLocatedAtPosition(); cntPos.setPositionDimensionOne(new Integer(nvb1 .getValue())); cntPos.setPositionDimensionTwo(new Integer(nvb2 .getValue())); cntPos.setOccupiedContainer(container); isContainerFull = false; break; } } } if (isContainerFull) { throw new DAOException( "The Storage Container you specified is full"); } } // VALIDATIONS FOR DIMENSION 1. if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getPositionDimensionOne() != null && validator.isEmpty(String.valueOf(container .getLocatedAtPosition().getPositionDimensionOne()))) { message = ApplicationProperties .getValue("storageContainer.oneDimension"); throw new DAOException(ApplicationProperties.getValue( "errors.item.required", message)); } else { if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getPositionDimensionOne() != null && !validator.isNumeric(String.valueOf(container .getLocatedAtPosition() .getPositionDimensionOne()))) { message = ApplicationProperties .getValue("storageContainer.oneDimension"); throw new DAOException(ApplicationProperties.getValue( "errors.item.format", message)); } } // Validations for dimension 2 if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getPositionDimensionTwo() != null && !validator.isEmpty(String.valueOf(container .getLocatedAtPosition().getPositionDimensionTwo())) && (!validator.isNumeric(String.valueOf(container .getLocatedAtPosition().getPositionDimensionTwo())))) { message = ApplicationProperties .getValue("storageContainer.twoDimension"); throw new DAOException(ApplicationProperties.getValue( "errors.item.format", message)); } } if (operation.equals(Constants.ADD)) { if (!Constants.ACTIVITY_STATUS_ACTIVE.equals(container .getActivityStatus())) { throw new DAOException(ApplicationProperties .getValue("activityStatus.active.errMsg")); } if (container.isFull().booleanValue()) { throw new DAOException(ApplicationProperties .getValue("storageContainer.isContainerFull.errMsg")); } } else { if (!Validator.isEnumeratedValue(Constants.ACTIVITY_STATUS_VALUES, container.getActivityStatus())) { throw new DAOException(ApplicationProperties .getValue("activityStatus.errMsg")); } } return true; } // TODO Write the 
proper business logic to return an appropriate list of // containers. public List getStorageContainerList() throws DAOException { String sourceObjectName = StorageContainer.class.getName(); String[] displayNameFields = { Constants.SYSTEM_IDENTIFIER }; String valueField = Constants.SYSTEM_IDENTIFIER; List list = getList(sourceObjectName, displayNameFields, valueField, true); return list; } public List getCollectionProtocolList() throws DAOException { String sourceObjectName = CollectionProtocol.class.getName(); List returnList = new ArrayList(); NameValueBean nvb1 = new NameValueBean("--Any--", "-1"); returnList.add(nvb1); List list = retrieve(sourceObjectName); Iterator itr = list.iterator(); while (itr.hasNext()) { CollectionProtocol collectionProtocol = (CollectionProtocol) itr .next(); NameValueBean nvb = new NameValueBean( collectionProtocol.getTitle(), collectionProtocol); returnList.add(nvb); } return returnList; } /** * This functions returns a double dimensional boolean array which tells the * availablity of storage positions of particular container. True - * Available. False - Not Available. * * @param container * The container. * @return Returns a double dimensional boolean array of position * availablity. * @throws DAOException */ public boolean[][] getAvailablePositionsForContainer(String containerId, int dimX, int dimY) throws DAOException { boolean[][] positions = new boolean[dimX][dimY]; // Initializing the array for (int i = 0; i < dimX; i++) { for (int j = 0; j < dimY; j++) { positions[i][j] = true; } } // Retrieving all the occupied positions by child containers String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo" }; String[] whereColumnName = { "locatedAtPosition.parentContainer.id" }; String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { new Long(containerId) }; List list = retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); // Logger.out.debug("all the occupied positions by child // containers"+list); setPositions(positions, list); // Retrieving all the occupied positions by specimens sourceObjectName = Specimen.class.getName(); whereColumnName[0] = "specimenPosition.storageContainer.id"; selectColumnName[0] = "specimenPosition.positionDimensionOne"; selectColumnName[1] = "specimenPosition.positionDimensionTwo"; list = retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); setPositions(positions, list); // Retrieving all the occupied positions by specimens array sourceObjectName = SpecimenArray.class.getName(); whereColumnName[0] = "locatedAtPosition.parentContainer.id"; selectColumnName[0] = "locatedAtPosition.positionDimensionOne"; selectColumnName[1] = "locatedAtPosition.positionDimensionTwo"; list = retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); setPositions(positions, list); return positions; } /** * @param positions * @param list */ private void setPositions(boolean[][] positions, List list) { if (!list.isEmpty()) { int x, y; for (int i = 0; i < list.size(); i++) { Object[] object = (Object[]) list.get(i); x = Integer.parseInt(object[0].toString()); y = Integer.parseInt(object[1].toString()); positions[x][y] = false; } } } /** * This functions returns a double dimensional boolean array which tells the * availablity of storage positions of particular 
container. True - * Available. False - Not Available. * * @param containerId * The container identifier. * @return Returns a double dimensional boolean array of position * availablity. * @throws DAOException */ // public boolean[][] getAvailablePositions(String containerId) throws // DAOException // { // // List list = retrieve(StorageContainer.class.getName(), // Constants.SYSTEM_IDENTIFIER, new Long(containerId)); // // // // if (list != null) // // { // // StorageContainer container = (StorageContainer) list.get(0); // return getAvailablePositionsForContainer(containerId); // // } // // else // // { // // return new boolean[0][0]; // // } // } /** * This functions returns a map of available rows vs. available columns. * * @param container * The container. * @return Returns a map of available rows vs. available columns. * @throws DAOException */ public Map getAvailablePositionMapForContainer(String containerId, int aliquotCount, String positionDimensionOne, String positionDimensionTwo) throws DAOException { Map map = new TreeMap(); int count = 0; // Logger.out.debug("dimX:"+positionDimensionOne+":dimY:"+positionDimensionTwo); // if (!container.isFull().booleanValue()) // { int dimX = Integer.parseInt(positionDimensionOne) + 1; int dimY = Integer.parseInt(positionDimensionTwo) + 1; boolean[][] availablePosistions = getAvailablePositionsForContainer( containerId, dimX, dimY); for (int x = 1; x < availablePosistions.length; x++) { List list = new ArrayList(); for (int y = 1; y < availablePosistions[x].length; y++) { if (availablePosistions[x][y]) { list.add(new NameValueBean(new Integer(y), new Integer(y))); count++; } } if (!list.isEmpty()) { Integer xObj = new Integer(x); NameValueBean nvb = new NameValueBean(xObj, xObj); map.put(nvb, list); } } // } // Logger.out.info("Map :"+map); if (count < aliquotCount) { return new TreeMap(); } return map; } /** * This functions returns a map of available rows vs. available columns. * * @param containerId * The container identifier. * @return Returns a map of available rows vs. available columns. * @throws DAOException */ // public Map getAvailablePositionMap(String containerId, int aliquotCount) // throws DAOException // { // // List list = retrieve(StorageContainer.class.getName(), // Constants.SYSTEM_IDENTIFIER, new Long(containerId)); // // // // if (list != null) // // { // // StorageContainer container = (StorageContainer) list.get(0); // return getAvailablePositionMapForContainer(containerId, aliquotCount); // // } // // else // // { // // return new TreeMap(); // // } // } /** * This functions returns a map of allocated containers vs. their respective * free locations. * * @return Returns a map of allocated containers vs. their respective free * locations. 
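     *         <p>
     *         Illustrative shape (values hypothetical): the outer key is a
     *         NameValueBean of (container name, container id); the inner map is
     *         keyed by free row number and holds the free columns of that row,
     *         e.g. ("Freezer A", 12) -> { 1 -> [2, 3], 4 -> [1] }.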
* @throws DAOException */ public Map getAllocatedContainerMap() throws DAOException { /* * A code snippet inside the commented block should actually be replaced * by the code to get the allocated containers of specific collection * protocol */ // List list = retrieve(StorageContainer.class.getName()); String[] selectColumnName = { Constants.SYSTEM_IDENTIFIER, "name", "capacity.oneDimensionCapacity", "capacity.twoDimensionCapacity" }; List list = retrieve(StorageContainer.class.getName(), selectColumnName); Map containerMap = new TreeMap(); Logger.out.info("===================== list size:" + list.size()); Iterator itr = list.iterator(); while (itr.hasNext()) { Object containerList[] = (Object[]) itr.next(); // Logger.out.info("+++++++++++++++++++++++++++:"+container.getName()+"++++++++++:"+container.getId()); Map positionMap = getAvailablePositionMapForContainer(String .valueOf(containerList[0]), 0, containerList[2].toString(), containerList[3].toString()); if (!positionMap.isEmpty()) { // Logger.out.info("---------"+container.getName()+"------"+container.getId()); NameValueBean nvb = new NameValueBean(containerList[1], containerList[0]); containerMap.put(nvb, positionMap); } } return containerMap; } protected void loadSiteFromContainerId(DAO dao, StorageContainer container) throws DAOException { if (container != null) { Long sysId = container.getId(); Object object = dao.retrieve(StorageContainer.class.getName(), sysId); // System.out.println("siteIdList " + siteIdList); StorageContainer sc = (StorageContainer) object; // System.out.println("siteId " + sc.getSite().getId()); container.setSite(sc.getSite()); } } public TreeMap getAllocatedContaienrMapForContainer(long type_id, String exceedingMaxLimit, String selectedContainerName, SessionDataBean sessionDataBean) throws DAOException { long start = 0; long end = 0; TreeMap containerMap = new TreeMap(); JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); dao.openSession(null); start = System.currentTimeMillis(); // String queryStr = "SELECT t1.IDENTIFIER, t1.NAME FROM CATISSUE_CONTAINER t1 WHERE " // + "t1.IDENTIFIER IN (SELECT t4.STORAGE_CONTAINER_ID FROM CATISSUE_ST_CONT_ST_TYPE_REL t4 " // + "WHERE t4.STORAGE_TYPE_ID = '" // + type_id // + "' OR t4.STORAGE_TYPE_ID='1') AND " // + "t1.ACTIVITY_STATUS='" // + Constants.ACTIVITY_STATUS_ACTIVE + "' order by IDENTIFIER"; String queryStr = "SELECT t1.IDENTIFIER, t1.NAME FROM CATISSUE_CONTAINER t1 WHERE " + "t1.IDENTIFIER IN (SELECT t4.STORAGE_CONTAINER_ID FROM CATISSUE_ST_CONT_ST_TYPE_REL t4 " + "WHERE t4.STORAGE_TYPE_ID = '" + type_id + "' OR t4.STORAGE_TYPE_ID='1' and t4.STORAGE_CONTAINER_ID not in (select IDENTIFIER from catissue_storage_container where site_id in (select IDENTIFIER from catissue_site s1 where s1.ACTIVITY_STATUS='Closed'))) AND " + "t1.ACTIVITY_STATUS='" + Constants.ACTIVITY_STATUS_ACTIVE + "' order by IDENTIFIER"; Logger.out.debug("Storage Container query......................" 
+ queryStr); List list = new ArrayList(); try { list = dao.executeQuery(queryStr, null, false, null); } catch (Exception ex) { throw new DAOException(ex.getMessage()); } end = System.currentTimeMillis(); System.out.println("Time taken for executing query : " + (end - start)); dao.closeSession(); Map containerMapFromCache = null; Set<Long> siteIds = new UserBizLogic().getRelatedSiteIds(sessionDataBean.getUserId()); try { containerMapFromCache = StorageContainerUtil .getContainerMapFromCache(); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } boolean flag = true; if (containerMapFromCache != null) { int i = 1; Iterator itr = list.iterator(); while (itr.hasNext()) { List list1 = (List) itr.next(); String Id = (String) list1.get(0); Long siteId = getSiteIdForStorageContainerId(Long.valueOf(Id)); if(!sessionDataBean.isAdmin()) { if(!siteIds.contains(siteId)) { continue; } } String name = (String) list1.get(1); NameValueBean nvb = new NameValueBean(name, Id, new Long(Id)); if (selectedContainerName != null && flag) { if (!name.equalsIgnoreCase(selectedContainerName.trim())) { continue; } flag = false; } try { Map positionMap = (TreeMap) containerMapFromCache.get(nvb); if (positionMap != null && !positionMap.isEmpty()) { Map positionMap1 = deepCopyMap(positionMap); // NameValueBean nvb = new NameValueBean(Name, Id); if (i > containersMaxLimit) { exceedingMaxLimit = "true"; break; } else { containerMap.put(nvb, positionMap1); } i++; } } catch (Exception e) { Logger.out.info("Error while getting map from cache"); e.printStackTrace(); } } } return containerMap; } /* temp function end */ private Long getSiteIdForStorageContainerId(Long scId) { Session session = null; Long siteId = null; try { session = DBUtil.getCleanSession(); StorageContainer sc = (StorageContainer) session.load(StorageContainer.class.getName(), scId); if(sc != null) { Site site = sc.getSite(); siteId = site.getId(); } } catch (BizLogicException e1) { Logger.out.debug(e1.getMessage(), e1); } finally { session.close(); } return siteId; } public TreeMap getAllocatedContaienrMapForSpecimen(long cpId, String specimenClass, int aliquotCount, String exceedingMaxLimit, SessionDataBean sessionData, boolean closeSession) throws DAOException { NameValueBeanRelevanceComparator comparator = new NameValueBeanRelevanceComparator(); Logger.out .debug("method : getAllocatedContaienrMapForSpecimen()---getting containers for specimen--------------"); TreeMap containerMap = new TreeMap(comparator); List list = getRelevantContainerList(cpId, specimenClass, closeSession); Logger.out .debug("getAllocatedContaienrMapForSpecimen()----- Size of list--------:" + list.size()); Map containerMapFromCache = null; try { containerMapFromCache = (TreeMap) StorageContainerUtil .getContainerMapFromCache(); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } if (containerMapFromCache != null) { int i = 1; int relevenceCounter = 1; Iterator itr = list.iterator(); while (itr.hasNext()) { List list1 = (List) itr.next(); String Id = (String) list1.get(1); String Name = (String) list1.get(2); NameValueBean nvb = new NameValueBean(Name, Id, new Long( relevenceCounter)); Map positionMap = (TreeMap) containerMapFromCache.get(nvb); if (positionMap != null && !positionMap.isEmpty()) { StorageContainer sc = new StorageContainer(); sc.setId(new Long(Id)); boolean hasAccess = true; try { AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.HIBERNATE_DAO); dao.openSession(null); hasAccess = 
validateContainerAccess(dao,sc, sessionData,cpId); dao.closeSession(); } catch (SMException sme) { sme.printStackTrace(); throw handleSMException(sme); } if (!hasAccess) continue; if (i > containersMaxLimit) { Logger.out.debug("CONTAINERS_MAX_LIMIT reached"); exceedingMaxLimit = new String("true"); break; } else { if (aliquotCount > 0) { long count = countPositionsInMap(positionMap); if (count >= aliquotCount) { containerMap.put(nvb, positionMap); } } else { containerMap.put(nvb, positionMap); } } i++; } relevenceCounter++; } Logger.out .debug("getAllocatedContaienrMapForSpecimen()----Size of containerMap:" + containerMap.size()); } Logger.out.debug("exceedingMaxLimit----------" + exceedingMaxLimit); return containerMap; } private boolean validateContainerAccess(AbstractDAO dao, StorageContainer sc, SessionDataBean sessionData, long cpId) throws SMException { boolean isValidContainer = validateContainerAccess(dao,sc,sessionData); if(sessionData != null && sessionData.isAdmin()) { return true; } Collection<Site> siteCollection = null; Site site = null; if (isValidContainer) { try { site = getSite(dao, sc.getId()); } catch (DAOException e) { Logger.out.debug(e.getMessage(), e); } siteCollection = new CollectionProtocolBizLogic().getRelatedSites(cpId); if (siteCollection != null) { for(Site site1 : siteCollection) { if(site1.getId().equals(site.getId())) { return true; } } } } return false; } /** * This function gets the list of container in order of there relvance. * * @param cpId * collection protocol Id * @param specimenClass * class of the specimen * @param closeSession * @return list of containers in order of there relevence. * @throws DAOException * @author Vaishali */ public List getRelevantContainerList(long cpId, String specimenClass, boolean closeSession) throws DAOException { List list = new ArrayList(); JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); dao.openSession(null); String[] queryArray = new String[6]; // category # 1 // Gets all container which stores just specified collection protocol // and specified specimen class String equalToOne = " = 1 "; String greaterThanOne = " > 1 "; String equalToFour = " = 4 "; String notEqualToFour = " !=4 "; String endQry = " and t1.IDENTIFIER = t6.STORAGE_CONTAINER_ID and t1.IDENTIFIER = t7.IDENTIFIER" + " group by t6.STORAGE_CONTAINER_ID, t1.NAME " + " order by co asc "; String cpRestrictionCountQuery = "(select count(*) from CATISSUE_ST_CONT_COLL_PROT_REL t4 where t4.STORAGE_CONTAINER_ID = t1.IDENTIFIER)"; String specimenClassRestrictionQuery = "(select count(*) from CATISSUE_STOR_CONT_SPEC_CLASS t5 where t5.STORAGE_CONTAINER_ID = t1.IDENTIFIER)"; // Vijay main query and default restriction query is updated according // to bug id#8076 String mainQuery = " SELECT count(*) co, t6.STORAGE_CONTAINER_ID, t1.NAME FROM CATISSUE_CONTAINER t1 , CATISSUE_STOR_CONT_SPEC_CLASS t6 , CATISSUE_STORAGE_CONTAINER t7 " + " WHERE t1.IDENTIFIER IN (SELECT t2.STORAGE_CONTAINER_ID" + " FROM CATISSUE_ST_CONT_COLL_PROT_REL t2 WHERE t2.COLLECTION_PROTOCOL_ID = '" + cpId + "')" + " AND t1.ACTIVITY_STATUS='Active'" + " and t1.IDENTIFIER IN (SELECT t3.STORAGE_CONTAINER_ID FROM CATISSUE_STOR_CONT_SPEC_CLASS t3" + " WHERE t3.SPECIMEN_CLASS = '" + specimenClass + "')" + " AND t1.ACTIVITY_STATUS='Active' AND t1.IDENTIFIER=t7.IDENTIFIER AND t7.SITE_ID NOT IN (SELECT IDENTIFIER FROM CATISSUE_SITE WHERE ACTIVITY_STATUS='Closed')"; String defaultRestrictionQuery = " SELECT count(*) co, t6.STORAGE_CONTAINER_ID, t1.NAME FROM CATISSUE_CONTAINER t1 
, CATISSUE_STOR_CONT_SPEC_CLASS t6 , CATISSUE_STORAGE_CONTAINER t7 " + " WHERE t1.IDENTIFIER NOT IN (SELECT t2.STORAGE_CONTAINER_ID FROM CATISSUE_ST_CONT_COLL_PROT_REL t2)" + " and t1.IDENTIFIER IN (SELECT t3.STORAGE_CONTAINER_ID FROM CATISSUE_STOR_CONT_SPEC_CLASS t3" + " WHERE t3.SPECIMEN_CLASS = '" + specimenClass + "') " + " AND t1.ACTIVITY_STATUS='Active' AND t7.SITE_ID NOT IN (SELECT IDENTIFIER FROM CATISSUE_SITE WHERE ACTIVITY_STATUS='Closed')"; String queryStr1 = mainQuery + " and " + cpRestrictionCountQuery + equalToOne + " and " + specimenClassRestrictionQuery + equalToOne + endQry; // category # 2 // Gets all containers which holds just specified container and any // specimen class String queryStr2 = mainQuery + " and " + cpRestrictionCountQuery + equalToOne + " and " + specimenClassRestrictionQuery + greaterThanOne + endQry; // catgory # 3 // Gets all the containers which holds other than specified collection // protocol and only specified specimen class String queryStr3 = mainQuery + " and " + cpRestrictionCountQuery + greaterThanOne + " and " + specimenClassRestrictionQuery + equalToOne + endQry; // catgory # 4 // Gets all the containers which holds specified cp and other than // specified collection protocol and specified specimen class and other // than specified specimen class String queryStr4 = mainQuery + " and " + cpRestrictionCountQuery + greaterThanOne + " and " + specimenClassRestrictionQuery + greaterThanOne + endQry; // catgory # 5 // Gets all the containers which holds any collection protocol and // specified specimen class and other than specified specimen class String queryStr5 = defaultRestrictionQuery + " and " + specimenClassRestrictionQuery + notEqualToFour + endQry; // catgory # 6 // Gets all the containers which holds any collection protocol and any // specimen class String queryStr6 = defaultRestrictionQuery + " and " + specimenClassRestrictionQuery + equalToFour + endQry; queryArray[0] = queryStr1; queryArray[1] = queryStr2; queryArray[2] = queryStr3; queryArray[3] = queryStr4; queryArray[4] = queryStr5; queryArray[5] = queryStr6; for (int i = 0; i < 6; i++) { Logger.out.debug("Storage Container query......................" + queryArray[i]); System.out.println("Storage Container query......................" + queryArray[i]); List queryResultList = executeStorageContQuery(queryArray[i], dao); list.addAll(queryResultList); } if (closeSession) { dao.closeSession(); } return list; } /** * This function executes the query * * @param query * @param dao * @return * @throws DAOException */ public List executeStorageContQuery(String query, JDBCDAO dao) throws DAOException { Logger.out.debug("Storage Container query......................" + query); List list = new ArrayList(); try { list = dao.executeQuery(query, null, false, null); } catch (Exception ex) { throw new DAOException(ex.getMessage()); } return list; } /** * Gets allocated container map for specimen array. * * @param specimen_array_type_id * specimen array type id * @param noOfAliqoutes * No. 
of aliquotes * @return container map * @throws DAOException -- * throws DAO Exception * @see edu.wustl.common.dao.JDBCDAOImpl */ public TreeMap getAllocatedContaienrMapForSpecimenArray( long specimen_array_type_id, int noOfAliqoutes, SessionDataBean sessionData, String exceedingMaxLimit) throws DAOException { NameValueBeanValueComparator contComp = new NameValueBeanValueComparator(); TreeMap containerMap = new TreeMap(contComp); JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); dao.openSession(null); String includeAllIdQueryStr = " OR t4.SPECIMEN_ARRAY_TYPE_ID = '" + Constants.ARRAY_TYPE_ALL_ID + "'"; if (!(new Validator().isValidOption(String .valueOf(specimen_array_type_id)))) { includeAllIdQueryStr = ""; } String queryStr = "select t1.IDENTIFIER,t1.name from CATISSUE_CONTAINER t1,CATISSUE_STORAGE_CONTAINER t2 " + "where t1.IDENTIFIER IN (select t4.STORAGE_CONTAINER_ID from CATISSUE_CONT_HOLDS_SPARRTYPE t4 " + "where t4.SPECIMEN_ARRAY_TYPE_ID = '" + specimen_array_type_id + "'" + includeAllIdQueryStr + ") and t1.IDENTIFIER = t2.IDENTIFIER"; Logger.out.debug("SPECIMEN ARRAY QUERY ......................" + queryStr); List list = new ArrayList(); Set<Long> siteIds = new UserBizLogic().getRelatedSiteIds(sessionData.getUserId()); try { list = dao.executeQuery(queryStr, null, false, null); } catch (Exception ex) { throw new DAOException(ex.getMessage()); } dao.closeSession(); Logger.out.info("Size of list:" + list.size()); Map containerMapFromCache = null; try { containerMapFromCache = (TreeMap) StorageContainerUtil .getContainerMapFromCache(); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } if (containerMapFromCache != null) { int i = 1; Iterator itr = list.iterator(); while (itr.hasNext()) { List list1 = (List) itr.next(); String Id = (String) list1.get(0); Long siteId = getSiteIdForStorageContainerId(Long.valueOf(Id)); if(!sessionData.isAdmin()) { if(!siteIds.contains(siteId)) { continue; } } String Name = (String) list1.get(1); NameValueBean nvb = new NameValueBean(Name, Id); Map positionMap = (TreeMap) containerMapFromCache.get(nvb); if (positionMap != null && !positionMap.isEmpty()) { // deep copy is required due to cache updation by reference Map positionMap1 = deepCopyMap(positionMap); // NameValueBean nvb = new NameValueBean(Name, Id); StorageContainer sc = new StorageContainer(); sc.setId(new Long(Id)); /* * boolean hasAccess = true; try { hasAccess = * validateContainerAccess(sc, sessionData); } catch * (SMException sme) { sme.printStackTrace(); throw * handleSMException(sme); } if (!hasAccess) continue; */ if (i > containersMaxLimit) { exceedingMaxLimit = "true"; break; } else { containerMap.put(nvb, positionMap1); } i++; } } } return containerMap; } // --------------Code for Map Mandar: 04-Sep-06 start // Mandar : 29Aug06 : for StorageContainerMap /** * @param id * Identifier of the StorageContainer related to which the * collectionProtocol titles are to be retrieved. * @return List of collectionProtocol title. * @throws DAOException */ public List getCollectionProtocolList(String id) throws DAOException { // Query to return titles of collection protocol related to given // storagecontainer. 29-Aug-06 Mandar. 
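        // Hedged note: getToolTipData(...) defined below joins the titles
        // returned here with the classes from getSpecimenClassList(...) into a
        // two-line tooltip, for example (values hypothetical):
        //   CollectionProtocol | Protocol A | Protocol B
        //   SpecimenClass | Tissue | Fluid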
String sql = " SELECT SP.TITLE TITLE FROM CATISSUE_SPECIMEN_PROTOCOL SP, CATISSUE_ST_CONT_COLL_PROT_REL SC " + " WHERE SP.IDENTIFIER = SC.COLLECTION_PROTOCOL_ID AND SC.STORAGE_CONTAINER_ID = " + id; List resultList = executeSQL(sql); Iterator iterator = resultList.iterator(); List returnList = new ArrayList(); while (iterator.hasNext()) { List list = (List) iterator.next(); String data = (String) list.get(0); returnList.add(data); } if (returnList.isEmpty()) { returnList.add(new String(Constants.ALL)); } return returnList; } /** * @param id * Identifier of the StorageContainer related to which the * collectionProtocol titles are to be retrieved. * @return List of collectionProtocol title. * @throws DAOException */ public List getSpecimenClassList(String id) throws DAOException { // Query to return specimen classes related to given storagecontainer. // 29-Aug-06 Mandar. String sql = " SELECT SP.SPECIMEN_CLASS CLASS FROM CATISSUE_STOR_CONT_SPEC_CLASS SP " + "WHERE SP.STORAGE_CONTAINER_ID = " + id; List resultList = executeSQL(sql); Iterator iterator = resultList.iterator(); List returnList = new ArrayList(); while (iterator.hasNext()) { List list = (List) iterator.next(); for (int cnt = 0; cnt < list.size(); cnt++) { String data = (String) list.get(cnt); returnList.add(data); } } if (returnList.isEmpty()) { // bug id 7438 // returnList.add(new String(Constants.ALL)); returnList.add(new String(Constants.NONE)); } return returnList; } /** * @param sql * @return * @throws DAOException */ private List executeSQL(String sql) throws DAOException { JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); List resultList = new ArrayList(); try { dao.openSession(null); resultList = dao.executeQuery(sql, null, false, null); dao.closeSession(); } catch (Exception daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } return resultList; } // prints results returned from DAO executeQuery To comment after debug private void printRecords(List list) { if (list != null) { if (!list.isEmpty()) { // System.out.println("OuterList Size : " + list.size()); for (int i = 0; i < list.size(); i++) { List innerList = (List) list.get(i); // System.out.println("\nInnerList Size : " + // innerList.size() + "\n"); String s = ""; for (int j = 0; j < innerList.size(); j++) { String s1 = (String) innerList.get(j); s = s + " | " + s1; } // System.out.print(s); } } } } // Method to fetch ToolTipData for a given Container private String getToolTipData(String containerID) throws DAOException { String toolTipData = ""; List specimenClassList = getSpecimenClassList(containerID); String classData = "SpecimenClass"; for (int counter = 0; counter < specimenClassList.size(); counter++) { String data = (String) specimenClassList.get(counter); classData = classData + " | " + data; } List collectionProtocolList = getCollectionProtocolList(containerID); String protocolData = "CollectionProtocol"; for (int cnt = 0; cnt < collectionProtocolList.size(); cnt++) { String data = (String) collectionProtocolList.get(cnt); protocolData = protocolData + " | " + data; } toolTipData = protocolData + "\n" + classData; // System.out.println(toolTipData); return toolTipData; } // --------------Code for Map Mandar: 04-Sep-06 end // this function is for making the deep copy of map private Map deepCopyMap(Map positionMap) { Map positionMap1 = new TreeMap(); Set keySet = positionMap.keySet(); Iterator itr = keySet.iterator(); while (itr.hasNext()) { NameValueBean key = (NameValueBean) itr.next(); NameValueBean key1 = new 
NameValueBean(new Integer(key.getName()), new Integer(key.getValue()));
			List value = (ArrayList) positionMap.get(key);
			List value1 = new ArrayList();
			Iterator itr1 = value.iterator();
			while (itr1.hasNext())
			{
				NameValueBean ypos = (NameValueBean) itr1.next();
				NameValueBean ypos1 = new NameValueBean(new Integer(ypos.getName()),
						new Integer(ypos.getValue()));
				value1.add(ypos1);
			}
			positionMap1.put(key1, value1);
		}
		return positionMap1;
	}

	private long countPositionsInMap(Map positionMap)
	{
		long count = 0;
		Set keySet = positionMap.keySet();
		Iterator itr = keySet.iterator();
		while (itr.hasNext())
		{
			NameValueBean key = (NameValueBean) itr.next();
			List value = (ArrayList) positionMap.get(key);
			count = count + value.size();
		}
		return count;
	}

	/**
	 * Bug ID: 4038 Patch ID: 4038_3 See also: 1-3
	 */
	/**
	 *
	 * @param dao
	 *            Object of DAO
	 * @param containerId
	 *            id of container whose site is to be retrieved
	 * @return Site object belongs to container with given id
	 * @throws DAOException
	 *             Exception occurred while DB handling
	 */
	private Site getSite(DAO dao, Long containerId) throws DAOException
	{
		String sourceObjectName = StorageContainer.class.getName();
		String[] selectColumnName = new String[] { "site" };
		String[] whereColumnName = new String[] { "id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER
		String[] whereColumnCondition = new String[] { "=" };
		Object[] whereColumnValue = new Long[] { containerId };
		String joinCondition = null;
		List list = dao.retrieve(sourceObjectName, selectColumnName,
				whereColumnName, whereColumnCondition, whereColumnValue,
				joinCondition);
		if (!list.isEmpty())
		{
			return ((Site) list.get(0));
		}
		return null;
	}

	/**
	 * Name : kalpana thakur Reviewer Name : Vaishali Bug ID: 4922
	 * Description: Storage container will not be added to a closed site: check
	 * for closed site
	 */
	public void checkClosedSite(DAO dao, Long containerId, String errMessage)
			throws DAOException
	{
		Site site = getSite(dao, containerId);
		// check for closed Site
		if (site != null)
		{
			if (Constants.ACTIVITY_STATUS_CLOSED.equals(site
					.getActivityStatus()))
			{
				throw new DAOException(errMessage + " "
						+ ApplicationProperties.getValue("error.object.closed"));
			}
		}
	}

	/**
	 * To get the ids of the CollectionProtocol that the given StorageContainer
	 * can hold.
	 *
	 * @param container
	 *            The reference to the StorageContainer object.
	 * @return The array of ids of CollectionProtocol that the given
	 *         StorageContainer can hold.
	 * @throws DAOException
	 */
	public long[] getDefaultHoldCollectionProtocolList(
			StorageContainer container) throws DAOException
	{
		Collection spcimenArrayTypeCollection = (Collection) retrieveAttribute(
				StorageContainer.class.getName(), container.getId(),
				"elements(collectionProtocolCollection)");
		if (spcimenArrayTypeCollection.isEmpty())
		{
			return new long[] { -1 };
		}
		else
		{
			return Utility.getobjectIds(spcimenArrayTypeCollection);
		}
	}

	/**
	 * To check whether the Container to display can hold the given type of
	 * container.
	 *
	 * @param typeId
	 *            ContainerType id of container
	 * @param storageContainer
	 *            The StorageContainer reference to be displayed on the page.
	 * @param StorageContainerBizLogic
	 *            The reference to bizLogic class object.
	 * @return true if the given container can hold the type.
	 * @throws DAOException
	 */
	public boolean canHoldContainerType(int typeId,
			StorageContainer storageContainer) throws DAOException
	{
		/**
		 * Name: Smita Reviewer: Sachin Bug iD: 4598 Patch ID: 4598_1
		 * Description: Check for valid container type
		 */
		if (!isValidContaierType(typeId))
		{
			return false;
		}
		boolean canHold = false;
		Collection containerTypeCollection = (Collection) retrieveAttribute(
				StorageContainer.class.getName(), storageContainer.getId(),
				"elements(holdsStorageTypeCollection)");// storageContainer.getHoldsStorageTypeCollection();
		if (!containerTypeCollection.isEmpty())
		{
			Iterator itr = containerTypeCollection.iterator();
			while (itr.hasNext())
			{
				StorageType type = (StorageType) itr.next();
				long storagetypeId = type.getId().longValue();
				if (storagetypeId == Constants.ALL_STORAGE_TYPE_ID
						|| storagetypeId == typeId)
				{
					return true;
				}
			}
		}
		return canHold;
	}

	/**
	 * Patch ID: 4598_2 Is the container type one of the container types present
	 * in the system
	 *
	 * @param typeID
	 *            Container type ID
	 * @return true/false
	 * @throws DAOException
	 */
	public boolean isValidContaierType(int typeID) throws DAOException
	{
		Long longId = (Long) retrieveAttribute(StorageType.class.getName(),
				new Long(typeID), "id");
		return !(longId == null);
	}

	/**
	 * To check whether the Container to display can hold the given
	 * CollectionProtocol.
	 *
	 * @param collectionProtocolId
	 *            The collectionProtocol Id.
	 * @param storageContainer
	 *            The StorageContainer reference to be displayed on the page.
	 * @return true if the given container can hold the CollectionProtocol.
	 * @throws DAOException
	 */
	public boolean canHoldCollectionProtocol(long collectionProtocolId,
			StorageContainer storageContainer) throws DAOException
	{
		boolean canHold = true;
		Collection collectionProtocols = (Collection) retrieveAttribute(
				StorageContainer.class.getName(), storageContainer.getId(),
				"elements(collectionProtocolCollection)");// storageContainer.getCollectionProtocolCollection();
		if (!collectionProtocols.isEmpty())
		{
			Iterator itr = collectionProtocols.iterator();
			canHold = false;
			while (itr.hasNext())
			{
				CollectionProtocol cp = (CollectionProtocol) itr.next();
				if (cp.getId().longValue() == collectionProtocolId)
				{
					return true;
				}
			}
		}
		return canHold;
	}

	/**
	 * To check whether the Container to display can hold the given
	 * specimenClass.
	 *
	 * @param specimenClass
	 *            The specimenClass Name.
	 * @param storageContainer
	 *            The StorageContainer reference to be displayed on the page.
	 * @param bizLogic
	 *            The reference to bizLogic class object.
	 * @return true if the given container can hold the specimenClass.
	 * @throws DAOException
	 */
	public boolean canHoldSpecimenClass(String specimenClass,
			StorageContainer storageContainer) throws DAOException
	{
		Collection specimenClasses = (Collection) retrieveAttribute(
				StorageContainer.class.getName(), storageContainer.getId(),
				"elements(holdsSpecimenClassCollection)");// storageContainer.getHoldsSpecimenClassCollection();
		Iterator itr = specimenClasses.iterator();
		while (itr.hasNext())
		{
			String className = (String) itr.next();
			if (className.equals(specimenClass))
				return true;
		}
		return false;
	}

	/**
	 * To check whether the Container to display can hold the given
	 * specimenArrayTypeId.
	 *
	 * @param specimenArrayTypeId
	 *            The Specimen Array Type Id.
	 * @param storageContainer
	 *            The StorageContainer reference to be displayed on the page.
	 * @param bizLogic
	 *            The reference to bizLogic class object.
	 * @return true if the given container can hold the specimenArrayType.
*/ public boolean canHoldSpecimenArrayType(int specimenArrayTypeId, StorageContainer storageContainer) throws DAOException { boolean canHold = true; Collection specimenArrayTypes = (Collection) retrieveAttribute( StorageContainer.class.getName(), storageContainer.getId(), "elements(holdsSpecimenArrayTypeCollection)");// storageContainer.getHoldsSpArrayTypeCollection(); // if (!specimenArrayTypes.isEmpty()) { Iterator itr = specimenArrayTypes.iterator(); canHold = false; while (itr.hasNext()) { SpecimenArrayType specimenarrayType = (SpecimenArrayType) itr .next(); long arraytypeId = specimenarrayType.getId().longValue(); if (arraytypeId == Constants.ALL_SPECIMEN_ARRAY_TYPE_ID || arraytypeId == specimenArrayTypeId) { return true; } } } return canHold; } public Collection<SpecimenPosition> getSpecimenPositionCollForContainer( DAO dao, Long containerId) throws DAOException { if (containerId != null) { List specimenPosColl = dao.retrieve(SpecimenPosition.class .getName(), "storageContainer.id", containerId); return specimenPosColl; } return null; } /** * Called from DefaultBizLogic to get ObjectId for authorization check * (non-Javadoc) * @see edu.wustl.common.bizlogic.DefaultBizLogic#getObjectId(edu.wustl.common.dao.AbstractDAO, java.lang.Object) */ public String getObjectId(AbstractDAO dao, Object domainObject) { if (domainObject instanceof StorageContainer) { StorageContainer storageContainer = (StorageContainer) domainObject; Site site = null; if (storageContainer.getLocatedAtPosition() != null && storageContainer.getLocatedAtPosition().getParentContainer() != null) { try { Object object = dao.retrieve(StorageContainer.class.getName(), storageContainer.getLocatedAtPosition().getParentContainer() .getId()); if (object != null) { StorageContainer parentContainer = (StorageContainer) object; site = parentContainer.getSite(); } } catch (DAOException e) { return null; } } else { site = storageContainer.getSite(); } if (site != null) { StringBuffer sb = new StringBuffer(); sb.append(Site.class.getName()).append("_").append(site.getId().toString()); return sb.toString(); } } return null; } /** * To get PrivilegeName for authorization check from 'PermissionMapDetails.xml' * (non-Javadoc) * @see edu.wustl.common.bizlogic.DefaultBizLogic#getPrivilegeName(java.lang.Object) */ protected String getPrivilegeKey(Object domainObject) { return Constants.ADD_EDIT_STORAGE_CONTAINER; } }
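// ---------------------------------------------------------------------------
// Editor's illustrative sketch -- not part of the original caTissue source.
// Assuming a StorageContainer has already been retrieved and a collection
// protocol id is at hand, a caller might combine the canHold* checks defined
// above before placing a specimen. The names bizLogic, storageContainer and
// collectionProtocolId are hypothetical.
//
//     StorageContainerBizLogic bizLogic = new StorageContainerBizLogic();
//     boolean allowed = bizLogic.canHoldSpecimenClass("Tissue", storageContainer)
//             && bizLogic.canHoldCollectionProtocol(collectionProtocolId, storageContainer);
//     if (!allowed)
//     {
//         throw new DAOException("Container restrictions do not permit this specimen.");
//     }
// ---------------------------------------------------------------------------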
WEB-INF/src/edu/wustl/catissuecore/bizlogic/StorageContainerBizLogic.java
/** * <p>Title: StorageContainerHDAO Class> * <p>Description: StorageContainerHDAO is used to add Storage Container information into the database using Hibernate.</p> * Copyright: Copyright (c) year * Company: Washington University, School of Medicine, St. Louis. * @author Aniruddha Phadnis * @version 1.00 * Created on Jul 23, 2005 */ package edu.wustl.catissuecore.bizlogic; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.Vector; import org.hibernate.Session; import net.sf.ehcache.CacheException; import edu.wustl.catissuecore.domain.Capacity; import edu.wustl.catissuecore.domain.CollectionProtocol; import edu.wustl.catissuecore.domain.Container; import edu.wustl.catissuecore.domain.ContainerPosition; import edu.wustl.catissuecore.domain.Site; import edu.wustl.catissuecore.domain.Specimen; import edu.wustl.catissuecore.domain.SpecimenArray; import edu.wustl.catissuecore.domain.SpecimenArrayType; import edu.wustl.catissuecore.domain.SpecimenPosition; import edu.wustl.catissuecore.domain.StorageContainer; import edu.wustl.catissuecore.domain.StorageType; import edu.wustl.catissuecore.namegenerator.BarcodeGenerator; import edu.wustl.catissuecore.namegenerator.BarcodeGeneratorFactory; import edu.wustl.catissuecore.namegenerator.LabelGenerator; import edu.wustl.catissuecore.namegenerator.LabelGeneratorFactory; import edu.wustl.catissuecore.namegenerator.NameGeneratorException; import edu.wustl.catissuecore.util.ApiSearchUtil; import edu.wustl.catissuecore.util.CatissueCoreCacheManager; import edu.wustl.catissuecore.util.StorageContainerUtil; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.catissuecore.util.global.Utility; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.beans.SessionDataBean; import edu.wustl.common.bizlogic.DefaultBizLogic; import edu.wustl.common.dao.AbstractDAO; import edu.wustl.common.dao.DAO; import edu.wustl.common.dao.DAOFactory; import edu.wustl.common.dao.HibernateDAO; import edu.wustl.common.dao.JDBCDAO; import edu.wustl.common.domain.AbstractDomainObject; import edu.wustl.common.exception.BizLogicException; import edu.wustl.common.security.PrivilegeCache; import edu.wustl.common.security.PrivilegeManager; import edu.wustl.common.security.SecurityManager; import edu.wustl.common.security.exceptions.SMException; import edu.wustl.common.security.exceptions.UserNotAuthorizedException; import edu.wustl.common.tree.StorageContainerTreeNode; import edu.wustl.common.tree.TreeDataInterface; import edu.wustl.common.tree.TreeNode; import edu.wustl.common.tree.TreeNodeImpl; import edu.wustl.common.util.NameValueBeanRelevanceComparator; import edu.wustl.common.util.NameValueBeanValueComparator; import edu.wustl.common.util.XMLPropertyHandler; import edu.wustl.common.util.dbManager.DAOException; import edu.wustl.common.util.dbManager.DBUtil; import edu.wustl.common.util.dbManager.HibernateMetaData; import edu.wustl.common.util.global.ApplicationProperties; import edu.wustl.common.util.global.Validator; import edu.wustl.common.util.logger.Logger; /** * StorageContainerHDAO is used to add Storage Container information into the * database using Hibernate. 
* * @author vaishali_khandelwal */ public class StorageContainerBizLogic extends DefaultBizLogic implements TreeDataInterface { // Getting containersMaxLimit from the xml file in static variable private static final int containersMaxLimit = Integer .parseInt(XMLPropertyHandler .getValue(Constants.CONTAINERS_MAX_LIMIT)); /** * Saves the storageContainer object in the database. * * @param obj * The storageType object to be saved. * @param session * The session in which the object is saved. * @throws DAOException */ protected void insert(Object obj, DAO dao, SessionDataBean sessionDataBean) throws DAOException, UserNotAuthorizedException { StorageContainer container = (StorageContainer) obj; container.setActivityStatus(Constants.ACTIVITY_STATUS_ACTIVE); // Setting the Parent Container if applicable int posOneCapacity = 1, posTwoCapacity = 1; int positionDimensionOne = Constants.STORAGE_CONTAINER_FIRST_ROW, positionDimensionTwo = Constants.STORAGE_CONTAINER_FIRST_COLUMN; boolean fullStatus[][] = null; int noOfContainers = container.getNoOfContainers().intValue(); if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { Object object = dao.retrieve(StorageContainer.class.getName(), container.getLocatedAtPosition().getParentContainer() .getId()); if (object != null) { StorageContainer parentContainer = (StorageContainer) object; // check for closed ParentContainer checkStatus(dao, parentContainer, "Parent Container"); int totalCapacity = parentContainer.getCapacity() .getOneDimensionCapacity().intValue() * parentContainer.getCapacity() .getTwoDimensionCapacity().intValue(); Collection children = StorageContainerUtil.getChildren(dao, parentContainer.getId()); if ((noOfContainers + children.size()) > totalCapacity) { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.overflow")); } else { // Check if position specified is within the parent // container's // capacity if (false == validatePosition(parentContainer, container)) { throw new DAOException( ApplicationProperties .getValue("errors.storageContainer.dimensionOverflow")); } try { // check for all validations on the storage container. 
checkContainer(dao, container.getLocatedAtPosition() .getParentContainer().getId().toString(), container.getLocatedAtPosition() .getPositionDimensionOne().toString(), container.getLocatedAtPosition() .getPositionDimensionTwo().toString(), sessionDataBean, false); } catch (SMException sme) { sme.printStackTrace(); throw handleSMException(sme); } // check for availability of position /* * boolean canUse = isContainerAvailableForPositions(dao, * container); * * if (!canUse) { throw new * DAOException(ApplicationProperties.getValue("errors.storageContainer.inUse")); } */ // Check weather parent container is valid container to use boolean parentContainerValidToUSe = isParentContainerValidToUSe( container, parentContainer); if (!parentContainerValidToUSe) { throw new DAOException( "Parent Container is not valid for this container type"); } ContainerPosition cntPos = container.getLocatedAtPosition(); cntPos.setParentContainer(parentContainer); container.setSite(parentContainer.getSite()); posOneCapacity = parentContainer.getCapacity() .getOneDimensionCapacity().intValue(); posTwoCapacity = parentContainer.getCapacity() .getTwoDimensionCapacity().intValue(); fullStatus = getStorageContainerFullStatus(dao, parentContainer, children); positionDimensionOne = cntPos.getPositionDimensionOne() .intValue(); positionDimensionTwo = cntPos.getPositionDimensionTwo() .intValue(); container.setLocatedAtPosition(cntPos); } } else { throw new DAOException(ApplicationProperties .getValue("errors.storageContainerExist")); } } else { loadSite(dao, container); } loadStorageType(dao, container); for (int i = 0; i < noOfContainers; i++) { StorageContainer cont = new StorageContainer(container); if (cont.getLocatedAtPosition() != null && cont.getLocatedAtPosition().getParentContainer() != null) { ContainerPosition cntPos = cont.getLocatedAtPosition(); cntPos .setPositionDimensionOne(new Integer( positionDimensionOne)); cntPos .setPositionDimensionTwo(new Integer( positionDimensionTwo)); cntPos.setOccupiedContainer(cont); cont.setLocatedAtPosition(cntPos); } Logger.out.debug("Collection protocol size:" + container.getCollectionProtocolCollection().size()); // by falguni // Call Storage container label generator if its specified to use // automatic label generator if (edu.wustl.catissuecore.util.global.Variables.isStorageContainerLabelGeneratorAvl) { LabelGenerator storagecontLblGenerator; try { storagecontLblGenerator = LabelGeneratorFactory .getInstance(Constants.STORAGECONTAINER_LABEL_GENERATOR_PROPERTY_NAME); storagecontLblGenerator.setLabel(cont); container.setName(cont.getName()); } catch (NameGeneratorException e) { throw new DAOException(e.getMessage()); } } if (edu.wustl.catissuecore.util.global.Variables.isStorageContainerBarcodeGeneratorAvl) { BarcodeGenerator storagecontBarcodeGenerator; try { storagecontBarcodeGenerator = BarcodeGeneratorFactory .getInstance(Constants.STORAGECONTAINER_BARCODE_GENERATOR_PROPERTY_NAME); // storagecontBarcodeGenerator.setBarcode(cont); } catch (NameGeneratorException e) { throw new DAOException(e.getMessage()); } } dao.insert(cont.getCapacity(), sessionDataBean, true, true); dao.insert(cont, sessionDataBean, true, true); // Used for showing the success message after insert and using it // for edit. 
container.setId(cont.getId()); container.setCapacity(cont.getCapacity()); if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { Logger.out.debug("In if: "); do { if (positionDimensionTwo == posTwoCapacity) { if (positionDimensionOne == posOneCapacity) positionDimensionOne = Constants.STORAGE_CONTAINER_FIRST_ROW; else positionDimensionOne = (positionDimensionOne + 1) % (posOneCapacity + 1); positionDimensionTwo = Constants.STORAGE_CONTAINER_FIRST_COLUMN; } else { positionDimensionTwo = positionDimensionTwo + 1; } Logger.out.debug("positionDimensionTwo: " + positionDimensionTwo); Logger.out.debug("positionDimensionOne: " + positionDimensionOne); } while (fullStatus[positionDimensionOne][positionDimensionTwo] != false); } // Inserting authorization data Set protectionObjects = new HashSet(); protectionObjects.add(cont); try { // SecurityManager.getInstance(this.getClass()).insertAuthorizationData(null, // protectionObjects, getDynamicGroups(cont)); PrivilegeManager privilegeManager = PrivilegeManager .getInstance(); privilegeManager.insertAuthorizationData(null, protectionObjects, getDynamicGroups(cont), cont .getObjectId()); } catch (SMException e) { throw handleSMException(e); } } } /** * Name : Pathik Sheth Reviewer Name :Vishvesh Mulay * Description:Retrive only repository sites which are not closed. */ public List getRepositorySiteList(String sourceObjectName, String[] displayNameFields, String valueField, String activityStatusArr[], boolean isToExcludeDisabled) throws DAOException { String[] whereColumnName = null; String[] whereColumnCondition = null; String joinCondition = null; String separatorBetweenFields = ", "; whereColumnName = new String[] { "activityStatus","type"}; whereColumnCondition = new String[] { "not in","=" }; // whereColumnCondition = new String[]{"in"}; Object[] whereColumnValue = { activityStatusArr,Constants.REPOSITORY}; return getList(sourceObjectName, displayNameFields, valueField, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition, separatorBetweenFields, isToExcludeDisabled); } public List getSiteList(String[] displayNameFields, String valueField, String activityStatusArr[], Long userId) throws DAOException { List siteResultList = getRepositorySiteList(Site.class.getName(), displayNameFields, valueField, activityStatusArr, false); List userList = null; Set<Long> idSet = new UserBizLogic().getRelatedSiteIds(userId); userList = new ArrayList(); Iterator siteListIterator = siteResultList.iterator(); while (siteListIterator.hasNext()) { NameValueBean nameValBean = (NameValueBean) siteListIterator .next(); Long siteId = new Long(nameValBean.getValue()); if (hasPrivilegeonSite(idSet, siteId)) { userList.add(nameValBean); } } return userList; } private boolean hasPrivilegeonSite(Set<Long> siteidSet, Long siteId) { boolean hasPrivilege = true; if (siteidSet != null) { if (!siteidSet.contains(siteId)) { hasPrivilege = false; } } return hasPrivilege; } /** * this function checks weather parent of the container is valid or not * according to restriction provided for the containers * * @param container - * Container * @param parent - * Parent Container * @return boolean true indicating valid to use , false indicating not valid * to use. 
* @throws DAOException */ protected boolean isParentContainerValidToUSe(StorageContainer container, StorageContainer parent) throws DAOException { StorageType storageTypeAny = new StorageType(); storageTypeAny.setId(new Long("1")); storageTypeAny.setName("All"); if (parent.getHoldsStorageTypeCollection().contains(storageTypeAny)) { return true; } if (!parent.getHoldsStorageTypeCollection().contains( container.getStorageType())) { return false; } return true; } // This method sets the collection Storage Types. protected String[] getDynamicGroups(AbstractDomainObject obj) throws SMException { String[] dynamicGroups = null; StorageContainer storageContainer = (StorageContainer) obj; if (storageContainer.getLocatedAtPosition() != null && storageContainer.getLocatedAtPosition().getParentContainer() != null) { dynamicGroups = SecurityManager.getInstance(this.getClass()) .getProtectionGroupByName( storageContainer.getLocatedAtPosition() .getParentContainer()); } else { dynamicGroups = SecurityManager.getInstance(this.getClass()) .getProtectionGroupByName(storageContainer.getSite()); } return dynamicGroups; } public void postInsert(Object obj, DAO dao, SessionDataBean sessionDataBean) throws DAOException, UserNotAuthorizedException { StorageContainer container = (StorageContainer) obj; try { Map containerMap = StorageContainerUtil.getContainerMapFromCache(); StorageContainerUtil.addStorageContainerInContainerMap(container, containerMap); } catch (Exception e) { } } /** * Updates the persistent object in the database. * * @param obj * The object to be updated. * @param session * The session in which the object is saved. * @throws DAOException */ protected void update(DAO dao, Object obj, Object oldObj, SessionDataBean sessionDataBean) throws DAOException, UserNotAuthorizedException { StorageContainer container = (StorageContainer) obj; StorageContainer oldContainer = (StorageContainer) oldObj; // lazy change StorageContainer persistentOldContainerForChange = null; Object object = dao.retrieve(StorageContainer.class.getName(), oldContainer.getId()); persistentOldContainerForChange = (StorageContainer) object; // retrive parent container if (container.getLocatedAtPosition() != null) { StorageContainer parentStorageContainer = (StorageContainer) dao .retrieve(StorageContainer.class.getName(), container .getLocatedAtPosition().getParentContainer() .getId()); container.getLocatedAtPosition().setParentContainer( parentStorageContainer); } Logger.out.debug("container.isParentChanged() : " + container.isParentChanged()); if (container.isParentChanged()) { if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { // Check whether continer is moved to one of its sub container. if (isUnderSubContainer(container, container .getLocatedAtPosition().getParentContainer().getId(), dao)) { throw new DAOException(ApplicationProperties .getValue("errors.container.under.subcontainer")); } Logger.out.debug("Loading ParentContainer: " + container.getLocatedAtPosition().getParentContainer() .getId()); /** * Name : Vijay_Pande Reviewer : Sntosh_Chandak Bug ID: 4038 * Patch ID: 4038_1 See also: 1-3 Description: In the edit mode * while updating parent container there was a hibernet session * error Since we were retrieving parent container it was * retriving all child containers as well. Hence only required * filed of parent containcer is retrieved. 
*/ // StorageContainer pc = (StorageContainer) // dao.retrieve(StorageContainer.class.getName(), // container.getParent().getId()); /* * Check if position specified is within the parent container's * capacity */ if (false == validatePosition(dao, container)) { throw new DAOException( ApplicationProperties .getValue("errors.storageContainer.dimensionOverflow")); } // Mandar : code added for validation bug id 666. 24-11-2005 // start boolean canUse = isContainerAvailableForPositions(dao, container); Logger.out.debug("canUse : " + canUse); if (!canUse) { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.inUse")); } // Mandar : code added for validation bug id 666. 24-11-2005 end // check for closed ParentContainer checkStatus(dao, container.getLocatedAtPosition() .getParentContainer(), "Parent Container"); // container.setParent(pc); Site site = getSite(dao, container.getLocatedAtPosition() .getParentContainer().getId()); // Site // site=((StorageContainer)container.getParent()).getSite(); // check for closed Site checkStatus(dao, site, "Parent Container Site"); container.setSite(site); /** -- patch ends here -- */ } } // Mandar : code added for validation 25-11-05----------- else // if parent container is not changed only the position is changed. { if (container.isPositionChanged()) { // ----------------- String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "id", "capacity.oneDimensionCapacity", "capacity.twoDimensionCapacity" }; String[] whereColumnName = { "id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { container.getLocatedAtPosition() .getParentContainer().getId() }; String joinCondition = null; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); if (!list.isEmpty()) { Object[] obj1 = (Object[]) list.get(0); Logger.out .debug("**************PC obj::::::: --------------- " + obj1); Logger.out.debug((Long) obj1[0]); Logger.out.debug((Integer) obj1[1]); Logger.out.debug((Integer) obj1[2]); Integer pcCapacityOne = (Integer) obj1[1]; Integer pcCapacityTwo = (Integer) obj1[2]; if (!validatePosition(pcCapacityOne.intValue(), pcCapacityTwo.intValue(), container)) { throw new DAOException( ApplicationProperties .getValue("errors.storageContainer.dimensionOverflow")); } } else { } // ----------------- // StorageContainer pc = (StorageContainer) // dao.retrieve(StorageContainer.class.getName(), // container.getParentContainer().getId()); // if(!validatePosition(container.getParentContainer().getStorageContainerCapacity().getOneDimensionCapacity().intValue(), // container.getParentContainer().getStorageContainerCapacity().getTwoDimensionCapacity().intValue(), // container)) // /*Check if position specified is within the parent // container's capacity*/ // // if(!validatePosition(pc,container)) // { // throw new // DAOException(ApplicationProperties.getValue("errors.storageContainer.dimensionOverflow")); // } // /** * Only if parentContainerID, positionOne or positionTwo is * changed check for availability of position */ if (oldContainer.getLocatedAtPosition() != null && oldContainer.getLocatedAtPosition() .getPositionDimensionOne() != null && oldContainer.getLocatedAtPosition() .getPositionDimensionOne().intValue() != container .getLocatedAtPosition() .getPositionDimensionOne().intValue() || oldContainer.getLocatedAtPosition() .getPositionDimensionTwo().intValue() != 
container .getLocatedAtPosition() .getPositionDimensionTwo().intValue()) { boolean canUse = isContainerAvailableForPositions(dao, container); Logger.out.debug("canUse : " + canUse); if (!canUse) { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.inUse")); } } } } // Mandar : --------- end 25-11-05 ----------------- boolean flag = true; if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null && oldContainer.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() .getId().longValue() == oldContainer .getLocatedAtPosition().getParentContainer().getId() .longValue() && container.getLocatedAtPosition().getPositionDimensionOne() .longValue() == oldContainer.getLocatedAtPosition() .getPositionDimensionOne().longValue() && container.getLocatedAtPosition().getPositionDimensionTwo() .longValue() == oldContainer.getLocatedAtPosition() .getPositionDimensionTwo().longValue()) { flag = false; } if (flag) { try { // check for all validations on the storage container. if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getParentContainer() != null) { checkContainer(dao, container.getLocatedAtPosition() .getParentContainer().getId().toString(), container .getLocatedAtPosition().getPositionDimensionOne() .toString(), container.getLocatedAtPosition() .getPositionDimensionTwo().toString(), sessionDataBean, false); } } catch (SMException sme) { sme.printStackTrace(); throw handleSMException(sme); } } // Check whether size has been reduced // Sri: fix for bug #355 (Storage capacity: Reducing capacity should be // handled) Integer oldContainerDimOne = oldContainer.getCapacity() .getOneDimensionCapacity(); Integer oldContainerDimTwo = oldContainer.getCapacity() .getTwoDimensionCapacity(); Integer newContainerDimOne = container.getCapacity() .getOneDimensionCapacity(); Integer newContainerDimTwo = container.getCapacity() .getTwoDimensionCapacity(); // If any size is reduced, object was present at any of the deleted // positions throw error if (oldContainerDimOne.intValue() > newContainerDimOne.intValue() || oldContainerDimTwo.intValue() > newContainerDimTwo .intValue()) { boolean canReduceDimension = StorageContainerUtil .checkCanReduceDimension(oldContainer, container); if (!canReduceDimension) { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.cannotReduce")); } } /** * Name : kalpana thakur Reviewer Name : Vaishali Bug ID: 4922 * Description:Storage container will not be added to closed site :check * for closed site */ if (container.getId() != null) { checkClosedSite(dao, container.getId(), "Container site"); } setSiteForSubContainers(container, container.getSite(), dao); boolean restrictionsCanChange = isContainerEmpty(dao, container); Logger.out.info("--------------container Available :" + restrictionsCanChange); if (!restrictionsCanChange) { boolean restrictionsChanged = checkForRestrictionsChanged( container, oldContainer); Logger.out.info("---------------restriction changed -:" + restrictionsChanged); if (restrictionsChanged) { throw new DAOException( ApplicationProperties .getValue("errros.storageContainer.restrictionCannotChanged")); } } Collection<SpecimenPosition> specimenPosColl = getSpecimenPositionCollForContainer( dao, container.getId()); container.setSpecimenPositionCollection(specimenPosColl); setValuesinPersistentObject(persistentOldContainerForChange, container, dao); dao.update(persistentOldContainerForChange, 
sessionDataBean, true, true, false); dao.update(persistentOldContainerForChange.getCapacity(), sessionDataBean, true, true, false); // Audit of update of storage container. dao.audit(obj, oldObj, sessionDataBean, true); dao.audit(container.getCapacity(), oldContainer.getCapacity(), sessionDataBean, true); Logger.out.debug("container.getActivityStatus() " + container.getActivityStatus()); // lazy change /* * if (container.getParent() != null) { * * StorageContainer pc = (StorageContainer) * dao.retrieve(StorageContainer.class.getName(), * container.getParent().getId()); container.setParent(pc); } */ if (container.getActivityStatus().equals( Constants.ACTIVITY_STATUS_DISABLED)) { Long containerIDArr[] = { container.getId() }; if (isContainerAvailableForDisabled(dao, containerIDArr)) { List disabledConts = new ArrayList(); /** * Preapare list of parent/child containers to disable * */ List<StorageContainer> disabledContainerList = new ArrayList<StorageContainer>(); disabledContainerList.add(persistentOldContainerForChange); persistentOldContainerForChange.setLocatedAtPosition(null); addEntriesInDisabledMap(persistentOldContainerForChange, disabledConts); // disabledConts.add(new StorageContainer(container)); setDisableToSubContainer(persistentOldContainerForChange, disabledConts, dao, disabledContainerList); persistentOldContainerForChange.getOccupiedPositions().clear(); Logger.out.debug("container.getActivityStatus() " + container.getActivityStatus()); disableSubStorageContainer(dao, sessionDataBean, disabledContainerList); persistentOldContainerForChange.setLocatedAtPosition(null); dao.update(persistentOldContainerForChange, sessionDataBean, true, true, false); try { CatissueCoreCacheManager catissueCoreCacheManager = CatissueCoreCacheManager .getInstance(); catissueCoreCacheManager.addObjectToCache( Constants.MAP_OF_DISABLED_CONTAINERS, (Serializable) disabledConts); } catch (CacheException e) { } } else { throw new DAOException(ApplicationProperties .getValue("errors.container.contains.specimen")); } } } public void setValuesinPersistentObject(StorageContainer persistentobject, StorageContainer newObject, DAO dao) throws DAOException { persistentobject.setActivityStatus(newObject.getActivityStatus()); persistentobject.setBarcode(newObject.getBarcode()); Capacity persistCapacity = persistentobject.getCapacity(); Capacity newCapacity = newObject.getCapacity(); persistCapacity.setOneDimensionCapacity(newCapacity .getOneDimensionCapacity()); persistCapacity.setTwoDimensionCapacity(newCapacity .getTwoDimensionCapacity()); Collection children = StorageContainerUtil.getChildren(dao, newObject .getId()); StorageContainerUtil.setChildren(children, dao, persistentobject .getId()); // persistentobject.setChildren(newObject.getChildren()); persistentobject.setCollectionProtocolCollection(newObject .getCollectionProtocolCollection()); persistentobject.setComment(newObject.getComment()); persistentobject.setFull(newObject.isFull()); persistentobject.setHoldsSpecimenArrayTypeCollection(newObject .getHoldsSpecimenArrayTypeCollection()); persistentobject.setHoldsSpecimenClassCollection(newObject .getHoldsSpecimenClassCollection()); persistentobject.setHoldsStorageTypeCollection(newObject .getHoldsStorageTypeCollection()); persistentobject.setName(newObject.getName()); persistentobject.setNoOfContainers(newObject.getNoOfContainers()); persistentobject.setParentChanged(newObject.isParentChanged()); persistentobject.setPositionChanged(newObject.isPositionChanged()); if (newObject.getLocatedAtPosition() != 
null) { ContainerPosition cntPos = persistentobject.getLocatedAtPosition(); if (cntPos == null) { cntPos = new ContainerPosition(); } cntPos.setPositionDimensionOne(newObject.getLocatedAtPosition() .getPositionDimensionOne()); cntPos.setPositionDimensionTwo(newObject.getLocatedAtPosition() .getPositionDimensionTwo()); cntPos.setParentContainer(newObject.getLocatedAtPosition() .getParentContainer()); cntPos.setOccupiedContainer(persistentobject); // persistentobject.setLocatedAtPosition(cntPos); } persistentobject.setSimilarContainerMap(newObject .getSimilarContainerMap()); persistentobject.setSite(newObject.getSite()); if (newObject.getSpecimenPositionCollection() != null) { Collection<SpecimenPosition> specPosColl = persistentobject .getSpecimenPositionCollection(); // if(specPosColl == null) // { // specPosColl = new HashSet<SpecimenPosition>(); // } specPosColl.addAll(newObject.getSpecimenPositionCollection()); // specPos.setSpecimen(newObject.getSpecimenPosition().getSpecimen()); // specPos.setStorageContainer(newObject); // persistentobject.setSpecimenPosition(specPos); } persistentobject.setStartNo(newObject.getStartNo()); persistentobject.setStorageType(newObject.getStorageType()); persistentobject.setTempratureInCentigrade(newObject .getTempratureInCentigrade()); } private void addEntriesInDisabledMap(StorageContainer container, List disabledConts) { String contNameKey = "StorageContName"; String contIdKey = "StorageContIdKey"; String parentContNameKey = "ParentContName"; String parentContIdKey = "ParentContId"; String pos1Key = "pos1"; String pos2Key = "pos2"; Map containerDetails = new TreeMap(); containerDetails.put(contNameKey, container.getName()); containerDetails.put(contIdKey, container.getId()); if (container != null && container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { containerDetails.put(parentContNameKey, container .getLocatedAtPosition().getParentContainer().getName()); containerDetails.put(parentContIdKey, container .getLocatedAtPosition().getParentContainer().getId()); containerDetails.put(pos1Key, container.getLocatedAtPosition() .getPositionDimensionOne()); containerDetails.put(pos2Key, container.getLocatedAtPosition() .getPositionDimensionTwo()); } disabledConts.add(containerDetails); } public void postUpdate(DAO dao, Object currentObj, Object oldObj, SessionDataBean sessionDataBean) throws BizLogicException, UserNotAuthorizedException { try { Map containerMap = StorageContainerUtil.getContainerMapFromCache(); StorageContainer currentContainer = (StorageContainer) currentObj; StorageContainer oldContainer = (StorageContainer) oldObj; // if name gets change then update the cache with new key if (!currentContainer.getName().equals(oldContainer.getName())) { StorageContainerUtil.updateNameInCache(containerMap, currentContainer, oldContainer); } // If capacity of container gets increased then insert all the new // positions in map .......... 
int xOld = oldContainer.getCapacity().getOneDimensionCapacity() .intValue(); int xNew = currentContainer.getCapacity().getOneDimensionCapacity() .intValue(); int yOld = oldContainer.getCapacity().getTwoDimensionCapacity() .intValue(); int yNew = currentContainer.getCapacity().getTwoDimensionCapacity() .intValue(); if (xNew != xOld || yNew != yOld) { StorageContainerUtil.updateStoragePositions(containerMap, currentContainer, oldContainer); } // finish if (oldContainer != null && oldContainer.getLocatedAtPosition() != null && oldContainer.getLocatedAtPosition().getParentContainer() != null) { StorageContainer oldParentCont = (StorageContainer) HibernateMetaData .getProxyObjectImpl(oldContainer.getLocatedAtPosition() .getParentContainer()); StorageContainerUtil.insertSinglePositionInContainerMap( oldParentCont, containerMap, oldContainer .getLocatedAtPosition() .getPositionDimensionOne().intValue(), oldContainer.getLocatedAtPosition() .getPositionDimensionTwo().intValue()); } if (currentContainer != null && currentContainer.getLocatedAtPosition() != null && currentContainer.getLocatedAtPosition() .getParentContainer() != null) { StorageContainer currentParentCont = (StorageContainer) currentContainer .getLocatedAtPosition().getParentContainer(); StorageContainerUtil.deleteSinglePositionInContainerMap( currentParentCont, containerMap, currentContainer .getLocatedAtPosition() .getPositionDimensionOne().intValue(), currentContainer.getLocatedAtPosition() .getPositionDimensionTwo().intValue()); } if (currentContainer.getActivityStatus().equals( Constants.ACTIVITY_STATUS_DISABLED)) { List disabledConts = StorageContainerUtil .getListOfDisabledContainersFromCache(); List disabledContsAfterReverse = new ArrayList(); for (int i = disabledConts.size() - 1; i >= 0; i--) { disabledContsAfterReverse.add(disabledConts.get(i)); } Iterator itr = disabledContsAfterReverse.iterator(); while (itr.hasNext()) { Map disabledContDetails = (TreeMap) itr.next(); String contNameKey = "StorageContName"; String contIdKey = "StorageContIdKey"; String parentContNameKey = "ParentContName"; String parentContIdKey = "ParentContId"; String pos1Key = "pos1"; String pos2Key = "pos2"; StorageContainer cont = new StorageContainer(); cont.setId((Long) disabledContDetails.get(contIdKey)); cont.setName((String) disabledContDetails.get(contNameKey)); if (disabledContDetails.get(parentContIdKey) != null) { StorageContainer parent = new StorageContainer(); parent.setName((String) disabledContDetails .get(parentContNameKey)); parent.setId((Long) disabledContDetails .get(parentContIdKey)); // cont.setParent(parent); ContainerPosition cntPos = new ContainerPosition(); cntPos .setPositionDimensionOne((Integer) disabledContDetails .get(pos1Key)); cntPos .setPositionDimensionTwo((Integer) disabledContDetails .get(pos2Key)); cntPos.setParentContainer(parent); cntPos.setOccupiedContainer(cont); cont.setLocatedAtPosition(cntPos); } StorageContainerUtil.removeStorageContainerInContainerMap( cont, containerMap); } } } catch (Exception e) { Logger.out.error(e.getMessage(), e); throw new BizLogicException(e.getMessage(), e); } } /* * public boolean isContainerFull(String containerId, int dimX, int dimY) * throws DAOException { * * boolean availablePositions[][] = * getAvailablePositionsForContainer(containerId, dimX, dimY); * * dimX = availablePositions.length; for (int x = 1; x < dimX; x++) { dimY = * availablePositions[x].length; for (int y = 1; y < dimY; y++) { if * (availablePositions[x][y] == true) return false; } } return true; * } */ 
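	// Editor's note (illustrative, not part of the original source): the
	// restriction check below only flags a change when some OLD restriction is
	// no longer covered by the new set, so widening the restrictions of a
	// non-empty container is allowed. For example, with collection protocols:
	//
	//     old = {CP1}, new = {CP1, CP2}  -> returns false (superset, permitted)
	//     old = {CP1}, new = {CP2}       -> returns true  (CP1 dropped, rejected)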
private boolean checkForRestrictionsChanged(StorageContainer newContainer, StorageContainer oldContainer) { int flag = 0; Collection cpCollNew = newContainer.getCollectionProtocolCollection(); Collection cpCollOld = oldContainer.getCollectionProtocolCollection(); Collection storTypeCollNew = newContainer .getHoldsStorageTypeCollection(); Collection storTypeCollOld = oldContainer .getHoldsStorageTypeCollection(); Collection spClassCollNew = newContainer .getHoldsSpecimenClassCollection(); Collection spClassCollOld = oldContainer .getHoldsSpecimenClassCollection(); Collection spArrayTypeCollNew = newContainer .getHoldsSpecimenArrayTypeCollection(); Collection spArrayTypeCollOld = oldContainer .getHoldsSpecimenArrayTypeCollection(); /* * if (cpCollNew.size() != cpCollOld.size()) return true; */ /** * Bug 3612 - User should be able to change the restrictions if he * specifies the superset of the old restrictions if container is not * empty. */ Iterator itrOld = cpCollOld.iterator(); while (itrOld.hasNext()) { flag = 0; CollectionProtocol cpOld = (CollectionProtocol) itrOld.next(); Iterator itrNew = cpCollNew.iterator(); if (cpCollNew.size() == 0) { break; } while (itrNew.hasNext()) { CollectionProtocol cpNew = (CollectionProtocol) itrNew.next(); if (cpOld.getId().longValue() == cpNew.getId().longValue()) { flag = 1; break; } } if (flag != 1) return true; } /* * if (storTypeCollNew.size() != storTypeCollOld.size()) return true; */ itrOld = storTypeCollOld.iterator(); while (itrOld.hasNext()) { flag = 0; StorageType storOld = (StorageType) itrOld.next(); Iterator itrNew = storTypeCollNew.iterator(); while (itrNew.hasNext()) { StorageType storNew = (StorageType) itrNew.next(); if (storNew.getId().longValue() == storOld.getId().longValue() || storNew.getId().longValue() == 1) { flag = 1; break; } } if (flag != 1) return true; } /* * if (spClassCollNew.size() != spClassCollOld.size()) return true; */ itrOld = spClassCollOld.iterator(); while (itrOld.hasNext()) { flag = 0; String specimenOld = (String) itrOld.next(); Iterator itrNew = spClassCollNew.iterator(); while (itrNew.hasNext()) { String specimenNew = (String) itrNew.next(); if (specimenNew.equals(specimenOld)) { flag = 1; break; } } if (flag != 1) return true; } /* * if (spArrayTypeCollNew.size() != spArrayTypeCollOld.size()) return * true; */ itrOld = spArrayTypeCollOld.iterator(); while (itrOld.hasNext()) { flag = 0; SpecimenArrayType spArrayTypeOld = (SpecimenArrayType) itrOld .next(); Iterator itrNew = spArrayTypeCollNew.iterator(); while (itrNew.hasNext()) { SpecimenArrayType spArrayTypeNew = (SpecimenArrayType) itrNew .next(); if (spArrayTypeNew.getId().longValue() == spArrayTypeOld .getId().longValue() || spArrayTypeNew.getId().longValue() == 1) { flag = 1; break; } } if (flag != 1) return true; } return false; } protected void setPrivilege(DAO dao, String privilegeName, Class objectType, Long[] objectIds, Long userId, String roleId, boolean assignToUser, boolean assignOperation) throws SMException, DAOException { Logger.out.debug(" privilegeName:" + privilegeName + " objectType:" + objectType + " objectIds:" + edu.wustl.common.util.Utility.getArrayString(objectIds) + " userId:" + userId + " roleId:" + roleId + " assignToUser:" + assignToUser); // Aarti: Bug#1199 - We should be able to deassign // privilege on child even though user has privilege on the parent. // Thus commenting the check for privileges on parent. 
// if (assignOperation == Constants.PRIVILEGE_DEASSIGN) // { // isDeAssignable(dao, privilegeName, objectIds, userId, roleId, // assignToUser); // } super.setPrivilege(dao, privilegeName, objectType, objectIds, userId, roleId, assignToUser, assignOperation); assignPrivilegeToSubStorageContainer(dao, privilegeName, objectIds, userId, roleId, assignToUser, assignOperation); } /** * Checks whether the user/role has privilege on the parent * (Container/Site). If the user has privilege an exception is thrown * stating to deassign the privilege of parent first. * * @param dao * The dao object to get the related objects down the hierarchy. * @param objectIds * The objects ids of containerwhose parent is to be checked. * @param privilegeName * The privilege name. * @param userId * The user identifier. * @param roleId * The roleId in case privilege is assigned/deassigned to a role. * @param assignToUser * boolean which determines whether privilege is * assigned/deassigned to a user or role. * @throws Exception */ private void isDeAssignable(DAO dao, String privilegeName, Long[] objectIds, Long userId, String roleId, boolean assignToUser) throws Exception { // Aarti: Bug#2364 - Error while assigning privileges since attribute // parentContainer changed to parent String[] selectColumnNames = { "locatedAtPosition.parentContainer.id", "site.id" }; String[] whereColumnNames = { "id" }; List listOfSubElement = super.getRelatedObjects(dao, StorageContainer.class, selectColumnNames, whereColumnNames, objectIds); Logger.out.debug("Related Objects>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" + listOfSubElement.size()); String userName = new String(); if (assignToUser == true) { userName = SecurityManager.getInstance( StorageContainerBizLogic.class).getUserById( userId.toString()).getLoginName(); } // To get privilegeCache through // Singleton instance of PrivilegeManager, requires User LoginName PrivilegeManager privilegeManager = PrivilegeManager.getInstance(); PrivilegeCache privilegeCache = privilegeManager .getPrivilegeCache(userName); Iterator iterator = listOfSubElement.iterator(); while (iterator.hasNext()) { Object[] row = (Object[]) iterator.next(); // Parent storage container identifier. Object containerObject = (Object) row[0]; String className = StorageContainer.class.getName(); // Parent storage container identifier is null, the parent is a // site.. if ((row[0] == null) || (row[0].equals(""))) { containerObject = row[1]; className = Site.class.getName(); } Logger.out.debug("Container Object After ********************** : " + containerObject + "row[1] : " + row[1]); boolean permission = false; // Check the permission on the parent container or site. if (assignToUser == true)// If the privilege is // assigned/deassigned to a user. { // Call to SecurityManager.checkPermission bypassed & // instead, call redirected to privilegeCache.hasPrivilege permission = true; // Commented by Vishvesh & Ravindra for MSR for C1 // privilegeCache.hasPrivilege(className+"_"+containerObject.toString(), // privilegeName); // permission = // SecurityManager.getInstance(StorageContainerBizLogic.class).checkPermission(userName, // className, // containerObject.toString(), privilegeName); } else // If the privilege is assigned/deassigned to a user group. 
{ permission = privilegeManager.hasGroupPrivilege(roleId, className + "_" + containerObject.toString(), privilegeName); // permission = // SecurityManager.getInstance(StorageContainerBizLogic.class).checkPermission(roleId, // className, // containerObject.toString()); } // If the parent is a Site. if (permission == true && row[0] == null) { throw new DAOException( "Error : First de-assign privilege of the Parent Site with system identifier " + row[1].toString()); } else if (permission == true && row[0] != null)// If the parent is // a storage // container. { throw new DAOException( "Error : First de-assign privilege of the Parent Container with system identifier " + row[0].toString()); } } } /** * Assigns the privilege to all the sub-containers down the hierarchy. * * @param dao * The dao object to get the related objects down the hierarchy. * @param privilegeName * The privilege name. * @param storageContainerIDArr * The storage container id array. * @param userId * The user identifier. * @param roleId * The roleId in case privilege is assigned/deassigned to a role. * @param assignToUser * boolean which determines whether privilege is * assigned/deassigned to a user or role. * @param assignOperation * boolean which determines assign/deassign. * @throws SMException * @throws DAOException */ private void assignPrivilegeToSubStorageContainer(DAO dao, String privilegeName, Long[] storageContainerIDArr, Long userId, String roleId, boolean assignToUser, boolean assignOperation) throws SMException, DAOException { // Aarti: Bug#2364 - Error while assigning privileges since attribute // parentContainer changed to parent // Get list of sub container identifiers. List listOfSubStorageContainerId = super.getRelatedObjects(dao, StorageContainer.class, "locatedAtPosition.parentContainer", storageContainerIDArr); if (listOfSubStorageContainerId.isEmpty()) return; super.setPrivilege(dao, privilegeName, StorageContainer.class, Utility .toLongArray(listOfSubStorageContainerId), userId, roleId, assignToUser, assignOperation); assignPrivilegeToSubStorageContainer(dao, privilegeName, Utility .toLongArray(listOfSubStorageContainerId), userId, roleId, assignToUser, assignOperation); } /** * @param dao * @param objectIds * @param assignToUser * @param roleId * @throws DAOException * @throws SMException */ public void assignPrivilegeToRelatedObjectsForSite(DAO dao, String privilegeName, Long[] objectIds, Long userId, String roleId, boolean assignToUser, boolean assignOperation) throws SMException, DAOException { List listOfSubElement = super.getRelatedObjects(dao, StorageContainer.class, "site", objectIds); if (!listOfSubElement.isEmpty()) { super.setPrivilege(dao, privilegeName, StorageContainer.class, Utility.toLongArray(listOfSubElement), userId, roleId, assignToUser, assignOperation); } } // This method sets the Storage Type & Site (if applicable) of this // container. 
protected void loadSite(DAO dao, StorageContainer container) throws DAOException { Site site = container.getSite(); // Setting the site if applicable if (site != null) { // Commenting dao.retrive() call as retrived object is not realy // required for further processing -Prafull Site siteObj = (Site) dao.retrieve(Site.class.getName(), container .getSite().getId()); if (siteObj != null) { // check for closed site checkStatus(dao, siteObj, "Site"); container.setSite(siteObj); setSiteForSubContainers(container, siteObj, dao); } } } protected void loadStorageType(DAO dao, StorageContainer container) throws DAOException { // Setting the Storage Type Object storageTypeObj = dao.retrieve(StorageType.class.getName(), container.getStorageType().getId()); if (storageTypeObj != null) { StorageType type = (StorageType) storageTypeObj; container.setStorageType(type); } } private void setSiteForSubContainers(StorageContainer storageContainer, Site site, DAO dao) throws DAOException { // Added storageContainer.getId()!=null check as this method fails in // case when it gets called from insert(). -PRafull if (storageContainer != null && storageContainer.getId() != null) { // Collection children = (Collection) // dao.retrieveAttribute(storageContainer.getClass().getName(), // storageContainer.getId(), "elements(children)"); Collection children = StorageContainerUtil.getChildren(dao, storageContainer.getId()); Logger.out .debug("storageContainer.getChildrenContainerCollection() " + children.size()); Iterator iterator = children.iterator(); while (iterator.hasNext()) { StorageContainer container = (StorageContainer) HibernateMetaData .getProxyObjectImpl(iterator.next()); container.setSite(site); setSiteForSubContainers(container, site, dao); } } } private boolean isUnderSubContainer(StorageContainer storageContainer, Long parentContainerID, DAO dao) throws DAOException { if (storageContainer != null) { // Ashish - 11/6/07 - Retriving children containers for performance // improvement. // Collection childrenColl = // (Collection)dao.retrieveAttribute(StorageContainer.class.getName(), // storageContainer.getId(),Constants.COLUMN_NAME_CHILDREN ); Collection childrenColl = StorageContainerUtil.getChildren(dao, storageContainer.getId()); Iterator iterator = childrenColl.iterator(); // storageContainer.getChildren() while (iterator.hasNext()) { StorageContainer container = (StorageContainer) iterator.next(); // Logger.out.debug("SUB CONTINER container // "+parentContainerID.longValue()+" // "+container.getId().longValue()+" // "+(parentContainerID.longValue()==container.getId().longValue())); if (parentContainerID.longValue() == container.getId() .longValue()) return true; if (isUnderSubContainer(container, parentContainerID, dao)) return true; } } return false; } // TODO TO BE REMOVED private void setDisableToSubContainer(StorageContainer storageContainer, List disabledConts, DAO dao, List disabledContainerList) throws DAOException { if (storageContainer != null) { // Ashish - 11/6/07 - Retriving children containers for performance // improvement. 
			// Collection childrenColl =
			// (Collection)dao.retrieveAttribute(StorageContainer.class.getName(),
			// storageContainer.getId(),Constants.COLUMN_NAME_CHILDREN );
			Collection childrenColl = StorageContainerUtil.getChildren(dao,
					storageContainer.getId());
			Iterator iterator = childrenColl.iterator();
			while (iterator.hasNext())
			{
				StorageContainer container = (StorageContainer) iterator.next();
				container.setActivityStatus(Constants.ACTIVITY_STATUS_DISABLED);
				addEntriesInDisabledMap(container, disabledConts);
				/* whenever a container is disabled, free its used positions */
				container.setLocatedAtPosition(null);
				disabledContainerList.add(container);
				setDisableToSubContainer(container, disabledConts, dao,
						disabledContainerList);
			}
			// Free the occupied positions of the disabled container itself.
			storageContainer.getOccupiedPositions().clear();
		}
	}

	// This method is called from the label generator.
	public long getNextContainerNumber() throws DAOException
	{
		String sourceObjectName = "CATISSUE_STORAGE_CONTAINER";
		String[] selectColumnName = { "max(IDENTIFIER) as MAX_NAME" };
		AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.JDBC_DAO);
		dao.openSession(null);
		List list = dao.retrieve(sourceObjectName, selectColumnName);
		dao.closeSession();
		if (!list.isEmpty())
		{
			List columnList = (List) list.get(0);
			if (!columnList.isEmpty())
			{
				String str = (String) columnList.get(0);
				if (!str.equals(""))
				{
					long no = Long.parseLong(str);
					return no + 1;
				}
			}
		}
		return 1;
	}

	// TODO: decide what should be done with this method.
	public String getContainerName(String siteName, String typeName,
			String operation, long Id) throws DAOException
	{
		String containerName = "";
		if (typeName != null && siteName != null && !typeName.equals("")
				&& !siteName.equals(""))
		{
			// Poornima: the maximum length of a site name is 50 and of a
			// container type name is 100; Oracle does not truncate the
			// generated name and raises an error instead, so both fields are
			// truncated here if they are longer than 40 characters.
// It also solves Bug 2829:System fails to create a default unique // storage container name String maxSiteName = siteName; String maxTypeName = typeName; if (siteName.length() > 40) { maxSiteName = siteName.substring(0, 39); } if (typeName.length() > 40) { maxTypeName = typeName.substring(0, 39); } if (operation.equals(Constants.ADD)) { containerName = maxSiteName + "_" + maxTypeName + "_" + String.valueOf(getNextContainerNumber()); } else { containerName = maxSiteName + "_" + maxTypeName + "_" + String.valueOf(Id); } } return containerName; } public int getNextContainerNumber(long parentID, long typeID, boolean isInSite) throws DAOException { String sourceObjectName = "CATISSUE_STORAGE_CONTAINER"; String[] selectColumnName = { "max(IDENTIFIER) as MAX_NAME" }; String[] whereColumnName = { "STORAGE_TYPE_ID", "PARENT_CONTAINER_ID" }; String[] whereColumnCondition = { "=", "=" }; Object[] whereColumnValue = { Long.valueOf(typeID), Long.valueOf(parentID) }; if (isInSite) { whereColumnName = new String[3]; whereColumnName[0] = "STORAGE_TYPE_ID"; whereColumnName[1] = "SITE_ID"; whereColumnName[2] = "PARENT_CONTAINER_ID"; whereColumnValue = new Object[3]; whereColumnValue[0] = Long.valueOf(typeID); whereColumnValue[1] = Long.valueOf(parentID); whereColumnValue[2] = "null"; whereColumnCondition = new String[3]; whereColumnCondition[0] = "="; whereColumnCondition[1] = "="; whereColumnCondition[2] = "is"; } String joinCondition = Constants.AND_JOIN_CONDITION; AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.JDBC_DAO); dao.openSession(null); List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); dao.closeSession(); if (!list.isEmpty()) { List columnList = (List) list.get(0); if (!columnList.isEmpty()) { String str = (String) columnList.get(0); Logger.out.info("str---------------:" + str); if (!str.equals("")) { int no = Integer.parseInt(str); return no + 1; } } } return 1; } private boolean isContainerEmpty(DAO dao, StorageContainer container) throws DAOException { // Retrieving all the occupied positions by child containers String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo" }; String[] whereColumnName = { "locatedAtPosition.parentContainer.id" }; String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { container.getId() }; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); if (!list.isEmpty()) { return false; } else { // Retrieving all the occupied positions by specimens sourceObjectName = Specimen.class.getName(); whereColumnName[0] = "specimenPosition.storageContainer.id"; selectColumnName[0] = "specimenPosition.positionDimensionOne"; selectColumnName[1] = "specimenPosition.positionDimensionTwo"; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); if (!list.isEmpty()) { return false; } else { // Retrieving all the occupied positions by specimens array type sourceObjectName = SpecimenArray.class.getName(); whereColumnName[0] = "locatedAtPosition.parentContainer.id"; selectColumnName[0] = "locatedAtPosition.positionDimensionOne"; selectColumnName[1] = "locatedAtPosition.positionDimensionTwo"; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); if (!list.isEmpty()) { 
return false; } } } return true; } /** * Returns the data for generation of storage container tree view. * * @return the vector of tree nodes for the storage containers. */ public Vector getTreeViewData() throws DAOException { JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); dao.openSession(null); // String queryStr = " SELECT t8.IDENTIFIER, t8.CONTAINER_NAME, t5.TYPE, // t8.SITE_ID, " // + " t4.TYPE, t8.PARENT_IDENTIFIER, " // + " t8.PARENT_CONTAINER_NAME, t8.PARENT_CONTAINER_TYPE " // + " FROM (SELECT t7.IDENTIFIER, t7.CONTAINER_NAME, t7.SITE_ID, " // + " t7.STORAGE_TYPE_ID, t7.PARENT_IDENTIFIER, " // + " t7.PARENT_CONTAINER_NAME, t6.TYPE AS PARENT_CONTAINER_TYPE FROM " // + " (select t1.IDENTIFIER AS IDENTIFIER, t1.CONTAINER_NAME AS // CONTAINER_NAME, " // + " t1.SITE_ID AS SITE_ID, t1.STORAGE_TYPE_ID AS STORAGE_TYPE_ID, " // + " t2.IDENTIFIER AS PARENT_IDENTIFIER, t2.CONTAINER_NAME AS // PARENT_CONTAINER_NAME, " // + " t2.STORAGE_TYPE_ID AS PARENT_STORAGE_TYPE_ID " // + " from CATISSUE_STORAGE_CONTAINER t1 LEFT OUTER JOIN // CATISSUE_STORAGE_CONTAINER t2 " // + " on t1.PARENT_CONTAINER_ID = t2.IDENTIFIER) AS t7 LEFT OUTER JOIN // CATISSUE_STORAGE_TYPE t6 " // + " on t7.PARENT_STORAGE_TYPE_ID = t6.IDENTIFIER) AS t8, " // + " CATISSUE_SITE t4, CATISSUE_STORAGE_TYPE t5 " // + " WHERE t8.SITE_ID = t4.IDENTIFIER " + " AND t8.STORAGE_TYPE_ID = // t5.IDENTIFIER "; // String queryStr = "SELECT " + " t8.IDENTIFIER, t8.CONTAINER_NAME, // t5.NAME, t8.SITE_ID, t4.TYPE, t8.PARENT_IDENTIFIER, " // + " t8.PARENT_CONTAINER_NAME, t8.PARENT_CONTAINER_TYPE, // t8.ACTIVITY_STATUS, t8.PARENT_ACTIVITY_STATUS " + " FROM ( " + " // SELECT " // + " t7.IDENTIFIER, t7.CONTAINER_NAME, t7.SITE_ID, t7.STORAGE_TYPE_ID, // t7.ACTIVITY_STATUS, t7.PARENT_IDENTIFIER, " // + " t7.PARENT_CONTAINER_NAME, t6.NAME AS PARENT_CONTAINER_TYPE, // t7.PARENT_ACTIVITY_STATUS " + " FROM " + " ( " // + " select " // + " t1.IDENTIFIER AS IDENTIFIER, t1.NAME AS CONTAINER_NAME, // t11.SITE_ID AS SITE_ID, T1.ACTIVITY_STATUS AS ACTIVITY_STATUS," // + " t11.STORAGE_TYPE_ID AS STORAGE_TYPE_ID, t2.IDENTIFIER AS // PARENT_IDENTIFIER, " // + " t2.NAME AS PARENT_CONTAINER_NAME, t22.STORAGE_TYPE_ID AS // PARENT_STORAGE_TYPE_ID, T2.ACTIVITY_STATUS AS PARENT_ACTIVITY_STATUS" // + " from " + " CATISSUE_STORAGE_CONTAINER t11, // CATISSUE_STORAGE_CONTAINER t22, " // + " CATISSUE_CONTAINER t1 LEFT OUTER JOIN CATISSUE_CONTAINER t2 " + " // on t1.PARENT_CONTAINER_ID = t2.IDENTIFIER " // + " where " + " t1.identifier = t11.identifier and (t2.identifier is // null OR t2.identifier = t22.identifier)" + " ) " // + " t7 LEFT OUTER JOIN CATISSUE_CONTAINER_TYPE t6 on " + " // t7.PARENT_STORAGE_TYPE_ID = t6.IDENTIFIER " + " ) " // + " t8, CATISSUE_SITE t4, CATISSUE_CONTAINER_TYPE t5 WHERE t8.SITE_ID // = t4.IDENTIFIER " + " AND t8.STORAGE_TYPE_ID = t5.IDENTIFIER "; // Bug-2630: Added by jitendra String queryStr = "SELECT " + "t8.IDENTIFIER, t8.CONTAINER_NAME, t5.NAME, t8.SITE_ID, t4.TYPE, " + "t8. PARENT_IDENTIFIER, t8.PARENT_CONTAINER_NAME, t8.PARENT_CONTAINER_TYPE, " + "t8. ACTIVITY_STATUS, t8.PARENT_ACTIVITY_STATUS " + "FROM " + "( " + "SELECT " + "t7. IDENTIFIER, t7.CONTAINER_NAME, t7.SITE_ID, t7.STORAGE_TYPE_ID, " + "t7.ACTIVITY_STATUS, t7. PARENT_IDENTIFIER, " + "t7.PARENT_CONTAINER_NAME, t6.NAME AS PARENT_CONTAINER_TYPE, t7.PARENT_ACTIVITY_STATUS " + "FROM " + "( " + "select " + "t10. IDENTIFIER AS IDENTIFIER, t10.CONTAINER_NAME AS CONTAINER_NAME, t10.SITE_ID AS SITE_ID, " + "T10. 
ACTIVITY_STATUS AS ACTIVITY_STATUS, t10.STORAGE_TYPE_ID AS STORAGE_TYPE_ID, " + "t10.PARENT_IDENTIFIER AS PARENT_IDENTIFIER, t10.PARENT_CONTAINER_NAME AS PARENT_CONTAINER_NAME, " + "t22. STORAGE_TYPE_ID AS PARENT_STORAGE_TYPE_ID, T10.PARENT_ACTIVITY_STATUS AS PARENT_ACTIVITY_STATUS " + "from " + "( " + "select " + "t1. IDENTIFIER AS IDENTIFIER, t1.NAME AS CONTAINER_NAME, t11.SITE_ID AS SITE_ID, " + "T1. ACTIVITY_STATUS AS ACTIVITY_STATUS, t11.STORAGE_TYPE_ID AS STORAGE_TYPE_ID, " + "t2.IDENTIFIER AS PARENT_IDENTIFIER, t2.NAME AS PARENT_CONTAINER_NAME, " + "T2.ACTIVITY_STATUS AS PARENT_ACTIVITY_STATUS " + "from " + "CATISSUE_STORAGE_CONTAINER t11,CATISSUE_CONTAINER t1 LEFT OUTER JOIN " + "CATISSUE_CONTAINER t2 " + "on t1.PARENT_CONTAINER_ID = t2.IDENTIFIER " + "where t1.identifier = t11.identifier " + ")t10 " + "LEFT OUTER JOIN CATISSUE_STORAGE_CONTAINER t22 on t10.PARENT_IDENTIFIER = t22.identifier " + ")t7 " + "LEFT OUTER JOIN CATISSUE_CONTAINER_TYPE t6 on t7.PARENT_STORAGE_TYPE_ID = t6.IDENTIFIER " + ") t8, CATISSUE_SITE t4, CATISSUE_CONTAINER_TYPE t5 " + "WHERE " + "t8.SITE_ID = t4.IDENTIFIER AND t8.STORAGE_TYPE_ID = t5.IDENTIFIER "; Logger.out.debug("Storage Container query......................" + queryStr); List list = null; try { list = dao.executeQuery(queryStr, null, false, null); // printRecords(list); } catch (Exception ex) { throw new DAOException(ex.getMessage()); } dao.closeSession(); return getTreeNodeList(list); } /** * Returns the vector of tree node for the storage container list. * * @param resultList * the storage container list. * @return the vector of tree node for the storage container list. * @throws DAOException */ public Vector getTreeNodeList(List resultList) throws DAOException { Map containerRelationMap = new HashMap(); // Vector of Tree Nodes for all the storage containers. Vector treeNodeVector = new Vector(); Vector finalNodeVector = new Vector(); if (resultList.isEmpty() == false) { Iterator iterator = resultList.iterator(); while (iterator.hasNext()) { List rowList = (List) iterator.next(); // Bug-2630: Added by jitendra if ((String) rowList.get(8) != null && !((String) rowList.get(8)) .equals(Constants.ACTIVITY_STATUS_DISABLED)) { // Mandar : code for tooltip for the container String toolTip = getToolTipData((String) rowList.get(0)); // Create the tree node for the child node. TreeNode treeNodeImpl = new StorageContainerTreeNode(Long .valueOf((String) rowList.get(0)), (String) rowList .get(1), (String) rowList.get(1), toolTip, (String) rowList.get(8)); // Add the tree node in the Vector if it is not present. if (treeNodeVector.contains(treeNodeImpl) == false) { treeNodeVector.add(treeNodeImpl); } } if ((String) rowList.get(5) != "") // if parent container is // not null { List childIds = new ArrayList(); // Create the relationship map for parent container id and // the child container ids. // Check if the parent container already has an entry in the // Map and get it. if (containerRelationMap.containsKey(Long .valueOf((String) rowList.get(5)))) { childIds = (List) containerRelationMap.get(Long .valueOf((String) rowList.get(5))); } // Put the container in the child container list of the // parent container // and update the Map. childIds.add(Long.valueOf((String) rowList.get(0))); containerRelationMap.put(Long.valueOf((String) rowList .get(5)), childIds); // Create the tree node for the parent node and add it in // the vector if not present. 
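				/*
				 * Sketch of the intermediate structures built in this loop
				 * (identifiers are hypothetical): containerRelationMap maps a
				 * parent container id to the ids of its direct children, e.g.
				 *
				 *   {1=[4, 7], 7=[9]}
				 *
				 * while treeNodeVector holds one StorageContainerTreeNode per
				 * id; the actual parent/child wiring is done later in
				 * createHierarchy().
				 */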
String toolTip = getToolTipData((String) rowList.get(5)); TreeNode treeNodeImpl = new StorageContainerTreeNode(Long .valueOf((String) rowList.get(5)), (String) rowList .get(6), (String) rowList.get(6), toolTip, (String) rowList.get(9)); if (treeNodeVector.contains(treeNodeImpl) == false) { treeNodeVector.add(treeNodeImpl); } } } // printVectorMap(treeNodeVector, containerRelationMap); finalNodeVector = createHierarchy(containerRelationMap, treeNodeVector); } return finalNodeVector; } /** * Creates the hierarchy of the tree nodes of the container according to the * container relationship map. * * @param containerRelationMap * the container relationship map. * @param treeNodeVector * the vector of tree nodes. * @return the hierarchy of the tree nodes of the container according to the * container relationship map. * @throws DAOException */ private Vector createHierarchy(Map containerRelationMap, Vector treeNodeVector) throws DAOException { // Get the ket set of the parent containers. Set keySet = containerRelationMap.keySet(); Iterator iterator = keySet.iterator(); while (iterator.hasNext()) { // Get the parent container id and create the tree node. Long parentId = (Long) iterator.next(); StorageContainerTreeNode parentTreeNodeImpl = new StorageContainerTreeNode( parentId, null, null); parentTreeNodeImpl = (StorageContainerTreeNode) treeNodeVector .get(treeNodeVector.indexOf(parentTreeNodeImpl)); // Get the child container ids and create the tree nodes. List childNodeList = (List) containerRelationMap.get(parentId); Iterator childIterator = childNodeList.iterator(); while (childIterator.hasNext()) { Long childId = (Long) childIterator.next(); StorageContainerTreeNode childTreeNodeImpl = new StorageContainerTreeNode( childId, null, null); childTreeNodeImpl = (StorageContainerTreeNode) treeNodeVector .get(treeNodeVector.indexOf(childTreeNodeImpl)); // Set the relationship between the parent and child tree nodes. childTreeNodeImpl.setParentNode(parentTreeNodeImpl); parentTreeNodeImpl.getChildNodes().add(childTreeNodeImpl); } // for sorting Vector tempChildNodeList = parentTreeNodeImpl.getChildNodes(); parentTreeNodeImpl.setChildNodes(tempChildNodeList); } // Get the container whose tree node has parent null // and get its site tree node and set it as its child. Vector parentNodeVector = new Vector(); iterator = treeNodeVector.iterator(); // System.out.println("\nNodes without Parent\n"); while (iterator.hasNext()) { StorageContainerTreeNode treeNodeImpl = (StorageContainerTreeNode) iterator .next(); if (treeNodeImpl.getParentNode() == null) { // System.out.print("\n" + treeNodeImpl); TreeNodeImpl siteNode = getSiteTreeNode(treeNodeImpl .getIdentifier()); // System.out.print("\tSiteNodecreated: " + siteNode); if (parentNodeVector.contains(siteNode)) { siteNode = (TreeNodeImpl) parentNodeVector .get(parentNodeVector.indexOf(siteNode)); // System.out.print("SiteNode Found"); } else { parentNodeVector.add(siteNode); // System.out.print("\tSiteNodeSet: " + siteNode); } treeNodeImpl.setParentNode(siteNode); siteNode.getChildNodes().add(treeNodeImpl); // for sorting Vector tempChildNodeList = siteNode.getChildNodes(); siteNode.setChildNodes(tempChildNodeList); } } // Get the containers under site. 
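		/*
		 * At this point parentNodeVector holds one site node per site that has
		 * top-level containers, each with its container sub-tree attached, for
		 * example (hypothetical names):
		 *
		 *   Main Repository
		 *     +-- Freezer_A
		 *     |     +-- Rack_1
		 *     +-- Freezer_B
		 *
		 * The call below adds site nodes (with their directly placed
		 * containers) that are not already present in parentNodeVector.
		 */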
Vector containersUnderSite = getContainersUnderSite(); containersUnderSite.removeAll(parentNodeVector); parentNodeVector.addAll(containersUnderSite); Utility.sortTreeVector(parentNodeVector); return parentNodeVector; } private Vector getContainersUnderSite() throws DAOException { // String sql = " SELECT sc.IDENTIFIER, sc.CONTAINER_NAME, scType.TYPE, // site.IDENTIFIER, site.NAME, site.TYPE " // + " from catissue_storage_container sc, catissue_site site, // catissue_storage_type scType " // + " where sc.SITE_ID = site.IDENTIFIER AND sc.STORAGE_TYPE_ID = // scType.IDENTIFIER " // + " and sc.PARENT_CONTAINER_ID is NULL"; String sql = "SELECT sc.IDENTIFIER, cn.NAME, scType.NAME, site.IDENTIFIER," + "site.NAME, site.TYPE from catissue_storage_container sc, " + "catissue_site site, catissue_container_type scType, " + "catissue_container cn where sc.SITE_ID = site.IDENTIFIER " + "AND sc.STORAGE_TYPE_ID = scType.IDENTIFIER " + "and sc.IDENTIFIER = cn.IDENTIFIER " + "and cn.IDENTIFIER not in (select pos.CONTAINER_ID from catissue_container_position pos)"; JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); List resultList = new ArrayList(); Vector containerNodeVector = new Vector(); try { dao.openSession(null); resultList = dao.executeQuery(sql, null, false, null); dao.closeSession(); // System.out.println("\nIn getContainersUnderSite()\n "); printRecords(resultList); } catch (Exception daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } Iterator iterator = resultList.iterator(); while (iterator.hasNext()) { List rowList = (List) iterator.next(); StorageContainerTreeNode containerNode = new StorageContainerTreeNode( Long.valueOf((String) rowList.get(0)), (String) rowList .get(1), (String) rowList.get(1)); StorageContainerTreeNode siteNode = new StorageContainerTreeNode( Long.valueOf((String) rowList.get(3)), (String) rowList .get(4), (String) rowList.get(4)); if (containerNodeVector.contains(siteNode)) { siteNode = (StorageContainerTreeNode) containerNodeVector .get(containerNodeVector.indexOf(siteNode)); } else containerNodeVector.add(siteNode); containerNode.setParentNode(siteNode); siteNode.getChildNodes().add(containerNode); } return containerNodeVector; } /** * Returns the site tree node of the container with the given identifier. * * @param identifier * the identifier of the container. * @return the site tree node of the container with the given identifier. * @throws DAOException */ private TreeNodeImpl getSiteTreeNode(Long identifier) throws DAOException { String sql = "SELECT site.IDENTIFIER, site.NAME, site.TYPE " + " from catissue_storage_container sc, catissue_site site " + " where sc.SITE_ID = site.IDENTIFIER AND sc.IDENTIFIER = " + identifier.longValue(); Logger.out.debug("Site Query........................." + sql); List resultList = executeSQL(sql); TreeNodeImpl siteTreeNode = null; if (resultList.isEmpty() == false) { List siteRecord = (List) resultList.get(0); siteTreeNode = new StorageContainerTreeNode(Long .valueOf((String) siteRecord.get(0)), (String) siteRecord .get(1), (String) siteRecord.get(1)); } return siteTreeNode; } /** * This method will add all the node into the vector that contains any * container node and add a dummy container node to show [+] sign on the UI, * so that on clicking expand sign ajax call will retrieve child container * node under the site node. 
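	 * <p>
	 * For illustration only (hypothetical values): a site with identifier 5
	 * named "Main Site" ends up with a single dummy child so that the UI
	 * renders an expandable [+] node:
	 * <pre>
	 *   StorageContainerTreeNode site  = new StorageContainerTreeNode(Long.valueOf(5), "Main Site", "Main Site");
	 *   StorageContainerTreeNode dummy = new StorageContainerTreeNode(Long.valueOf(5), Constants.DUMMY_NODE_NAME, Constants.DUMMY_NODE_NAME);
	 *   dummy.setParentNode(site);
	 *   site.getChildNodes().add(dummy);
	 * </pre>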
*/ public Vector getSiteWithDummyContainer(Long userId) throws DAOException { String sql = "SELECT site.IDENTIFIER, site.NAME,COUNT(site.NAME) FROM CATISSUE_SITE " + " site join CATISSUE_STORAGE_CONTAINER sc ON sc.site_id = site.identifier join " + "CATISSUE_CONTAINER con ON con.identifier = sc.identifier WHERE con.ACTIVITY_STATUS!='Disabled' " + "GROUP BY site.IDENTIFIER, site.NAME" +" order by upper(site.NAME)"; JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); List resultList = new ArrayList(); Long nodeIdentifier; String nodeName = null; String dummyNodeName = null; Vector containerNodeVector = new Vector(); try { dao.openSession(null); resultList = dao.executeQuery(sql, null, false, null); dao.closeSession(); } catch (Exception daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } Iterator iterator = resultList.iterator(); Set<Long> siteIdSet = new UserBizLogic().getRelatedSiteIds(userId); while (iterator.hasNext()) { List rowList = (List) iterator.next(); nodeIdentifier = Long.valueOf((String) rowList.get(0)); if (hasPrivilegeonSite(siteIdSet, nodeIdentifier)) { nodeName = (String) rowList.get(1); dummyNodeName = Constants.DUMMY_NODE_NAME; StorageContainerTreeNode siteNode = new StorageContainerTreeNode( nodeIdentifier, nodeName, nodeName); StorageContainerTreeNode dummyContainerNode = new StorageContainerTreeNode( nodeIdentifier, dummyNodeName, dummyNodeName); dummyContainerNode.setParentNode(siteNode); siteNode.getChildNodes().add(dummyContainerNode); containerNodeVector.add(siteNode); } } return containerNodeVector; } /** * @param identifier * Identifier of the container or site node * @param nodeName * Name of the site or container * @param parentId * parent identifier of the selected node * @return containerNodeVector This vector contains all the containers * @throws DAOException * @Description This method will retrieve all the containers under the * selected node */ public Vector<StorageContainerTreeNode> getStorageContainers( Long identifier, String nodeName, String parentId) throws DAOException { String sql = createSql(identifier, parentId); JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); String dummyNodeName = Constants.DUMMY_NODE_NAME; String containerName = null; Long nodeIdentifier; Long parentContainerId; Long childCount; List resultList = new ArrayList(); Vector<StorageContainerTreeNode> containerNodeVector = new Vector<StorageContainerTreeNode>(); try { dao.openSession(null); resultList = dao.executeQuery(sql, null, false, null); dao.closeSession(); } catch (Exception daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } Iterator iterator = resultList.iterator(); while (iterator.hasNext()) { List rowList = (List) iterator.next(); nodeIdentifier = Long.valueOf((String) rowList.get(0)); containerName = (String) rowList.get(1); parentContainerId = Long.valueOf((String) rowList.get(2)); childCount = Long.valueOf((String) rowList.get(3)); StorageContainerTreeNode containerNode = new StorageContainerTreeNode( nodeIdentifier, containerName, containerName); StorageContainerTreeNode parneContainerNode = new StorageContainerTreeNode( parentContainerId, nodeName, nodeName); if (childCount != null && childCount > 0) { StorageContainerTreeNode dummyContainerNode = new StorageContainerTreeNode( Long.valueOf((String) rowList.get(0)), dummyNodeName, dummyNodeName); dummyContainerNode.setParentNode(containerNode); containerNode.getChildNodes().add(dummyContainerNode); } if 
(containerNodeVector.contains(containerNode)) { containerNode = (StorageContainerTreeNode) containerNodeVector .get(containerNodeVector.indexOf(containerNode)); } else { containerNodeVector.add(containerNode); } containerNode.setParentNode(parneContainerNode); parneContainerNode.getChildNodes().add(containerNode); } if (containerNodeVector.isEmpty()) { StorageContainerTreeNode containerNode = new StorageContainerTreeNode( identifier, nodeName, nodeName); containerNodeVector.add(containerNode); } return containerNodeVector; } /** * @param identifier * Identifier of the container or site node * @param parentId * Parent identifier of the selected node * @return String sql This method with return the sql depending on the node * clicked (i.e parent Node or child node) */ private String createSql(Long identifier, String parentId) { String sql; if (!Constants.ZERO_ID.equals(parentId)) { sql = "SELECT cn.IDENTIFIER, cn.name, pos.PARENT_CONTAINER_ID,count(sc3.IDENTIFIER) " + "FROM CATISSUE_CONTAINER cn join CATISSUE_STORAGE_CONTAINER sc ON sc.IDENTIFIER=cn.IDENTIFIER " + "left outer join catissue_container_position pos on pos.CONTAINER_ID=cn.IDENTIFIER left outer join " + "catissue_container_position con_pos on con_pos.PARENT_CONTAINER_ID=cn.IDENTIFIER left outer join " + "CATISSUE_STORAGE_CONTAINER sc3 on con_pos.CONTAINER_ID=sc3.IDENTIFIER " + "WHERE pos.PARENT_CONTAINER_ID= " + identifier + " AND cn.ACTIVITY_STATUS!='Disabled' GROUP BY cn.IDENTIFIER, cn.NAME,pos.PARENT_CONTAINER_ID"; } else { sql = "SELECT cn.IDENTIFIER, cn.NAME,site.identifier,COUNT(sc3.IDENTIFIER) " + "FROM CATISSUE_CONTAINER cn join CATISSUE_STORAGE_CONTAINER sc " + "ON sc.IDENTIFIER=cn.IDENTIFIER join CATISSUE_SITE site " + "ON sc.site_id = site.identifier left outer join CATISSUE_CONTAINER_POSITION pos " + "ON pos.PARENT_CONTAINER_ID=cn.IDENTIFIER left outer join " + "CATISSUE_STORAGE_CONTAINER sc3 ON pos.CONTAINER_ID=sc3.IDENTIFIER " + "WHERE site.identifier=" + identifier + " AND cn.ACTIVITY_STATUS!='Disabled' AND cn.IDENTIFIER NOT IN (SELECT p2.CONTAINER_ID FROM CATISSUE_CONTAINER_POSITION p2) " + "GROUP BY cn.IDENTIFIER, cn.NAME,site.identifier "; } return sql; } private boolean[][] getStorageContainerFullStatus(DAO dao, StorageContainer parentContainer, Collection children) throws DAOException { // List list = dao.retrieve(StorageContainer.class.getName(), "id", id); boolean[][] fullStatus = null; Integer oneDimensionCapacity = parentContainer.getCapacity() .getOneDimensionCapacity(); Integer twoDimensionCapacity = parentContainer.getCapacity() .getTwoDimensionCapacity(); fullStatus = new boolean[oneDimensionCapacity.intValue() + 1][twoDimensionCapacity .intValue() + 1]; // Collection children = StorageContainerUtil.getChildren(dao, // storageContainer.getId()); if (children != null) { Iterator iterator = children.iterator(); Logger.out .debug("storageContainer.getChildrenContainerCollection().size(): " + children.size()); while (iterator.hasNext()) { StorageContainer childStorageContainer = (StorageContainer) iterator .next(); if (childStorageContainer.getLocatedAtPosition() != null) { Integer positionDimensionOne = childStorageContainer .getLocatedAtPosition().getPositionDimensionOne(); Integer positionDimensionTwo = childStorageContainer .getLocatedAtPosition().getPositionDimensionTwo(); Logger.out.debug("positionDimensionOne : " + positionDimensionOne.intValue()); Logger.out.debug("positionDimensionTwo : " + positionDimensionTwo.intValue()); fullStatus[positionDimensionOne.intValue()][positionDimensionTwo 
.intValue()] = true;
				}
			}
		}
		return fullStatus;
	}

	/**
	 * @param containerId
	 *            the identifier of the container whose children are required
	 * @return the child containers of the given container
	 * @throws DAOException
	 */
	public Collection getContainerChildren(Long containerId) throws DAOException
	{
		AbstractDAO dao = DAOFactory.getInstance().getDAO(
				Constants.HIBERNATE_DAO);
		Collection<Container> children = null;
		try
		{
			dao.openSession(null);
			children = StorageContainerUtil.getChildren(dao, containerId);
		}
		catch (DAOException daoExp)
		{
			daoExp.printStackTrace();
			Logger.out.error(daoExp.getMessage(), daoExp);
		}
		finally
		{
			dao.closeSession();
		}
		return children;
	}

	private void disableSubStorageContainer(DAO dao,
			SessionDataBean sessionDataBean,
			List<StorageContainer> disabledContainerList) throws DAOException,
			UserNotAuthorizedException
	{
		// adding updated participantMap to cache
		// catissueCoreCacheManager.addObjectToCache(Constants.MAP_OF_PARTICIPANTS,
		// participantMap);
		int count = disabledContainerList.size();
		List containerIdList = new ArrayList();
		for (int i = 0; i < count; i++)
		{
			StorageContainer container = disabledContainerList.get(i);
			containerIdList.add(container.getId());
		}
		List listOfSpecimenIDs = getRelatedObjects(dao, Specimen.class,
				"specimenPosition.storageContainer", Utility
						.toLongArray(containerIdList));
		if (!listOfSpecimenIDs.isEmpty())
		{
			throw new DAOException(ApplicationProperties
					.getValue("errors.container.contains.specimen"));
		}
		// Update the containers to disabled.
		for (int i = 0; i < count; i++)
		{
			StorageContainer container = disabledContainerList.get(i);
			dao.update(container, sessionDataBean, true, true, false);
		}
		auditDisabledObjects(dao, "CATISSUE_CONTAINER", containerIdList);
	}

	private void disableSubStorageContainer(DAO dao,
			SessionDataBean sessionDataBean, Long storageContainerIDArr[])
			throws DAOException, UserNotAuthorizedException
	{
		// adding updated participantMap to cache
		// catissueCoreCacheManager.addObjectToCache(Constants.MAP_OF_PARTICIPANTS,
		// participantMap);
		List listOfSpecimenIDs = getRelatedObjects(dao, Specimen.class,
				"specimenPosition.storageContainer", storageContainerIDArr);
		if (!listOfSpecimenIDs.isEmpty())
		{
			throw new DAOException(ApplicationProperties
					.getValue("errors.container.contains.specimen"));
		}
		List listOfSubStorageContainerId = super.disableObjects(dao,
				Container.class, "locatedAtPosition.parentContainer",
				"CATISSUE_CONTAINER", "PARENT_CONTAINER_ID",
				storageContainerIDArr);
		if (listOfSubStorageContainerId.isEmpty())
		{
			return;
		}
		else
		{
			Iterator itr = listOfSubStorageContainerId.iterator();
			while (itr.hasNext())
			{
				Long contId = (Long) itr.next();
				String sourceObjectName = StorageContainer.class.getName();
				Object object = dao.retrieve(sourceObjectName, contId);
				if (object != null)
				{
					StorageContainer cont = (StorageContainer) object;
					// cont.setParent(null);
					cont.setLocatedAtPosition(null);
					// dao.update(cont, sessionDataBean, true, true, false);
				}
			}
		}
		disableSubStorageContainer(dao, sessionDataBean, Utility
				.toLongArray(listOfSubStorageContainerId));
	}

	// Checks whether the user is trying to use a container without the
	// privilege to use it.
	// This is needed since users can now enter the values in the edit box.
	public boolean validateContainerAccess(DAO dao, StorageContainer container,
			SessionDataBean sessionDataBean) throws SMException
	{
		Logger.out.debug("validateContainerAccess..................");
		// Guard against a missing session before it is dereferenced.
		if (sessionDataBean == null)
		{
			return false;
		}
		if (sessionDataBean.isAdmin())
		{
			return true;
		}
		String userName = sessionDataBean.getUserName();
		// To get privilegeCache through
		// Singleton instance of PrivilegeManager, requires User LoginName
		// PrivilegeManager
privilegeManager = PrivilegeManager.getInstance(); // PrivilegeCache privilegeCache = // privilegeManager.getPrivilegeCache(userName); // Implemented as per the requirements of MSR. User should use only // those sites for which he has access to. Long userId = sessionDataBean.getUserId(); Site site = null; Set loggedInUserSiteIdSet = null; try { site = getSite(dao, container.getId()); loggedInUserSiteIdSet = new UserBizLogic().getRelatedSiteIds(userId); if(dao instanceof HibernateDAO) { ((HibernateDAO)dao).openSession(null); } } catch (DAOException e) { return false; } finally { // try { // //dao.closeSession(); // } catch (DAOException e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } } if (loggedInUserSiteIdSet != null && loggedInUserSiteIdSet.contains(new Long(site.getId()))) { return true; } else { return false; } // if // (!SecurityManager.getInstance(this.getClass()).isAuthorized(userName, // StorageContainer.class.getName() + "_" + container.getId(), // Permissions.USE)) // Call to SecurityManager.isAuthorized bypassed & // instead, call redirected to privilegeCache.hasPrivilege // Commented by Ravindra and Vishvesh because this is not how // if (!privilegeCache.hasPrivilege(StorageContainer.class.getName() + // "_" + container.getId(), Permissions.USE)) // { // return false; // } // else // return true; } // Checks for whether the user is trying to place the container in a // position // outside the range of parent container // This is needed since now users can enter the values in the edit box protected boolean validatePosition(StorageContainer parent, StorageContainer current) { int posOneCapacity = parent.getCapacity().getOneDimensionCapacity() .intValue(); int posTwoCapacity = parent.getCapacity().getTwoDimensionCapacity() .intValue(); int positionDimensionOne = current.getLocatedAtPosition() .getPositionDimensionOne().intValue(); int positionDimensionTwo = current.getLocatedAtPosition() .getPositionDimensionTwo().intValue(); Logger.out.debug("validatePosition C : " + positionDimensionOne + " : " + positionDimensionTwo); Logger.out.debug("validatePosition P : " + posOneCapacity + " : " + posTwoCapacity); if ((positionDimensionOne > posOneCapacity) || (positionDimensionTwo > posTwoCapacity)) { Logger.out.debug("validatePosition false"); return false; } Logger.out.debug("validatePosition true"); return true; } private boolean validatePosition(int posOneCapacity, int posTwoCapacity, StorageContainer current) { int positionDimensionOne = current.getLocatedAtPosition() .getPositionDimensionOne().intValue(); int positionDimensionTwo = current.getLocatedAtPosition() .getPositionDimensionTwo().intValue(); Logger.out.debug("validatePosition C : " + positionDimensionOne + " : " + positionDimensionTwo); Logger.out.debug("validatePosition P : " + posOneCapacity + " : " + posTwoCapacity); if ((positionDimensionOne > posOneCapacity) || (positionDimensionTwo > posTwoCapacity)) { Logger.out.debug("validatePosition false"); return false; } Logger.out.debug("validatePosition true"); return true; } /** * Bug ID: 4038 Patch ID: 4038_2 See also: 1-3 */ /** * This method is to validae position based on parent container id * * @param dao * Object DAO * @param container * current container * @return boolean value based on validation * @throws DAOException * exception occured while DB handling */ private boolean validatePosition(DAO dao, StorageContainer container) throws DAOException { String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "id", 
"capacity.oneDimensionCapacity", "capacity.twoDimensionCapacity" }; String[] whereColumnName = { "id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { container.getLocatedAtPosition() .getParentContainer().getId() }; String joinCondition = null; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); Integer pcCapacityOne = 0; Integer pcCapacityTwo = 0; if (!list.isEmpty()) { Object[] obj1 = (Object[]) list.get(0); pcCapacityOne = (Integer) obj1[1]; pcCapacityTwo = (Integer) obj1[2]; } int positionDimensionOne = container.getLocatedAtPosition() .getPositionDimensionOne().intValue(); int positionDimensionTwo = container.getLocatedAtPosition() .getPositionDimensionTwo().intValue(); Logger.out.debug("validatePosition C : " + positionDimensionOne + " : " + positionDimensionTwo); Logger.out.debug("validatePosition P : " + pcCapacityOne + " : " + pcCapacityTwo); if ((positionDimensionOne > pcCapacityOne) || (positionDimensionTwo > pcCapacityTwo)) { Logger.out.debug("validatePosition false"); return false; } Logger.out.debug("validatePosition true"); return true; } private boolean isContainerAvailableForDisabled(DAO dao, Long[] containerIds) { List containerList = new ArrayList(); if (containerIds.length != 0) { try { String sourceObjectName = Specimen.class.getName(); String[] selectColumnName = { "id" }; String[] whereColumnName1 = { "specimenPosition.storageContainer.id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition1 = { "in" }; Object[] whereColumnValue1 = { containerIds }; String joinCondition = Constants.AND_JOIN_CONDITION; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); return false; } else { sourceObjectName = SpecimenArray.class.getName(); whereColumnName1[0] = "locatedAtPosition.parentContainer.id"; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { return false; } /* * else { sourceObjectName = * StorageContainer.class.getName(); String[] * whereColumnName = {"parent.id"}; containerList = * dao.retrieve(sourceObjectName, selectColumnName, * whereColumnName, whereColumnCondition1, * whereColumnValue1, joinCondition); * } */ } } catch (Exception e) { Logger.out.debug("Error in isContainerAvailable : " + e); return false; } } else { return true; } return isContainerAvailableForDisabled(dao, Utility .toLongArray(containerList)); } // -- to check if storageContainer is available or used protected boolean isContainerAvailableForPositions(DAO dao, StorageContainer current) { try { String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "id" }; String[] whereColumnName = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo", "locatedAtPosition.parentContainer" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = { "=", "=", "=" }; Object[] whereColumnValue = { current.getLocatedAtPosition().getPositionDimensionOne(), current.getLocatedAtPosition().getPositionDimensionTwo(), 
current.getLocatedAtPosition().getParentContainer() }; String joinCondition = Constants.AND_JOIN_CONDITION; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); Logger.out.debug("current.getParentContainer() :" + current.getLocatedAtPosition().getParentContainer()); // check if StorageContainer exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**********IN isContainerAvailable : obj::::::: --------- " + obj); return false; } else { sourceObjectName = Specimen.class.getName(); String[] whereColumnName1 = { "specimenPosition.positionDimensionOne", "specimenPosition.positionDimensionTwo", "specimenPosition.storageContainer.id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition1 = { "=", "=", "=" }; Object[] whereColumnValue1 = { current.getLocatedAtPosition() .getPositionDimensionOne(), current.getLocatedAtPosition() .getPositionDimensionTwo(), current.getLocatedAtPosition().getParentContainer() .getId() }; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**************IN isPositionAvailable : obj::::::: --------------- " + obj); return false; } else { sourceObjectName = SpecimenArray.class.getName(); whereColumnName1[0] = "locatedAtPosition.positionDimensionOne"; whereColumnName1[1] = "locatedAtPosition.positionDimensionTwo"; whereColumnName1[2] = "locatedAtPosition.parentContainer.id"; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**************IN isPositionAvailable : obj::::::: --------------- " + obj); return false; } } } return true; } catch (Exception e) { Logger.out.debug("Error in isContainerAvailable : " + e); return false; } } // Will check only for valid range of the StorageContainer protected boolean validatePosition(StorageContainer storageContainer, String posOne, String posTwo) { try { Logger.out .debug("storageContainer.getCapacity().getOneDimensionCapacity() : " + storageContainer.getCapacity() .getOneDimensionCapacity()); Logger.out .debug("storageContainer.getCapacity().getTwoDimensionCapacity() : " + storageContainer.getCapacity() .getTwoDimensionCapacity()); int oneDimensionCapacity = (storageContainer.getCapacity() .getOneDimensionCapacity() != null ? storageContainer .getCapacity().getOneDimensionCapacity().intValue() : -1); int twoDimensionCapacity = (storageContainer.getCapacity() .getTwoDimensionCapacity() != null ? storageContainer .getCapacity().getTwoDimensionCapacity().intValue() : -1); if (((oneDimensionCapacity) < Integer.parseInt(posOne)) || ((twoDimensionCapacity) < Integer.parseInt(posTwo))) { return false; } return true; } catch (Exception e) { Logger.out.debug("Error in validatePosition : " + e); return false; } } // Will check only for Position is used or not. 
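	/*
	 * Usage sketch (variable names are hypothetical): a position is only
	 * usable if it lies within the parent's capacity AND is not already
	 * occupied by a Specimen, a StorageContainer or a SpecimenArray, so
	 * callers such as checkContainer() combine the two checks:
	 *
	 *   boolean usable = validatePosition(parent, posOne, posTwo)
	 *           && isPositionAvailable(dao, parent, posOne, posTwo);
	 */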
protected boolean isPositionAvailable(DAO dao, StorageContainer storageContainer, String posOne, String posTwo) { try { String sourceObjectName = Specimen.class.getName(); String[] selectColumnName = { "id" }; String[] whereColumnName = { "specimenPosition.positionDimensionOne", "specimenPosition.positionDimensionTwo", "specimenPosition.storageContainer.id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = { "=", "=", "=" }; Object[] whereColumnValue = { Integer.valueOf(posOne), Integer.valueOf(posTwo), storageContainer.getId() }; String joinCondition = Constants.AND_JOIN_CONDITION; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); Logger.out.debug("storageContainer.getId() :" + storageContainer.getId()); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**************IN isPositionAvailable : obj::::::: --------------- " + obj); // Logger.out.debug((Long)obj[0] ); // Logger.out.debug((Integer)obj[1]); // Logger.out.debug((Integer )obj[2]); return false; } else { sourceObjectName = StorageContainer.class.getName(); String[] whereColumnName1 = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo", "locatedAtPosition.parentContainer.id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition1 = { "=", "=", "=" }; Object[] whereColumnValue1 = { Integer.valueOf(posOne), Integer.valueOf(posTwo), storageContainer.getId() }; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName1, whereColumnCondition1, whereColumnValue1, joinCondition); Logger.out.debug("storageContainer.getId() :" + storageContainer.getId()); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**********IN isPositionAvailable : obj::::: --------- " + obj); return false; } else { sourceObjectName = SpecimenArray.class.getName(); String[] whereColumnName2 = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo", "locatedAtPosition.parentContainer.id" }; String[] whereColumnCondition2 = { "=", "=", "=" }; Object[] whereColumnValue2 = { Integer.valueOf(posOne), Integer.valueOf(posTwo), storageContainer.getId() }; list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName2, whereColumnCondition2, whereColumnValue2, joinCondition); Logger.out.debug("storageContainer.getId() :" + storageContainer.getId()); // check if Specimen exists with the given storageContainer // information if (list.size() != 0) { Object obj = list.get(0); Logger.out .debug("**********IN isPositionAvailable : obj::::: --------- " + obj); return false; } } } return true; } catch (Exception e) { Logger.out.debug("Error in isPositionAvailable : " + e); return false; } } // -- storage container validation for specimen public void checkContainer(DAO dao, String storageContainerID, String positionOne, String positionTwo, SessionDataBean sessionDataBean, boolean multipleSpecimen) throws DAOException, SMException { // List list = dao.retrieve(StorageContainer.class.getName(), // "id",storageContainerID ); String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { Constants.SYSTEM_IDENTIFIER, "capacity.oneDimensionCapacity", "capacity.twoDimensionCapacity", "name" }; String[] whereColumnName = { Constants.SYSTEM_IDENTIFIER 
}; String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { Long.valueOf(storageContainerID) }; String joinCondition = Constants.AND_JOIN_CONDITION; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); // check if StorageContainer exists with the given ID if (list.size() != 0) { Object[] obj = (Object[]) list.get(0); Logger.out .debug("**********SC found for given ID ****obj::::::: --------------- " + obj); Logger.out.debug((Long) obj[0]); Logger.out.debug((Integer) obj[1]); Logger.out.debug((Integer) obj[2]); Logger.out.debug((String) obj[3]); StorageContainer pc = new StorageContainer(); pc.setId((Long) obj[0]); pc.setName((String) obj[3]); Capacity cntPos = new Capacity(); if (obj[1] != null && obj[2] != null) { cntPos.setOneDimensionCapacity((Integer) obj[1]); cntPos.setTwoDimensionCapacity((Integer) obj[2]); pc.setCapacity(cntPos); } // check if user has privilege to use the container boolean hasAccess = validateContainerAccess(dao,pc, sessionDataBean); Logger.out.debug("hasAccess..............." + hasAccess); if (!hasAccess) { throw new DAOException(ApplicationProperties .getValue("access.use.object.denied")); } // check for closed Container checkStatus(dao, pc, "Storage Container"); /** * Name : kalpana thakur Reviewer Name : Vaishali Bug ID: 4922 * Description:Storage container will not be added to closed site * :check for closed site */ checkClosedSite(dao, pc.getId(), "Container Site"); // check for valid position boolean isValidPosition = validatePosition(pc, positionOne, positionTwo); Logger.out.debug("isValidPosition : " + isValidPosition); boolean canUsePosition = false; if (isValidPosition) // if position is valid { /* * try { */ canUsePosition = isPositionAvailable(dao, pc, positionOne, positionTwo); /* * } catch (Exception e) { * * e.printStackTrace(); } */ /* * try { canUsePosition = * StorageContainerUtil.isPostionAvaialble(pc.getId().toString(), * pc.getName(), positionOne, positionTwo); } catch * (CacheException e) { // TODO Auto-generated catch block * e.printStackTrace(); } */ Logger.out.debug("canUsePosition : " + canUsePosition); if (canUsePosition) // position empty. can be used { } else // position already in use { if (multipleSpecimen) { throw new DAOException( ApplicationProperties .getValue("errors.storageContainer.Multiple.inUse")); } else { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.inUse")); } } } else // position is invalid { throw new DAOException(ApplicationProperties .getValue("errors.storageContainer.dimensionOverflow")); } } else // storageContainer does not exist { throw new DAOException(ApplicationProperties .getValue("errors.storageContainerExist")); } } /* * (non-Javadoc) * * @see edu.wustl.catissuecore.bizlogic.TreeDataInterface#getTreeViewData(edu.wustl.common.beans.SessionDataBean, * java.util.Map) */ public Vector getTreeViewData(SessionDataBean sessionData, Map map, List list) throws DAOException { return null; } /** * Overriding the parent class's method to validate the enumerated attribute * values */ protected boolean validate(Object obj, DAO dao, String operation) throws DAOException { StorageContainer container = (StorageContainer) obj; /** * Start: Change for API Search --- Jitendra 06/10/2006 In Case of Api * Search, default values will not get set for the object since * setAllValues() method of domainObject will not get called. 
To avoid * null pointer exception, we are setting the default values same we * were setting in setAllValues() method of domainObject. */ ApiSearchUtil.setContainerDefault(container); // End:- Change for API Search String message = ""; if (container == null) throw new DAOException("domain.object.null.err.msg"); Validator validator = new Validator(); if (container.getStorageType() == null) { message = ApplicationProperties.getValue("storageContainer.type"); throw new DAOException(ApplicationProperties.getValue( "errors.item.required", message)); } if (container.getNoOfContainers() == null) { Integer conts = new Integer(1); container.setNoOfContainers(conts); } if (validator.isEmpty(container.getNoOfContainers().toString())) { message = ApplicationProperties .getValue("storageContainer.noOfContainers"); throw new DAOException(ApplicationProperties.getValue( "errors.item.required", message)); } if (!validator.isNumeric(container.getNoOfContainers().toString(), 1)) { message = ApplicationProperties .getValue("storageContainer.noOfContainers"); throw new DAOException(ApplicationProperties.getValue( "errors.item.format", message)); } if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() == null) { if (container.getSite() == null || container.getSite().getId() == null || container.getSite().getId() <= 0) { message = ApplicationProperties .getValue("storageContainer.site"); throw new DAOException(ApplicationProperties.getValue( "errors.item.invalid", message)); } } /* * if * (!validator.isNumeric(String.valueOf(container.getPositionDimensionOne()), * 1) || * !validator.isNumeric(String.valueOf(container.getPositionDimensionTwo()), * 1) || * !validator.isNumeric(String.valueOf(container.getParent().getId()), * 1)) { message = * ApplicationProperties.getValue("storageContainer.parentContainer"); * throw new * DAOException(ApplicationProperties.getValue("errors.item.format", * message)); } */ // validations for Container name // by falguni /* * if (validator.isEmpty(container.getName())) { message = * ApplicationProperties.getValue("storageContainer.name"); throw new * DAOException(ApplicationProperties.getValue("errors.item.required", * message)); } */ // validations for temperature if (container.getTempratureInCentigrade() != null && !validator.isEmpty(container.getTempratureInCentigrade() .toString()) && (!validator.isDouble(container.getTempratureInCentigrade() .toString(), false))) { message = ApplicationProperties .getValue("storageContainer.temperature"); throw new DAOException(ApplicationProperties.getValue( "errors.item.format", message)); } if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition().getParentContainer() != null) { if (container.getLocatedAtPosition().getParentContainer().getId() == null) { String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "id" }; String[] whereColumnName = { "name" }; String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { container.getLocatedAtPosition() .getParentContainer().getName() }; String joinCondition = null; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); if (!list.isEmpty()) { container.getLocatedAtPosition().getParentContainer() .setId((Long) list.get(0)); } else { String message1 = ApplicationProperties .getValue("specimen.storageContainer"); throw new DAOException(ApplicationProperties.getValue( "errors.invalid", message1)); } } // Long 
storageContainerId = specimen.getStorageContainer().getId(); Integer xPos = container.getLocatedAtPosition() .getPositionDimensionOne(); Integer yPos = container.getLocatedAtPosition() .getPositionDimensionTwo(); boolean isContainerFull = false; /** * Following code is added to set the x and y dimension in case only * storage container is given and x and y positions are not given */ if (xPos == null || yPos == null) { isContainerFull = true; Map containerMapFromCache = null; try { containerMapFromCache = (TreeMap) StorageContainerUtil .getContainerMapFromCache(); } catch (CacheException e) { e.printStackTrace(); } if (containerMapFromCache != null) { Iterator itr = containerMapFromCache.keySet().iterator(); while (itr.hasNext()) { NameValueBean nvb = (NameValueBean) itr.next(); if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getParentContainer() != null && nvb.getValue().toString().equals( container.getLocatedAtPosition() .getParentContainer().getId() .toString())) { Map tempMap = (Map) containerMapFromCache.get(nvb); Iterator tempIterator = tempMap.keySet().iterator(); ; NameValueBean nvb1 = (NameValueBean) tempIterator .next(); List list = (List) tempMap.get(nvb1); NameValueBean nvb2 = (NameValueBean) list.get(0); ContainerPosition cntPos = container .getLocatedAtPosition(); cntPos.setPositionDimensionOne(new Integer(nvb1 .getValue())); cntPos.setPositionDimensionTwo(new Integer(nvb2 .getValue())); cntPos.setOccupiedContainer(container); isContainerFull = false; break; } } } if (isContainerFull) { throw new DAOException( "The Storage Container you specified is full"); } } // VALIDATIONS FOR DIMENSION 1. if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getPositionDimensionOne() != null && validator.isEmpty(String.valueOf(container .getLocatedAtPosition().getPositionDimensionOne()))) { message = ApplicationProperties .getValue("storageContainer.oneDimension"); throw new DAOException(ApplicationProperties.getValue( "errors.item.required", message)); } else { if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getPositionDimensionOne() != null && !validator.isNumeric(String.valueOf(container .getLocatedAtPosition() .getPositionDimensionOne()))) { message = ApplicationProperties .getValue("storageContainer.oneDimension"); throw new DAOException(ApplicationProperties.getValue( "errors.item.format", message)); } } // Validations for dimension 2 if (container.getLocatedAtPosition() != null && container.getLocatedAtPosition() .getPositionDimensionTwo() != null && !validator.isEmpty(String.valueOf(container .getLocatedAtPosition().getPositionDimensionTwo())) && (!validator.isNumeric(String.valueOf(container .getLocatedAtPosition().getPositionDimensionTwo())))) { message = ApplicationProperties .getValue("storageContainer.twoDimension"); throw new DAOException(ApplicationProperties.getValue( "errors.item.format", message)); } } if (operation.equals(Constants.ADD)) { if (!Constants.ACTIVITY_STATUS_ACTIVE.equals(container .getActivityStatus())) { throw new DAOException(ApplicationProperties .getValue("activityStatus.active.errMsg")); } if (container.isFull().booleanValue()) { throw new DAOException(ApplicationProperties .getValue("storageContainer.isContainerFull.errMsg")); } } else { if (!Validator.isEnumeratedValue(Constants.ACTIVITY_STATUS_VALUES, container.getActivityStatus())) { throw new DAOException(ApplicationProperties .getValue("activityStatus.errMsg")); } } return true; } // TODO Write the 
proper business logic to return an appropriate list of // containers. public List getStorageContainerList() throws DAOException { String sourceObjectName = StorageContainer.class.getName(); String[] displayNameFields = { Constants.SYSTEM_IDENTIFIER }; String valueField = Constants.SYSTEM_IDENTIFIER; List list = getList(sourceObjectName, displayNameFields, valueField, true); return list; } public List getCollectionProtocolList() throws DAOException { String sourceObjectName = CollectionProtocol.class.getName(); List returnList = new ArrayList(); NameValueBean nvb1 = new NameValueBean("--Any--", "-1"); returnList.add(nvb1); List list = retrieve(sourceObjectName); Iterator itr = list.iterator(); while (itr.hasNext()) { CollectionProtocol collectionProtocol = (CollectionProtocol) itr .next(); NameValueBean nvb = new NameValueBean( collectionProtocol.getTitle(), collectionProtocol); returnList.add(nvb); } return returnList; } /** * This functions returns a double dimensional boolean array which tells the * availablity of storage positions of particular container. True - * Available. False - Not Available. * * @param container * The container. * @return Returns a double dimensional boolean array of position * availablity. * @throws DAOException */ public boolean[][] getAvailablePositionsForContainer(String containerId, int dimX, int dimY) throws DAOException { boolean[][] positions = new boolean[dimX][dimY]; // Initializing the array for (int i = 0; i < dimX; i++) { for (int j = 0; j < dimY; j++) { positions[i][j] = true; } } // Retrieving all the occupied positions by child containers String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = { "locatedAtPosition.positionDimensionOne", "locatedAtPosition.positionDimensionTwo" }; String[] whereColumnName = { "locatedAtPosition.parentContainer.id" }; String[] whereColumnCondition = { "=" }; Object[] whereColumnValue = { new Long(containerId) }; List list = retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); // Logger.out.debug("all the occupied positions by child // containers"+list); setPositions(positions, list); // Retrieving all the occupied positions by specimens sourceObjectName = Specimen.class.getName(); whereColumnName[0] = "specimenPosition.storageContainer.id"; selectColumnName[0] = "specimenPosition.positionDimensionOne"; selectColumnName[1] = "specimenPosition.positionDimensionTwo"; list = retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); setPositions(positions, list); // Retrieving all the occupied positions by specimens array sourceObjectName = SpecimenArray.class.getName(); whereColumnName[0] = "locatedAtPosition.parentContainer.id"; selectColumnName[0] = "locatedAtPosition.positionDimensionOne"; selectColumnName[1] = "locatedAtPosition.positionDimensionTwo"; list = retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, null); setPositions(positions, list); return positions; } /** * @param positions * @param list */ private void setPositions(boolean[][] positions, List list) { if (!list.isEmpty()) { int x, y; for (int i = 0; i < list.size(); i++) { Object[] object = (Object[]) list.get(i); x = Integer.parseInt(object[0].toString()); y = Integer.parseInt(object[1].toString()); positions[x][y] = false; } } } /** * This functions returns a double dimensional boolean array which tells the * availablity of storage positions of particular 
container. True - * Available. False - Not Available. * * @param containerId * The container identifier. * @return Returns a double dimensional boolean array of position * availablity. * @throws DAOException */ // public boolean[][] getAvailablePositions(String containerId) throws // DAOException // { // // List list = retrieve(StorageContainer.class.getName(), // Constants.SYSTEM_IDENTIFIER, new Long(containerId)); // // // // if (list != null) // // { // // StorageContainer container = (StorageContainer) list.get(0); // return getAvailablePositionsForContainer(containerId); // // } // // else // // { // // return new boolean[0][0]; // // } // } /** * This functions returns a map of available rows vs. available columns. * * @param container * The container. * @return Returns a map of available rows vs. available columns. * @throws DAOException */ public Map getAvailablePositionMapForContainer(String containerId, int aliquotCount, String positionDimensionOne, String positionDimensionTwo) throws DAOException { Map map = new TreeMap(); int count = 0; // Logger.out.debug("dimX:"+positionDimensionOne+":dimY:"+positionDimensionTwo); // if (!container.isFull().booleanValue()) // { int dimX = Integer.parseInt(positionDimensionOne) + 1; int dimY = Integer.parseInt(positionDimensionTwo) + 1; boolean[][] availablePosistions = getAvailablePositionsForContainer( containerId, dimX, dimY); for (int x = 1; x < availablePosistions.length; x++) { List list = new ArrayList(); for (int y = 1; y < availablePosistions[x].length; y++) { if (availablePosistions[x][y]) { list.add(new NameValueBean(new Integer(y), new Integer(y))); count++; } } if (!list.isEmpty()) { Integer xObj = new Integer(x); NameValueBean nvb = new NameValueBean(xObj, xObj); map.put(nvb, list); } } // } // Logger.out.info("Map :"+map); if (count < aliquotCount) { return new TreeMap(); } return map; } /** * This functions returns a map of available rows vs. available columns. * * @param containerId * The container identifier. * @return Returns a map of available rows vs. available columns. * @throws DAOException */ // public Map getAvailablePositionMap(String containerId, int aliquotCount) // throws DAOException // { // // List list = retrieve(StorageContainer.class.getName(), // Constants.SYSTEM_IDENTIFIER, new Long(containerId)); // // // // if (list != null) // // { // // StorageContainer container = (StorageContainer) list.get(0); // return getAvailablePositionMapForContainer(containerId, aliquotCount); // // } // // else // // { // // return new TreeMap(); // // } // } /** * This functions returns a map of allocated containers vs. their respective * free locations. * * @return Returns a map of allocated containers vs. their respective free * locations. 
* @throws DAOException */ public Map getAllocatedContainerMap() throws DAOException { /* * A code snippet inside the commented block should actually be replaced * by the code to get the allocated containers of specific collection * protocol */ // List list = retrieve(StorageContainer.class.getName()); String[] selectColumnName = { Constants.SYSTEM_IDENTIFIER, "name", "capacity.oneDimensionCapacity", "capacity.twoDimensionCapacity" }; List list = retrieve(StorageContainer.class.getName(), selectColumnName); Map containerMap = new TreeMap(); Logger.out.info("===================== list size:" + list.size()); Iterator itr = list.iterator(); while (itr.hasNext()) { Object containerList[] = (Object[]) itr.next(); // Logger.out.info("+++++++++++++++++++++++++++:"+container.getName()+"++++++++++:"+container.getId()); Map positionMap = getAvailablePositionMapForContainer(String .valueOf(containerList[0]), 0, containerList[2].toString(), containerList[3].toString()); if (!positionMap.isEmpty()) { // Logger.out.info("---------"+container.getName()+"------"+container.getId()); NameValueBean nvb = new NameValueBean(containerList[1], containerList[0]); containerMap.put(nvb, positionMap); } } return containerMap; } protected void loadSiteFromContainerId(DAO dao, StorageContainer container) throws DAOException { if (container != null) { Long sysId = container.getId(); Object object = dao.retrieve(StorageContainer.class.getName(), sysId); // System.out.println("siteIdList " + siteIdList); StorageContainer sc = (StorageContainer) object; // System.out.println("siteId " + sc.getSite().getId()); container.setSite(sc.getSite()); } } public TreeMap getAllocatedContaienrMapForContainer(long type_id, String exceedingMaxLimit, String selectedContainerName, SessionDataBean sessionDataBean) throws DAOException { long start = 0; long end = 0; TreeMap containerMap = new TreeMap(); JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); dao.openSession(null); start = System.currentTimeMillis(); // String queryStr = "SELECT t1.IDENTIFIER, t1.NAME FROM CATISSUE_CONTAINER t1 WHERE " // + "t1.IDENTIFIER IN (SELECT t4.STORAGE_CONTAINER_ID FROM CATISSUE_ST_CONT_ST_TYPE_REL t4 " // + "WHERE t4.STORAGE_TYPE_ID = '" // + type_id // + "' OR t4.STORAGE_TYPE_ID='1') AND " // + "t1.ACTIVITY_STATUS='" // + Constants.ACTIVITY_STATUS_ACTIVE + "' order by IDENTIFIER"; String queryStr = "SELECT t1.IDENTIFIER, t1.NAME FROM CATISSUE_CONTAINER t1 WHERE " + "t1.IDENTIFIER IN (SELECT t4.STORAGE_CONTAINER_ID FROM CATISSUE_ST_CONT_ST_TYPE_REL t4 " + "WHERE t4.STORAGE_TYPE_ID = '" + type_id + "' OR t4.STORAGE_TYPE_ID='1' and t4.STORAGE_CONTAINER_ID not in (select IDENTIFIER from catissue_storage_container where site_id in (select IDENTIFIER from catissue_site s1 where s1.ACTIVITY_STATUS='Closed'))) AND " + "t1.ACTIVITY_STATUS='" + Constants.ACTIVITY_STATUS_ACTIVE + "' order by IDENTIFIER"; Logger.out.debug("Storage Container query......................" 
+ queryStr); List list = new ArrayList(); try { list = dao.executeQuery(queryStr, null, false, null); } catch (Exception ex) { throw new DAOException(ex.getMessage()); } end = System.currentTimeMillis(); System.out.println("Time taken for executing query : " + (end - start)); dao.closeSession(); Map containerMapFromCache = null; Set<Long> siteIds = new UserBizLogic().getRelatedSiteIds(sessionDataBean.getUserId()); try { containerMapFromCache = StorageContainerUtil .getContainerMapFromCache(); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } boolean flag = true; if (containerMapFromCache != null) { int i = 1; Iterator itr = list.iterator(); while (itr.hasNext()) { List list1 = (List) itr.next(); String Id = (String) list1.get(0); Long siteId = getSiteIdForStorageContainerId(Long.valueOf(Id)); if(!sessionDataBean.isAdmin()) { if(!siteIds.contains(siteId)) { continue; } } String name = (String) list1.get(1); NameValueBean nvb = new NameValueBean(name, Id, new Long(Id)); if (selectedContainerName != null && flag) { if (!name.equalsIgnoreCase(selectedContainerName.trim())) { continue; } flag = false; } try { Map positionMap = (TreeMap) containerMapFromCache.get(nvb); if (positionMap != null && !positionMap.isEmpty()) { Map positionMap1 = deepCopyMap(positionMap); // NameValueBean nvb = new NameValueBean(Name, Id); if (i > containersMaxLimit) { exceedingMaxLimit = "true"; break; } else { containerMap.put(nvb, positionMap1); } i++; } } catch (Exception e) { Logger.out.info("Error while getting map from cache"); e.printStackTrace(); } } } return containerMap; } /* temp function end */ private Long getSiteIdForStorageContainerId(Long scId) { Session session = null; Long siteId = null; try { session = DBUtil.getCleanSession(); StorageContainer sc = (StorageContainer) session.load(StorageContainer.class.getName(), scId); if(sc != null) { Site site = sc.getSite(); siteId = site.getId(); } } catch (BizLogicException e1) { Logger.out.debug(e1.getMessage(), e1); } finally { session.close(); } return siteId; } public TreeMap getAllocatedContaienrMapForSpecimen(long cpId, String specimenClass, int aliquotCount, String exceedingMaxLimit, SessionDataBean sessionData, boolean closeSession) throws DAOException { NameValueBeanRelevanceComparator comparator = new NameValueBeanRelevanceComparator(); Logger.out .debug("method : getAllocatedContaienrMapForSpecimen()---getting containers for specimen--------------"); TreeMap containerMap = new TreeMap(comparator); List list = getRelevantContainerList(cpId, specimenClass, closeSession); Logger.out .debug("getAllocatedContaienrMapForSpecimen()----- Size of list--------:" + list.size()); Map containerMapFromCache = null; try { containerMapFromCache = (TreeMap) StorageContainerUtil .getContainerMapFromCache(); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } if (containerMapFromCache != null) { int i = 1; int relevenceCounter = 1; Iterator itr = list.iterator(); while (itr.hasNext()) { List list1 = (List) itr.next(); String Id = (String) list1.get(1); String Name = (String) list1.get(2); NameValueBean nvb = new NameValueBean(Name, Id, new Long( relevenceCounter)); Map positionMap = (TreeMap) containerMapFromCache.get(nvb); if (positionMap != null && !positionMap.isEmpty()) { StorageContainer sc = new StorageContainer(); sc.setId(new Long(Id)); boolean hasAccess = true; try { AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.HIBERNATE_DAO); dao.openSession(null); hasAccess = 
validateContainerAccess(dao,sc, sessionData,cpId); dao.closeSession(); } catch (SMException sme) { sme.printStackTrace(); throw handleSMException(sme); } if (!hasAccess) continue; if (i > containersMaxLimit) { Logger.out.debug("CONTAINERS_MAX_LIMIT reached"); exceedingMaxLimit = new String("true"); break; } else { if (aliquotCount > 0) { long count = countPositionsInMap(positionMap); if (count >= aliquotCount) { containerMap.put(nvb, positionMap); } } else { containerMap.put(nvb, positionMap); } } i++; } relevenceCounter++; } Logger.out .debug("getAllocatedContaienrMapForSpecimen()----Size of containerMap:" + containerMap.size()); } Logger.out.debug("exceedingMaxLimit----------" + exceedingMaxLimit); return containerMap; } private boolean validateContainerAccess(AbstractDAO dao, StorageContainer sc, SessionDataBean sessionData, long cpId) throws SMException { boolean isValidContainer = validateContainerAccess(dao,sc,sessionData); if(sessionData != null && sessionData.isAdmin()) { return true; } Collection<Site> siteCollection = null; Site site = null; if (isValidContainer) { try { site = getSite(dao, sc.getId()); } catch (DAOException e) { Logger.out.debug(e.getMessage(), e); } siteCollection = new CollectionProtocolBizLogic().getRelatedSites(cpId); if (siteCollection != null) { for(Site site1 : siteCollection) { if(site1.getId().equals(site.getId())) { return true; } } } } return false; } /** * This function gets the list of container in order of there relvance. * * @param cpId * collection protocol Id * @param specimenClass * class of the specimen * @param closeSession * @return list of containers in order of there relevence. * @throws DAOException * @author Vaishali */ public List getRelevantContainerList(long cpId, String specimenClass, boolean closeSession) throws DAOException { List list = new ArrayList(); JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); dao.openSession(null); String[] queryArray = new String[6]; // category # 1 // Gets all container which stores just specified collection protocol // and specified specimen class String equalToOne = " = 1 "; String greaterThanOne = " > 1 "; String equalToFour = " = 4 "; String notEqualToFour = " !=4 "; String endQry = " and t1.IDENTIFIER = t6.STORAGE_CONTAINER_ID and t1.IDENTIFIER = t7.IDENTIFIER" + " group by t6.STORAGE_CONTAINER_ID, t1.NAME " + " order by co asc "; String cpRestrictionCountQuery = "(select count(*) from CATISSUE_ST_CONT_COLL_PROT_REL t4 where t4.STORAGE_CONTAINER_ID = t1.IDENTIFIER)"; String specimenClassRestrictionQuery = "(select count(*) from CATISSUE_STOR_CONT_SPEC_CLASS t5 where t5.STORAGE_CONTAINER_ID = t1.IDENTIFIER)"; // Vijay main query and default restriction query is updated according // to bug id#8076 String mainQuery = " SELECT count(*) co, t6.STORAGE_CONTAINER_ID, t1.NAME FROM CATISSUE_CONTAINER t1 , CATISSUE_STOR_CONT_SPEC_CLASS t6 , CATISSUE_STORAGE_CONTAINER t7 " + " WHERE t1.IDENTIFIER IN (SELECT t2.STORAGE_CONTAINER_ID" + " FROM CATISSUE_ST_CONT_COLL_PROT_REL t2 WHERE t2.COLLECTION_PROTOCOL_ID = '" + cpId + "')" + " AND t1.ACTIVITY_STATUS='Active'" + " and t1.IDENTIFIER IN (SELECT t3.STORAGE_CONTAINER_ID FROM CATISSUE_STOR_CONT_SPEC_CLASS t3" + " WHERE t3.SPECIMEN_CLASS = '" + specimenClass + "')" + " AND t1.ACTIVITY_STATUS='Active' AND t1.IDENTIFIER=t7.IDENTIFIER AND t7.SITE_ID NOT IN (SELECT IDENTIFIER FROM CATISSUE_SITE WHERE ACTIVITY_STATUS='Closed')"; String defaultRestrictionQuery = " SELECT count(*) co, t6.STORAGE_CONTAINER_ID, t1.NAME FROM CATISSUE_CONTAINER t1 
, CATISSUE_STOR_CONT_SPEC_CLASS t6 , CATISSUE_STORAGE_CONTAINER t7 " + " WHERE t1.IDENTIFIER NOT IN (SELECT t2.STORAGE_CONTAINER_ID FROM CATISSUE_ST_CONT_COLL_PROT_REL t2)" + " and t1.IDENTIFIER IN (SELECT t3.STORAGE_CONTAINER_ID FROM CATISSUE_STOR_CONT_SPEC_CLASS t3" + " WHERE t3.SPECIMEN_CLASS = '" + specimenClass + "') " + " AND t1.ACTIVITY_STATUS='Active' AND t7.SITE_ID NOT IN (SELECT IDENTIFIER FROM CATISSUE_SITE WHERE ACTIVITY_STATUS='Closed')"; String queryStr1 = mainQuery + " and " + cpRestrictionCountQuery + equalToOne + " and " + specimenClassRestrictionQuery + equalToOne + endQry; // category # 2 // Gets all containers which holds just specified container and any // specimen class String queryStr2 = mainQuery + " and " + cpRestrictionCountQuery + equalToOne + " and " + specimenClassRestrictionQuery + greaterThanOne + endQry; // catgory # 3 // Gets all the containers which holds other than specified collection // protocol and only specified specimen class String queryStr3 = mainQuery + " and " + cpRestrictionCountQuery + greaterThanOne + " and " + specimenClassRestrictionQuery + equalToOne + endQry; // catgory # 4 // Gets all the containers which holds specified cp and other than // specified collection protocol and specified specimen class and other // than specified specimen class String queryStr4 = mainQuery + " and " + cpRestrictionCountQuery + greaterThanOne + " and " + specimenClassRestrictionQuery + greaterThanOne + endQry; // catgory # 5 // Gets all the containers which holds any collection protocol and // specified specimen class and other than specified specimen class String queryStr5 = defaultRestrictionQuery + " and " + specimenClassRestrictionQuery + notEqualToFour + endQry; // catgory # 6 // Gets all the containers which holds any collection protocol and any // specimen class String queryStr6 = defaultRestrictionQuery + " and " + specimenClassRestrictionQuery + equalToFour + endQry; queryArray[0] = queryStr1; queryArray[1] = queryStr2; queryArray[2] = queryStr3; queryArray[3] = queryStr4; queryArray[4] = queryStr5; queryArray[5] = queryStr6; for (int i = 0; i < 6; i++) { Logger.out.debug("Storage Container query......................" + queryArray[i]); System.out.println("Storage Container query......................" + queryArray[i]); List queryResultList = executeStorageContQuery(queryArray[i], dao); list.addAll(queryResultList); } if (closeSession) { dao.closeSession(); } return list; } /** * This function executes the query * * @param query * @param dao * @return * @throws DAOException */ public List executeStorageContQuery(String query, JDBCDAO dao) throws DAOException { Logger.out.debug("Storage Container query......................" + query); List list = new ArrayList(); try { list = dao.executeQuery(query, null, false, null); } catch (Exception ex) { throw new DAOException(ex.getMessage()); } return list; } /** * Gets allocated container map for specimen array. * * @param specimen_array_type_id * specimen array type id * @param noOfAliqoutes * No. 
of aliquotes * @return container map * @throws DAOException -- * throws DAO Exception * @see edu.wustl.common.dao.JDBCDAOImpl */ public TreeMap getAllocatedContaienrMapForSpecimenArray( long specimen_array_type_id, int noOfAliqoutes, SessionDataBean sessionData, String exceedingMaxLimit) throws DAOException { NameValueBeanValueComparator contComp = new NameValueBeanValueComparator(); TreeMap containerMap = new TreeMap(contComp); JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); dao.openSession(null); String includeAllIdQueryStr = " OR t4.SPECIMEN_ARRAY_TYPE_ID = '" + Constants.ARRAY_TYPE_ALL_ID + "'"; if (!(new Validator().isValidOption(String .valueOf(specimen_array_type_id)))) { includeAllIdQueryStr = ""; } String queryStr = "select t1.IDENTIFIER,t1.name from CATISSUE_CONTAINER t1,CATISSUE_STORAGE_CONTAINER t2 " + "where t1.IDENTIFIER IN (select t4.STORAGE_CONTAINER_ID from CATISSUE_CONT_HOLDS_SPARRTYPE t4 " + "where t4.SPECIMEN_ARRAY_TYPE_ID = '" + specimen_array_type_id + "'" + includeAllIdQueryStr + ") and t1.IDENTIFIER = t2.IDENTIFIER"; Logger.out.debug("SPECIMEN ARRAY QUERY ......................" + queryStr); List list = new ArrayList(); Set<Long> siteIds = new UserBizLogic().getRelatedSiteIds(sessionData.getUserId()); try { list = dao.executeQuery(queryStr, null, false, null); } catch (Exception ex) { throw new DAOException(ex.getMessage()); } dao.closeSession(); Logger.out.info("Size of list:" + list.size()); Map containerMapFromCache = null; try { containerMapFromCache = (TreeMap) StorageContainerUtil .getContainerMapFromCache(); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } if (containerMapFromCache != null) { int i = 1; Iterator itr = list.iterator(); while (itr.hasNext()) { List list1 = (List) itr.next(); String Id = (String) list1.get(0); Long siteId = getSiteIdForStorageContainerId(Long.valueOf(Id)); if(!sessionData.isAdmin()) { if(!siteIds.contains(siteId)) { continue; } } String Name = (String) list1.get(1); NameValueBean nvb = new NameValueBean(Name, Id); Map positionMap = (TreeMap) containerMapFromCache.get(nvb); if (positionMap != null && !positionMap.isEmpty()) { // deep copy is required due to cache updation by reference Map positionMap1 = deepCopyMap(positionMap); // NameValueBean nvb = new NameValueBean(Name, Id); StorageContainer sc = new StorageContainer(); sc.setId(new Long(Id)); /* * boolean hasAccess = true; try { hasAccess = * validateContainerAccess(sc, sessionData); } catch * (SMException sme) { sme.printStackTrace(); throw * handleSMException(sme); } if (!hasAccess) continue; */ if (i > containersMaxLimit) { exceedingMaxLimit = "true"; break; } else { containerMap.put(nvb, positionMap1); } i++; } } } return containerMap; } // --------------Code for Map Mandar: 04-Sep-06 start // Mandar : 29Aug06 : for StorageContainerMap /** * @param id * Identifier of the StorageContainer related to which the * collectionProtocol titles are to be retrieved. * @return List of collectionProtocol title. * @throws DAOException */ public List getCollectionProtocolList(String id) throws DAOException { // Query to return titles of collection protocol related to given // storagecontainer. 29-Aug-06 Mandar. 
String sql = " SELECT SP.TITLE TITLE FROM CATISSUE_SPECIMEN_PROTOCOL SP, CATISSUE_ST_CONT_COLL_PROT_REL SC " + " WHERE SP.IDENTIFIER = SC.COLLECTION_PROTOCOL_ID AND SC.STORAGE_CONTAINER_ID = " + id; List resultList = executeSQL(sql); Iterator iterator = resultList.iterator(); List returnList = new ArrayList(); while (iterator.hasNext()) { List list = (List) iterator.next(); String data = (String) list.get(0); returnList.add(data); } if (returnList.isEmpty()) { returnList.add(new String(Constants.ALL)); } return returnList; } /** * @param id * Identifier of the StorageContainer related to which the * collectionProtocol titles are to be retrieved. * @return List of collectionProtocol title. * @throws DAOException */ public List getSpecimenClassList(String id) throws DAOException { // Query to return specimen classes related to given storagecontainer. // 29-Aug-06 Mandar. String sql = " SELECT SP.SPECIMEN_CLASS CLASS FROM CATISSUE_STOR_CONT_SPEC_CLASS SP " + "WHERE SP.STORAGE_CONTAINER_ID = " + id; List resultList = executeSQL(sql); Iterator iterator = resultList.iterator(); List returnList = new ArrayList(); while (iterator.hasNext()) { List list = (List) iterator.next(); for (int cnt = 0; cnt < list.size(); cnt++) { String data = (String) list.get(cnt); returnList.add(data); } } if (returnList.isEmpty()) { // bug id 7438 // returnList.add(new String(Constants.ALL)); returnList.add(new String(Constants.NONE)); } return returnList; } /** * @param sql * @return * @throws DAOException */ private List executeSQL(String sql) throws DAOException { JDBCDAO dao = (JDBCDAO) DAOFactory.getInstance().getDAO( Constants.JDBC_DAO); List resultList = new ArrayList(); try { dao.openSession(null); resultList = dao.executeQuery(sql, null, false, null); dao.closeSession(); } catch (Exception daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } return resultList; } // prints results returned from DAO executeQuery To comment after debug private void printRecords(List list) { if (list != null) { if (!list.isEmpty()) { // System.out.println("OuterList Size : " + list.size()); for (int i = 0; i < list.size(); i++) { List innerList = (List) list.get(i); // System.out.println("\nInnerList Size : " + // innerList.size() + "\n"); String s = ""; for (int j = 0; j < innerList.size(); j++) { String s1 = (String) innerList.get(j); s = s + " | " + s1; } // System.out.print(s); } } } } // Method to fetch ToolTipData for a given Container private String getToolTipData(String containerID) throws DAOException { String toolTipData = ""; List specimenClassList = getSpecimenClassList(containerID); String classData = "SpecimenClass"; for (int counter = 0; counter < specimenClassList.size(); counter++) { String data = (String) specimenClassList.get(counter); classData = classData + " | " + data; } List collectionProtocolList = getCollectionProtocolList(containerID); String protocolData = "CollectionProtocol"; for (int cnt = 0; cnt < collectionProtocolList.size(); cnt++) { String data = (String) collectionProtocolList.get(cnt); protocolData = protocolData + " | " + data; } toolTipData = protocolData + "\n" + classData; // System.out.println(toolTipData); return toolTipData; } // --------------Code for Map Mandar: 04-Sep-06 end // this function is for making the deep copy of map private Map deepCopyMap(Map positionMap) { Map positionMap1 = new TreeMap(); Set keySet = positionMap.keySet(); Iterator itr = keySet.iterator(); while (itr.hasNext()) { NameValueBean key = (NameValueBean) itr.next(); NameValueBean key1 = new 
NameValueBean(key.getName(), key .getValue()); List value = (ArrayList) positionMap.get(key); List value1 = new ArrayList(); Iterator itr1 = value.iterator(); while (itr1.hasNext()) { NameValueBean ypos = (NameValueBean) itr1.next(); NameValueBean ypos1 = new NameValueBean(ypos.getName(), ypos .getValue()); value1.add(ypos1); } positionMap1.put(key1, value1); } return positionMap1; } private long countPositionsInMap(Map positionMap) { long count = 0; Set keySet = positionMap.keySet(); Iterator itr = keySet.iterator(); while (itr.hasNext()) { NameValueBean key = (NameValueBean) itr.next(); List value = (ArrayList) positionMap.get(key); count = count + value.size(); } return count; } /** * Bug ID: 4038 Patch ID: 4038_3 See also: 1-3 */ /** * * @param dao * Object of DAO * @param containerId * id of container whose site is to be retrieved * @return Site object belongs to container with given id * @throws DAOException * Exception occured while DB handling */ private Site getSite(DAO dao, Long containerId) throws DAOException { String sourceObjectName = StorageContainer.class.getName(); String[] selectColumnName = new String[] { "site" }; String[] whereColumnName = new String[] { "id" }; // "storageContainer."+Constants.SYSTEM_IDENTIFIER String[] whereColumnCondition = new String[] { "=" }; Object[] whereColumnValue = new Long[] { containerId }; String joinCondition = null; List list = dao.retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); if (!list.isEmpty()) { return ((Site) list.get(0)); } return null; } /** * Name : kalpana thakur Reviewer Name : Vaishali Bug ID: 4922 * Description:Storage container will not be added to closed site :check for * closed site */ public void checkClosedSite(DAO dao, Long containerId, String errMessage) throws DAOException { Site site = getSite(dao, containerId); // check for closed Site if (site != null) { if (Constants.ACTIVITY_STATUS_CLOSED.equals(site .getActivityStatus())) { throw new DAOException(errMessage + " " + ApplicationProperties.getValue("error.object.closed")); } } } /** * To get the ids of the CollectionProtocol that the given StorageContainer * can hold. * * @param type * The reference to StorageType object. * @return The array of ids of CollectionProtocol that the given * StorageContainer can hold. * @throws DAOException */ public long[] getDefaultHoldCollectionProtocolList( StorageContainer container) throws DAOException { Collection spcimenArrayTypeCollection = (Collection) retrieveAttribute( StorageContainer.class.getName(), container.getId(), "elements(collectionProtocolCollection)"); if (spcimenArrayTypeCollection.isEmpty()) { return new long[] { -1 }; } else { return Utility.getobjectIds(spcimenArrayTypeCollection); } } /** * To check wether the Continer to display can holds the given type of * container. * * @param typeId * ContinerType id of container * @param storageContainer * The StorageContainer reference to be displayed on the page. * @param StorageContainerBizLogic * The reference to bizLogic class object. * @return true if the given continer can hold the typet. 
* @throws DAOException */ public boolean canHoldContainerType(int typeId, StorageContainer storageContainer) throws DAOException { /** * Name: Smita Reviewer: Sachin Bug iD: 4598 Patch ID: 4598_1 * Description: Check for valid container type */ if (!isValidContaierType(typeId)) { return false; } boolean canHold = false; Collection containerTypeCollection = (Collection) retrieveAttribute( StorageContainer.class.getName(), storageContainer.getId(), "elements(holdsStorageTypeCollection)");// storageContainer.getHoldsStorageTypeCollection(); if (!containerTypeCollection.isEmpty()) { Iterator itr = containerTypeCollection.iterator(); while (itr.hasNext()) { StorageType type = (StorageType) itr.next(); long storagetypeId = type.getId().longValue(); if (storagetypeId == Constants.ALL_STORAGE_TYPE_ID || storagetypeId == typeId) { return true; } } } return canHold; } /** * Patch ID: 4598_2 Is container type one from the container types present * in the system * * @param typeID * Container type ID * @return true/ false * @throws DAOException */ public boolean isValidContaierType(int typeID) throws DAOException { Long longId = (Long) retrieveAttribute(StorageType.class.getName(), new Long(typeID), "id"); return !(longId == null); } /** * To check wether the Continer to display can holds the given * CollectionProtocol. * * @param collectionProtocolId * The collectionProtocol Id. * @param storageContainer * The StorageContainer reference to be displayed on the page. * @return true if the given continer can hold the CollectionProtocol. * @throws DAOException */ public boolean canHoldCollectionProtocol(long collectionProtocolId, StorageContainer storageContainer) throws DAOException { boolean canHold = true; Collection collectionProtocols = (Collection) retrieveAttribute( StorageContainer.class.getName(), storageContainer.getId(), "elements(collectionProtocolCollection)");// storageContainer.getCollectionProtocolCollection(); if (!collectionProtocols.isEmpty()) { Iterator itr = collectionProtocols.iterator(); canHold = false; while (itr.hasNext()) { CollectionProtocol cp = (CollectionProtocol) itr.next(); if (cp.getId().longValue() == collectionProtocolId) { return true; } } } return canHold; } /** * To check wether the Continer to display can holds the given * specimenClass. * * @param specimenClass * The specimenClass Name. * @param storageContainer * The StorageContainer reference to be displayed on the page. * @param bizLogic * The reference to bizLogic class object. * @return true if the given continer can hold the specimenClass. * @throws DAOException */ public boolean canHoldSpecimenClass(String specimenClass, StorageContainer storageContainer) throws DAOException { Collection specimenClasses = (Collection) retrieveAttribute( StorageContainer.class.getName(), storageContainer.getId(), "elements(holdsSpecimenClassCollection)");// storageContainer.getHoldsSpecimenClassCollection(); Iterator itr = specimenClasses.iterator(); while (itr.hasNext()) { String className = (String) itr.next(); if (className.equals(specimenClass)) return true; } return false; } /** * To check wether the Continer to display can holds the given * specimenArrayTypeId. * * @param specimenArrayTypeId * The Specimen Array Type Id. * @param storageContainer * The StorageContainer reference to be displayed on the page. * @param bizLogic * The reference to bizLogic class object. * @return true if the given continer can hold the specimenArrayType. 
*/ public boolean canHoldSpecimenArrayType(int specimenArrayTypeId, StorageContainer storageContainer) throws DAOException { boolean canHold = true; Collection specimenArrayTypes = (Collection) retrieveAttribute( StorageContainer.class.getName(), storageContainer.getId(), "elements(holdsSpecimenArrayTypeCollection)");// storageContainer.getHoldsSpArrayTypeCollection(); // if (!specimenArrayTypes.isEmpty()) { Iterator itr = specimenArrayTypes.iterator(); canHold = false; while (itr.hasNext()) { SpecimenArrayType specimenarrayType = (SpecimenArrayType) itr .next(); long arraytypeId = specimenarrayType.getId().longValue(); if (arraytypeId == Constants.ALL_SPECIMEN_ARRAY_TYPE_ID || arraytypeId == specimenArrayTypeId) { return true; } } } return canHold; } public Collection<SpecimenPosition> getSpecimenPositionCollForContainer( DAO dao, Long containerId) throws DAOException { if (containerId != null) { List specimenPosColl = dao.retrieve(SpecimenPosition.class .getName(), "storageContainer.id", containerId); return specimenPosColl; } return null; } /** * Called from DefaultBizLogic to get ObjectId for authorization check * (non-Javadoc) * @see edu.wustl.common.bizlogic.DefaultBizLogic#getObjectId(edu.wustl.common.dao.AbstractDAO, java.lang.Object) */ public String getObjectId(AbstractDAO dao, Object domainObject) { if (domainObject instanceof StorageContainer) { StorageContainer storageContainer = (StorageContainer) domainObject; Site site = null; if (storageContainer.getLocatedAtPosition() != null && storageContainer.getLocatedAtPosition().getParentContainer() != null) { try { Object object = dao.retrieve(StorageContainer.class.getName(), storageContainer.getLocatedAtPosition().getParentContainer() .getId()); if (object != null) { StorageContainer parentContainer = (StorageContainer) object; site = parentContainer.getSite(); } } catch (DAOException e) { return null; } } else { site = storageContainer.getSite(); } if (site != null) { StringBuffer sb = new StringBuffer(); sb.append(Site.class.getName()).append("_").append(site.getId().toString()); return sb.toString(); } } return null; } /** * To get PrivilegeName for authorization check from 'PermissionMapDetails.xml' * (non-Javadoc) * @see edu.wustl.common.bizlogic.DefaultBizLogic#getPrivilegeName(java.lang.Object) */ protected String getPrivilegeKey(Object domainObject) { return Constants.ADD_EDIT_STORAGE_CONTAINER; } }
8313 and 9309 fix SVN-Revision: 15444
WEB-INF/src/edu/wustl/catissuecore/bizlogic/StorageContainerBizLogic.java
8313 and 9309 fix
Java
bsd-3-clause
70623d7a0245d4a3dbd3009947569bf38ff21c3a
0
NCIP/caintegrator,NCIP/caintegrator,NCIP/caintegrator,NCIP/caintegrator,NCIP/caintegrator
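The StorageContainerBizLogic row above copies each cached position map before handing it out ("deep copy is required due to cache updation by reference"): if callers received the cached TreeMap and its ArrayList values directly, removing a position to satisfy one request would silently remove it for every later request served from the same cache. A minimal sketch of that copy follows, assuming String keys in place of NameValueBean and a hypothetical deepCopy helper name.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class DeepCopySketch {

    // Copies both the map and the contained lists, so callers can remove
    // positions from the copy without corrupting the shared cache - the
    // rationale given for deepCopyMap() in StorageContainerBizLogic.
    static Map<String, List<String>> deepCopy(Map<String, List<String>> cached) {
        Map<String, List<String>> copy = new TreeMap<String, List<String>>();
        for (Map.Entry<String, List<String>> entry : cached.entrySet()) {
            copy.put(entry.getKey(), new ArrayList<String>(entry.getValue()));
        }
        return copy;
    }

    public static void main(String[] args) {
        Map<String, List<String>> cache = new TreeMap<String, List<String>>();
        cache.put("row1", new ArrayList<String>(Arrays.asList("1", "2")));

        Map<String, List<String>> copy = deepCopy(cache);
        copy.get("row1").remove("1");

        System.out.println(cache.get("row1")); // [1, 2] - the cache is untouched
    }
}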
/** * The software subject to this notice and license includes both human readable * source code form and machine readable, binary, object code form. The caIntegrator2 * Software was developed in conjunction with the National Cancer Institute * (NCI) by NCI employees, 5AM Solutions, Inc. (5AM), ScenPro, Inc. (ScenPro) * and Science Applications International Corporation (SAIC). To the extent * government employees are authors, any rights in such works shall be subject * to Title 17 of the United States Code, section 105. * * This caIntegrator2 Software License (the License) is between NCI and You. You (or * Your) shall mean a person or an entity, and all other entities that control, * are controlled by, or are under common control with the entity. Control for * purposes of this definition means (i) the direct or indirect power to cause * the direction or management of such entity, whether by contract or otherwise, * or (ii) ownership of fifty percent (50%) or more of the outstanding shares, * or (iii) beneficial ownership of such entity. * * This License is granted provided that You agree to the conditions described * below. NCI grants You a non-exclusive, worldwide, perpetual, fully-paid-up, * no-charge, irrevocable, transferable and royalty-free right and license in * its rights in the caIntegrator2 Software to (i) use, install, access, operate, * execute, copy, modify, translate, market, publicly display, publicly perform, * and prepare derivative works of the caIntegrator2 Software; (ii) distribute and * have distributed to and by third parties the caIntegrator2 Software and any * modifications and derivative works thereof; and (iii) sublicense the * foregoing rights set out in (i) and (ii) to third parties, including the * right to license such rights to further third parties. For sake of clarity, * and not by way of limitation, NCI shall have no right of accounting or right * of payment from You or Your sub-licensees for the rights granted under this * License. This License is granted at no charge to You. * * Your redistributions of the source code for the Software must retain the * above copyright notice, this list of conditions and the disclaimer and * limitation of liability of Article 6, below. Your redistributions in object * code form must reproduce the above copyright notice, this list of conditions * and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * * Your end-user documentation included with the redistribution, if any, must * include the following acknowledgment: This product includes software * developed by 5AM, ScenPro, SAIC and the National Cancer Institute. If You do * not include such end-user documentation, You shall include this acknowledgment * in the Software itself, wherever such third-party acknowledgments normally * appear. * * You may not use the names "The National Cancer Institute", "NCI", "ScenPro", * "SAIC" or "5AM" to endorse or promote products derived from this Software. * This License does not authorize You to use any trademarks, service marks, * trade names, logos or product names of either NCI, ScenPro, SAID or 5AM, * except as required to comply with the terms of this License. * * For sake of clarity, and not by way of limitation, You may incorporate this * Software into Your proprietary programs and into any third party proprietary * programs. 
However, if You incorporate the Software into third party * proprietary programs, You agree that You are solely responsible for obtaining * any permission from such third parties required to incorporate the Software * into such third party proprietary programs and for informing Your a * sub-licensees, including without limitation Your end-users, of their * obligation to secure any required permissions from such third parties before * incorporating the Software into such third party proprietary software * programs. In the event that You fail to obtain such permissions, You agree * to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such * permissions. * * For sake of clarity, and not by way of limitation, You may add Your own * copyright statement to Your modifications and to the derivative works, and * You may provide additional or different license terms and conditions in Your * sublicenses of modifications of the Software, or any derivative works of the * Software as a whole, provided Your use, reproduction, and distribution of the * Work otherwise complies with the conditions stated in this License. * * THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, * (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, * NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO * EVENT SHALL THE NATIONAL CANCER INSTITUTE, 5AM SOLUTIONS, INC., SCENPRO, INC., * SCIENCE APPLICATIONS INTERNATIONAL CORPORATION OR THEIR * AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package gov.nih.nci.caintegrator2.web.action.study.management; import static org.junit.Assert.*; import java.util.HashMap; import gov.nih.nci.caintegrator2.AcegiAuthenticationStub; import gov.nih.nci.caintegrator2.TestDataFiles; import gov.nih.nci.caintegrator2.application.study.StudyManagementServiceStub; import gov.nih.nci.caintegrator2.application.workspace.WorkspaceServiceStub; import org.acegisecurity.context.SecurityContextHolder; import org.junit.Before; import org.junit.Test; import com.opensymphony.xwork2.ActionContext; import com.opensymphony.xwork2.ActionSupport; public class SaveControlSamplesActionTest { private SaveControlSamplesAction action = new SaveControlSamplesAction(); StudyManagementServiceStub studyManagementServiceStub = new StudyManagementServiceStub(); @Before public void setUp() { SecurityContextHolder.getContext().setAuthentication(new AcegiAuthenticationStub()); ActionContext.getContext().setSession(new HashMap<String, Object>()); action.setStudyManagementService(studyManagementServiceStub); action.setWorkspaceService(new WorkspaceServiceStub()); } @Test public void testValidate() { action.validate(); assertTrue(action.hasFieldErrors()); action.clearErrorsAndMessages(); action.setControlSampleFile(TestDataFiles.REMBRANDT_CONTROL_SAMPLES_FILE); assertFalse(action.hasFieldErrors()); } @Test public void testExecute() { action.setControlSampleFile(TestDataFiles.REMBRANDT_CONTROL_SAMPLES_FILE); assertEquals(ActionSupport.SUCCESS, action.execute()); assertTrue(studyManagementServiceStub.addControlSamplesCalled); } }
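The updated test above seeds two pieces of per-thread state in setUp: an AcegiAuthenticationStub in SecurityContextHolder and a fresh session map in ActionContext. Both are thread-local, so a companion tear-down is commonly paired with such a setUp to keep that state from leaking into other tests run on the same thread. The sketch below assumes JUnit 4 and the same Acegi/XWork2 classes used in the test; it illustrates that pattern and is not part of the commit itself.

import java.util.HashMap;

import org.acegisecurity.context.SecurityContextHolder;
import org.junit.After;

import com.opensymphony.xwork2.ActionContext;

public abstract class AbstractSecuredActionTest {

    // Clears the thread-local security context and resets the action session
    // after each test, so state set up in @Before cannot leak between tests.
    @After
    public void tearDownSecurityAndSession() {
        SecurityContextHolder.clearContext();
        if (ActionContext.getContext() != null) {
            ActionContext.getContext().setSession(new HashMap<String, Object>());
        }
    }
}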
caintegrator2-war/test/src/gov/nih/nci/caintegrator2/web/action/study/management/SaveControlSamplesActionTest.java
/** * The software subject to this notice and license includes both human readable * source code form and machine readable, binary, object code form. The caIntegrator2 * Software was developed in conjunction with the National Cancer Institute * (NCI) by NCI employees, 5AM Solutions, Inc. (5AM), ScenPro, Inc. (ScenPro) * and Science Applications International Corporation (SAIC). To the extent * government employees are authors, any rights in such works shall be subject * to Title 17 of the United States Code, section 105. * * This caIntegrator2 Software License (the License) is between NCI and You. You (or * Your) shall mean a person or an entity, and all other entities that control, * are controlled by, or are under common control with the entity. Control for * purposes of this definition means (i) the direct or indirect power to cause * the direction or management of such entity, whether by contract or otherwise, * or (ii) ownership of fifty percent (50%) or more of the outstanding shares, * or (iii) beneficial ownership of such entity. * * This License is granted provided that You agree to the conditions described * below. NCI grants You a non-exclusive, worldwide, perpetual, fully-paid-up, * no-charge, irrevocable, transferable and royalty-free right and license in * its rights in the caIntegrator2 Software to (i) use, install, access, operate, * execute, copy, modify, translate, market, publicly display, publicly perform, * and prepare derivative works of the caIntegrator2 Software; (ii) distribute and * have distributed to and by third parties the caIntegrator2 Software and any * modifications and derivative works thereof; and (iii) sublicense the * foregoing rights set out in (i) and (ii) to third parties, including the * right to license such rights to further third parties. For sake of clarity, * and not by way of limitation, NCI shall have no right of accounting or right * of payment from You or Your sub-licensees for the rights granted under this * License. This License is granted at no charge to You. * * Your redistributions of the source code for the Software must retain the * above copyright notice, this list of conditions and the disclaimer and * limitation of liability of Article 6, below. Your redistributions in object * code form must reproduce the above copyright notice, this list of conditions * and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * * Your end-user documentation included with the redistribution, if any, must * include the following acknowledgment: This product includes software * developed by 5AM, ScenPro, SAIC and the National Cancer Institute. If You do * not include such end-user documentation, You shall include this acknowledgment * in the Software itself, wherever such third-party acknowledgments normally * appear. * * You may not use the names "The National Cancer Institute", "NCI", "ScenPro", * "SAIC" or "5AM" to endorse or promote products derived from this Software. * This License does not authorize You to use any trademarks, service marks, * trade names, logos or product names of either NCI, ScenPro, SAID or 5AM, * except as required to comply with the terms of this License. * * For sake of clarity, and not by way of limitation, You may incorporate this * Software into Your proprietary programs and into any third party proprietary * programs. 
However, if You incorporate the Software into third party * proprietary programs, You agree that You are solely responsible for obtaining * any permission from such third parties required to incorporate the Software * into such third party proprietary programs and for informing Your a * sub-licensees, including without limitation Your end-users, of their * obligation to secure any required permissions from such third parties before * incorporating the Software into such third party proprietary software * programs. In the event that You fail to obtain such permissions, You agree * to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such * permissions. * * For sake of clarity, and not by way of limitation, You may add Your own * copyright statement to Your modifications and to the derivative works, and * You may provide additional or different license terms and conditions in Your * sublicenses of modifications of the Software, or any derivative works of the * Software as a whole, provided Your use, reproduction, and distribution of the * Work otherwise complies with the conditions stated in this License. * * THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, * (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, * NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO * EVENT SHALL THE NATIONAL CANCER INSTITUTE, 5AM SOLUTIONS, INC., SCENPRO, INC., * SCIENCE APPLICATIONS INTERNATIONAL CORPORATION OR THEIR * AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package gov.nih.nci.caintegrator2.web.action.study.management; import static org.junit.Assert.*; import gov.nih.nci.caintegrator2.TestDataFiles; import gov.nih.nci.caintegrator2.application.study.StudyManagementServiceStub; import org.junit.Before; import org.junit.Test; import com.opensymphony.xwork2.ActionSupport; public class SaveControlSamplesActionTest { private SaveControlSamplesAction action = new SaveControlSamplesAction(); StudyManagementServiceStub studyManagementServiceStub = new StudyManagementServiceStub(); @Before public void setUp() { action.setStudyManagementService(studyManagementServiceStub); } @Test public void testValidate() { action.validate(); assertTrue(action.hasFieldErrors()); action.clearErrorsAndMessages(); action.setControlSampleFile(TestDataFiles.REMBRANDT_CONTROL_SAMPLES_FILE); assertFalse(action.hasFieldErrors()); } @Test public void testExecute() { action.setControlSampleFile(TestDataFiles.REMBRANDT_CONTROL_SAMPLES_FILE); assertEquals(ActionSupport.SUCCESS, action.execute()); assertTrue(studyManagementServiceStub.addControlSamplesCalled); } }
Fix for failing build. Peer review pending from TJ.
caintegrator2-war/test/src/gov/nih/nci/caintegrator2/web/action/study/management/SaveControlSamplesActionTest.java
Fix for failing build. Peer review pending from TJ.
Java
bsd-3-clause
fc9c67045c620197fff615e02d055902bec3efe7
0
AlexRNL/SubtitleCorrector
package com.alexrnl.subtitlecorrector.gui.view; import static com.alexrnl.subtitlecorrector.common.TranslationKeys.KEYS; import java.io.InputStream; import java.io.PrintStream; import java.util.Scanner; import com.alexrnl.commons.translation.Translator; import com.alexrnl.commons.utils.Word; import com.alexrnl.subtitlecorrector.common.TranslationKeys.UserPrompt.Console; import com.alexrnl.subtitlecorrector.service.SessionParameters; import com.alexrnl.subtitlecorrector.service.UserPrompt; import com.alexrnl.subtitlecorrector.service.UserPromptAnswer; /** * A console implementation for the {@link UserPrompt} interface. * @author Alex */ public class ConsoleUserPrompt implements UserPrompt { /** The translator to use */ private Translator translator; /** The console input stream */ private final InputStream input; /** The scanner plugged on the console inputScanner */ private Scanner inputScanner; /** The console output */ private final PrintStream output; /** * Constructor #1.<br /> * @param input * the input stream to use for reading the user's answers. * @param output * the output to use for displaying information to the user. */ public ConsoleUserPrompt (final InputStream input, final PrintStream output) { super(); this.input = input; this.output = output; } /** * Constructor #2.<br /> * Build a {@link ConsoleUserPrompt} with the {@link System#in} and {@link System#out}. */ public ConsoleUserPrompt () { this(System.in, System.out); } @Override public void setTranslator (final Translator translator) { this.translator = translator; } @Override public void startSession (final SessionParameters parameters) { if (translator == null) { throw new IllegalStateException("Cannot start session without translator set"); } if (inputScanner != null) { throw new IllegalStateException("Session was not properly stop, inputScanner was not null"); } inputScanner = new Scanner(input); } @Override public void stopSession () { if (inputScanner == null) { throw new IllegalStateException("Session was not properly started, inputScanner was null"); } inputScanner.close(); inputScanner = null; } @Override public UserPromptAnswer confirm (final String context, final Word original, final String replacement) { if (inputScanner == null) { throw new IllegalStateException("Session was not properly started, inputScanner is null, " + "cannot confirm replacement"); } String answer; boolean cancelled = false; boolean rememberChoice; final Console consoleKey = KEYS.userPrompt().console(); final String yes = translator.get(consoleKey.yes()); final String no = translator.get(consoleKey.no()); final String yesNoChoice = yes + "/" + no + " > "; output.println(translator.get(consoleKey.replace(), original, replacement)); if (context != null) { output.println(translator.get(consoleKey.context())); output.println(context); } output.print(yesNoChoice); final boolean keep = inputScanner.nextLine().startsWith(yes); if (keep) { answer = replacement; } else { output.println(translator.get(consoleKey.changeReplacement())); answer = inputScanner.nextLine(); if (answer.isEmpty()) { cancelled = true; } } output.println(translator.get(consoleKey.rememberChoice())); output.print(yesNoChoice); rememberChoice = inputScanner.nextLine().startsWith(yes); return new UserPromptAnswer(answer, cancelled, rememberChoice); } @Override public UserPromptAnswer confirm (final Word original, final String replacement) { return confirm(null, original, replacement); } }
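In the new version of confirm() above, the yes/no choice is written with output.print() instead of println(), so the cursor stays on the same line as the prompt and the user's answer is typed directly after it. The difference can be shown independently of the Translator and session machinery with the small stand-alone sketch below (the class name PromptNewlineSketch is hypothetical).

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

public class PromptNewlineSketch {

    public static void main(String[] args) {
        ByteArrayOutputStream printlnOut = new ByteArrayOutputStream();
        new PrintStream(printlnOut).println("y/n > ");   // old behaviour
        ByteArrayOutputStream printOut = new ByteArrayOutputStream();
        new PrintStream(printOut).print("y/n > ");       // new behaviour

        // println appends a line separator, pushing the user's answer onto the
        // next console line; print leaves the cursor right after the prompt.
        System.out.println(printlnOut.toString().endsWith(System.lineSeparator())); // true
        System.out.println(printOut.toString().endsWith(System.lineSeparator()));   // false
    }
}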
src/main/java/com/alexrnl/subtitlecorrector/gui/view/ConsoleUserPrompt.java
package com.alexrnl.subtitlecorrector.gui.view; import static com.alexrnl.subtitlecorrector.common.TranslationKeys.KEYS; import java.io.InputStream; import java.io.PrintStream; import java.util.Scanner; import com.alexrnl.commons.translation.Translator; import com.alexrnl.commons.utils.Word; import com.alexrnl.subtitlecorrector.common.TranslationKeys.UserPrompt.Console; import com.alexrnl.subtitlecorrector.service.SessionParameters; import com.alexrnl.subtitlecorrector.service.UserPrompt; import com.alexrnl.subtitlecorrector.service.UserPromptAnswer; /** * A console implementation for the {@link UserPrompt} interface. * @author Alex */ public class ConsoleUserPrompt implements UserPrompt { /** The translator to use */ private Translator translator; /** The console input stream */ private final InputStream input; /** The scanner plugged on the console inputScanner */ private Scanner inputScanner; /** The console output */ private final PrintStream output; /** * Constructor #1.<br /> * @param input * the input stream to use for reading the user's answers. * @param output * the output to use for displaying information to the user. */ public ConsoleUserPrompt (final InputStream input, final PrintStream output) { super(); this.input = input; this.output = output; } /** * Constructor #2.<br /> * Build a {@link ConsoleUserPrompt} with the {@link System#in} and {@link System#out}. */ public ConsoleUserPrompt () { this(System.in, System.out); } @Override public void setTranslator (final Translator translator) { this.translator = translator; } @Override public void startSession (final SessionParameters parameters) { if (translator == null) { throw new IllegalStateException("Cannot start session without translator set"); } if (inputScanner != null) { throw new IllegalStateException("Session was not properly stop, inputScanner was not null"); } inputScanner = new Scanner(input); } @Override public void stopSession () { if (inputScanner == null) { throw new IllegalStateException("Session was not properly started, inputScanner was null"); } inputScanner.close(); inputScanner = null; } @Override public UserPromptAnswer confirm (final String context, final Word original, final String replacement) { if (inputScanner == null) { throw new IllegalStateException("Session was not properly started, inputScanner is null, " + "cannot confirm replacement"); } String answer; boolean cancelled = false; boolean rememberChoice; final Console consoleKey = KEYS.userPrompt().console(); final String yes = translator.get(consoleKey.yes()); final String no = translator.get(consoleKey.no()); final String yesNoChoice = yes + "/" + no + " > "; output.println(translator.get(consoleKey.replace(), original, replacement)); if (context != null) { output.println(translator.get(consoleKey.context()) + "\n\t"); output.println(context); } output.println(yesNoChoice); final boolean keep = inputScanner.nextLine().startsWith(yes); if (keep) { answer = replacement; } else { output.println(translator.get(consoleKey.changeReplacement())); answer = inputScanner.nextLine(); if (answer.isEmpty()) { cancelled = true; } } output.println(translator.get(consoleKey.rememberChoice())); output.println(yesNoChoice); rememberChoice = inputScanner.nextLine().startsWith(yes); return new UserPromptAnswer(answer, cancelled, rememberChoice); } @Override public UserPromptAnswer confirm (final Word original, final String replacement) { return confirm(null, original, replacement); } }
Remove useless new lines in console prompt
src/main/java/com/alexrnl/subtitlecorrector/gui/view/ConsoleUserPrompt.java
Remove useless new lines in console prompt
Java
mit
f80ae308cc8cc164eb0f1076c81a0971c10bdcad
0
om3g4zell/CityBuilderJSFML
package sim; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import org.jsfml.graphics.Color; import org.jsfml.graphics.IntRect; import org.jsfml.graphics.RenderWindow; import org.jsfml.system.Time; import org.jsfml.system.Vector2f; import org.jsfml.system.Vector2i; import org.jsfml.window.Mouse; import org.jsfml.window.VideoMode; import org.jsfml.window.event.Event; import graphics.Tile.TileType; import graphics.BuildingProjector; import graphics.FontManager; import graphics.TextureManager; import graphics.Tile; import graphics.TileMap; import graphics.ZoneMapLayer; import gui.CheckBox; import gui.GameSpeedGui; import gui.StatsGui; import gui.TileInfoGui; import gui.TileSelector; import gui.ZoneDrawingGui; import maths.Distance; import world.Building; import world.Building.BuildingType; import world.CityStats; import world.Need; import world.Resource; import world.ResourcesMap; import world.ResourcesStack; import world.Zone; import world.Zone.ZoneClass; import world.ZoneMap; /** * Contains init, update and render. */ public class Sim { // Constants. protected static final Vector2i TILEMAP_SIZE = new Vector2i(80, 45); protected static final Vector2f TILE_SIZE = new Vector2f(16.f, 16.f); // Attributes. protected RenderWindow window; protected TileMap tilemap; protected List<ArrayList<Tile>> tiles; protected ResourcesMap resourcesMap; protected ResourcesMap cachedResourceMap; protected List<Building> buildings; protected CityStats cityStats; protected TextureManager textureManager; protected FontManager fontManager; protected StatsGui statsGui; protected TileSelector tileSelector; protected TileInfoGui tileInfoGui; protected boolean displayTileInfo; protected Stack<Map<Integer, Building.BuildingType>> buildingStackRequired; protected CheckBox checkbox1; protected ZoneMap zoneMap; protected ZoneMapLayer zoneMapLayer; protected ZoneDrawingGui zoneDrawingGui; protected GameSpeedGui gameSpeedGui; protected Time simulationSpeedTimer; /** * Constructor * @param width : width of the window * @param height : height of the window * @param title : title of the window */ public Sim(int width, int height, String title) { this.window = new RenderWindow(new VideoMode(width, height), title); this.displayTileInfo = false; } /** * Inits the simulation. */ public void init() { // Inits the tiles array. this.tiles = new ArrayList<ArrayList<Tile>>(); for(int i = 0 ; i < TILEMAP_SIZE.y ; ++i) { ArrayList<Tile> row = new ArrayList<Tile>(); for(int j = 0 ; j < TILEMAP_SIZE.x ; ++j) { row.add(new Tile(TileType.TERRAIN_GRASS, new Vector2i(j, i))); } this.tiles.add(row); } // Instanciate the TextureManager this.textureManager = new TextureManager(); // Instanciate the fontManager this.fontManager = new FontManager(); //Instanciate the GUI this.statsGui = new StatsGui(textureManager, fontManager); this.tileSelector = new TileSelector(this.window, this.textureManager, TILEMAP_SIZE, TILE_SIZE); // Create the resources map. this.resourcesMap = new ResourcesMap(TILEMAP_SIZE); // Clone the resources map this.cachedResourceMap = this.resourcesMap.cloneResourcesMap(); // Create the buildings list. this.buildings = new ArrayList<Building>(); // Create a checkbox this.checkbox1 = new CheckBox(10, 100 , this.textureManager, this.fontManager , "Afficher les zones", 0); // Create the city stats. 
this.cityStats = new CityStats(); // Create the zoneMap this.zoneMap = new ZoneMap(TILEMAP_SIZE.x, TILEMAP_SIZE.y); // Create the game speed GUI this.gameSpeedGui = new GameSpeedGui(textureManager, fontManager, this.window.getSize().x - 80, 20); // Create the zoneMapLayer this.zoneMapLayer = new ZoneMapLayer(this.zoneMap); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.FREE, new Color(12, 52, 30, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.COMMERCIAL, new Color(125, 193, 129, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.CULTURAL, new Color(51, 153, 255, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.INDUSTRY, new Color(227, 168, 87, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.ROAD, new Color(220, 220, 220, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.RESIDENTIAL, new Color(70, 0, 0, 170)); // Houses. this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(31, 20))); /*this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(33, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(35, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(37, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(31, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(33, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(35, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(37, 23)));*/ // Generator. //this.buildings.add(new Building(BuildingType.GENERATOR, new Vector2i(39, 21))); // Water station. //this.buildings.add(new Building(BuildingType.HYDROLIC_STATION, new Vector2i(39, 23))); // Grossery store //this.buildings.add(new Building(BuildingType.GROCERY_STORE, new Vector2i(40, 21))); // Roads this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(30, 20))); this.buildings.add(new Building(BuildingType.ANTENNA_4G, new Vector2i(35, 20))); // Inits the tilemap. this.tilemap = new TileMap(TILEMAP_SIZE, TILE_SIZE); this.tilemap.addTypeColor(TileType.TERRAIN_GRASS, new Color(0, 70, 0)); this.tilemap.addTypeColor(TileType.BUILDING_HOUSE, new Color(70, 0, 0)); this.tilemap.addTypeColor(TileType.BUILDING_ROAD, new Color(190, 190, 190)); this.tilemap.addTypeColor(TileType.BUILDING_GENERATOR, new Color(227, 168, 87)); this.tilemap.addTypeColor(TileType.BUILDING_HYDROLIC_STATION, new Color(51, 153, 255)); this.tilemap.addTypeColor(TileType.BUILDING_SUPERMARKET, new Color(125, 193, 129)); this.tilemap.addTypeColor(TileType.BUILDING_ROAD, new Color(220, 220, 220)); this.tilemap.addTypeColor(TileType.BUILDING_ANTENNA_4G, new Color(63, 63, 63)); this.tilemap.setTiles(this.tiles); // The stack of the maps which contains the required buildings of everyone. this.buildingStackRequired = new Stack<Map<Integer, Building.BuildingType>>(); // Instanciate the tileInfoGui this.tileInfoGui = new TileInfoGui(this.tiles, this.fontManager); // Instanciate the zone drawing GUI. this.zoneDrawingGui = new ZoneDrawingGui(this.textureManager, this.fontManager); // Building spawn timer. this.simulationSpeedTimer = Time.ZERO; } /** * Counts the number of buildings per building type. * @param buildings : the map of the buildings (association of building's ID and building type). 
*/ public Map<Building.BuildingType, Integer> countBuildingsPerType(Map<Integer, Building.BuildingType> buildings) { Map<Building.BuildingType, Integer> buildingCounts = new HashMap<Building.BuildingType, Integer>(); for(Map.Entry<Integer, Building.BuildingType> entry : buildings.entrySet()) { Building.BuildingType buildingType = entry.getValue(); // Do not count NONE. if(buildingType == Building.BuildingType.NONE) continue; if(buildingCounts.containsKey(buildingType)) { Integer count = buildingCounts.get(buildingType); count = new Integer(count.intValue() + 1); buildingCounts.put(buildingType, count); } else { buildingCounts.put(buildingType, 1); } } return buildingCounts; } /** * Spawns the new buildings. * * TODO: Separate the algorithm in sub-functions. */ public void spawnBuildings() { // Look into the required buildings stack. if(this.buildingStackRequired.empty()) return; // The map collecting the required buildings. Map<Integer, Building.BuildingType> buildingsRequired = this.buildingStackRequired.peek(); // First count the required buildings. Map<Building.BuildingType, Integer> buildingCounts = countBuildingsPerType(buildingsRequired); // Get the most required. Map.Entry<Building.BuildingType, Integer> maxEntry = null; for(Map.Entry<Building.BuildingType, Integer> entry : buildingCounts.entrySet()) { if(maxEntry == null || entry.getValue() > maxEntry.getValue()) { maxEntry = entry; } } // We have a building type. if(maxEntry != null) { Building.BuildingType buildingType = maxEntry.getKey(); Vector2i position = new Vector2i(0, 0); // Now get the position of everyone asking for that building type. for(Map.Entry<Integer, Building.BuildingType> entry : buildingsRequired.entrySet()) { Building.BuildingType btype = entry.getValue(); if(btype == buildingType) { Building building = null; // Get the building. for(Building b : this.buildings) { if(b.getId() == entry.getKey()) { building = b; break; } } // Add its position. if(building != null) { Vector2i centerPosition = new Vector2i(building.getHitbox().left + building.getHitbox().width / 2, building.getHitbox().top + building.getHitbox().height / 2); position = Vector2i.add(position, centerPosition); } } } // Compute the average position, aka the center of the search area. Vector2i centerOfSearchArea = new Vector2i((int)(position.x / maxEntry.getValue()), (int)(position.y / maxEntry.getValue())); // Get the further building from the average position, to compute the radius of the search area. float radius = 0.f; for(Map.Entry<Integer, Building.BuildingType> entry : buildingsRequired.entrySet()) { Building.BuildingType btype = entry.getValue(); if(btype == buildingType) { Building building = null; // Get the building. for(Building b : this.buildings) { if(b.getId() == entry.getKey()) { building = b; break; } } // Add its position. if(building != null) { Vector2i centerPosition = new Vector2i(building.getHitbox().left + building.getHitbox().width / 2, building.getHitbox().top + building.getHitbox().height / 2); float distance = (float)Distance.euclidean(centerOfSearchArea, centerPosition); if(distance > radius) radius = distance; } } } // Create a fake building. Building requiredBuilding = new Building(maxEntry.getKey(), new Vector2i(0, 0)); // We may need to expand the radius. radius = Math.max(radius, requiredBuilding.getRange()); // We use squared radius and squared euclidean distance for performance. double squaredRadius = Math.pow(radius, 2); // Map of the considered positions with the number of requiring building in range. 
HashMap<Vector2i, Integer> candidatesPositions = new HashMap<Vector2i, Integer>(); // Map of the positions where it lacks resources only with the number of requiring building in range. HashMap<Vector2i, Integer> candidatesPositionsLackingResources = new HashMap<Vector2i, Integer>(); // Missing resources for the required building. HashMap<Resource.ResourceType, Integer> missingResources = new HashMap<Resource.ResourceType, Integer>(); // Initiates missing resources to 0. for(Resource.ResourceType rtype : Resource.ResourceType.values()) missingResources.put(rtype, 0); // Check all resource map in square range. for(int x = Math.max(0, centerOfSearchArea.x - (int)radius) ; x < Math.min(resourcesMap.getSize().x, centerOfSearchArea.x + radius + 1) ; ++x) { for(int y = Math.max(0, centerOfSearchArea.y - (int)radius) ; y < Math.min(resourcesMap.getSize().y, centerOfSearchArea.y + radius + 1) ; ++y) { // Check only in radius. if(Distance.squaredEuclidean(centerOfSearchArea, new Vector2i(x, y)) <= squaredRadius) { // Check collision with other buildings. boolean collide = false; IntRect candidateHitbox = new IntRect(x, y, requiredBuilding.getHitbox().width, requiredBuilding.getHitbox().height); if(candidateHitbox.left < 0 || candidateHitbox.top < 0 || candidateHitbox.left + candidateHitbox.width >= TILEMAP_SIZE.x || candidateHitbox.top + candidateHitbox.height >= TILEMAP_SIZE.y) collide = true; for(Building b : this.buildings) { if(candidateHitbox.intersection(b.getHitbox()) != null) collide = true; } if(collide) { // This position is not suitable. continue; } // Check zone compatibility. boolean validZone = true; for(int rx = x ; rx < Math.min(x + requiredBuilding.getHitbox().width, TILEMAP_SIZE.x) ; rx++) { for(int ry = y ; ry < Math.min(y + requiredBuilding.getHitbox().height, TILEMAP_SIZE.y) ; ry++) { Zone zone = this.zoneMap.getZoneMap().get(ry).get(rx); // check if the zone is suitable for(ZoneClass zoneBuilding : requiredBuilding.getZoneClasses()) { if(!zone.getType().equals(zoneBuilding)) { validZone = false; }else { validZone = true; break; } } // if the building contain the free zone it's ok if(requiredBuilding.getZoneClasses().contains(ZoneClass.FREE)) { validZone = true; } // if isn't a valid zone break if(!validZone) break; } // if isn't a valid zone break if(!validZone) break; } if(!validZone) { // This zone is not suitable continue; } // Get the resources available for the building. ResourcesStack rstack = resourcesMap.getResources(x, y); for(int rx = x ; rx < Math.min(x + requiredBuilding.getHitbox().width, TILEMAP_SIZE.x) ; rx++) { for(int ry = y ; ry < Math.min(y + requiredBuilding.getHitbox().height, TILEMAP_SIZE.y) ; ry++) { rstack.add(resourcesMap.getResources(rx, ry)); } } // Check if they satisfy the needs. boolean allNeedsSatisfied = true; for(Need n : requiredBuilding.getNeeds()) { float minAmount = n.amount * n.fillFactor; // If one need is not satisfied to its minimum, we quit. if(rstack.get(n.type) < minAmount) { allNeedsSatisfied = false; int count = missingResources.get(n.type).intValue(); count += 1; missingResources.put(n.type, count); } } // Check how many buildings (which required the building construction) are in range of the required building. int inRange = 0; for(Map.Entry<Integer, Building.BuildingType> buildingRequiredEntry : buildingsRequired.entrySet()) { if(buildingRequiredEntry.getValue() == requiredBuilding.getType()) { Building building = null; // Get the building. 
for(Building b : this.buildings) { if(b.getId() == buildingRequiredEntry.getKey()) { building = b; break; } } // Check if in range. if(building != null) { Vector2i buildingCenter = new Vector2i(building.getHitbox().left + building.getHitbox().width / 2, building.getHitbox().top + building.getHitbox().height / 2); int distance = (int)Distance.euclidean(buildingCenter, new Vector2i(x, y)); if(distance < requiredBuilding.getRange()) { inRange++; } } } } // Add to the candidates positions if all resources are available. if(allNeedsSatisfied) candidatesPositions.put(new Vector2i(x, y), inRange); else candidatesPositionsLackingResources.put(new Vector2i(x, y), inRange); } } } // Check the position which reach the most buildings AND is the closer to the center of the search area. Map.Entry<Vector2i, Integer> bestPosition = null; double mindistance = Double.MAX_VALUE; for(Map.Entry<Vector2i, Integer> entry : candidatesPositions.entrySet()) { if((bestPosition == null) || (entry.getValue() >= bestPosition.getValue() && mindistance > Distance.euclidean(entry.getKey(), centerOfSearchArea))) { bestPosition = entry; mindistance = Distance.euclidean(bestPosition.getKey(), centerOfSearchArea); } } // Add the building to the position. if(bestPosition != null) { this.buildings.add(new Building(maxEntry.getKey(), bestPosition.getKey())); // We spawned the building, so get it out of the stack. this.buildingStackRequired.pop(); System.out.println("Spawning : " + maxEntry.getKey().toString() + " @ " + bestPosition.getKey().x + ", " + bestPosition.getKey().y); System.out.println("\tdistance to CoSA: " + Distance.euclidean(bestPosition.getKey(), centerOfSearchArea)); System.out.println("\tefficiency: " + bestPosition.getValue() + "/" + maxEntry.getValue()); } else { // Get the most rare resource. Map.Entry<Resource.ResourceType, Integer> mostRareResourceEntry = null; for(Map.Entry<Resource.ResourceType, Integer> entry : missingResources.entrySet()) { if(mostRareResourceEntry == null || entry.getValue() > mostRareResourceEntry.getValue()) { mostRareResourceEntry = entry; } } Resource.ResourceType rareResource = mostRareResourceEntry.getKey(); // Every building asking for the current building type should ask for its pre-requisite. 
Map<Integer, Building.BuildingType> prerequisiteBuildingMap = new HashMap<Integer, Building.BuildingType>(); for(Map.Entry<Integer, Building.BuildingType> entry : buildingsRequired.entrySet()) { if(requiredBuilding.getType() == entry.getValue()) prerequisiteBuildingMap.put(entry.getKey(), Building.getBuildingTypeGenerating(rareResource)); } this.buildingStackRequired.push(prerequisiteBuildingMap); System.out.println("No suitable position found for : " + maxEntry.getKey().toString()); System.out.println("Most rare resource : " + rareResource.toString()); System.out.println("Asking to spawn : " + Building.getBuildingTypeGenerating(rareResource).toString()); } } } /** * Spawn road with the zone map */ public void spawnRoad() { boolean collisionFlag = false; // run the map for(int y = 0 ; y < this.zoneMap.getSize().y ; y++) { for(int x = 0 ; x < this.zoneMap.getSize().x ; x++) { // reset the collision flag collisionFlag = false; // check if a zoneType is road if(this.zoneMap.getZoneMap().get(y).get(x).getType().equals(ZoneClass.ROAD)) { // check if not building in this zone for(int i = 0 ; i < this.buildings.size() ; i++) { // if building stop if(this.buildings.get(i).getHitbox().contains(x, y)) { collisionFlag = true; break; } } // we spawn the road if(!collisionFlag) { this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(x,y))); System.out.println("pop !"); } } } } this.zoneDrawingGui.setNewRoadAdded(false); } /** * Updates all the simulation. * @param dt : frame of time to use */ public void update(Time dt) { // Update the simulation timer. this.simulationSpeedTimer = Time.add(this.simulationSpeedTimer, Time.mul(dt, this.gameSpeedGui.getSpeedCoeff())); // Spawn road if(!this.gameSpeedGui.isInPause() && this.simulationSpeedTimer.asSeconds() >= 1.f) { if(this.zoneDrawingGui.newRoadAdded()) { spawnRoad(); } // Reset the resources. this.resourcesMap.reset(); // Generate resources. for(Building b : this.buildings) { b.generateResources(this.resourcesMap); } // Clone the resourceMap this.cachedResourceMap = this.resourcesMap.cloneResourcesMap(); } // We update tile infos after generate. if(this.displayTileInfo) this.tileInfoGui.update(this.cachedResourceMap, this.tileSelector, this.buildings); if(!this.gameSpeedGui.isInPause() && this.simulationSpeedTimer.asSeconds() >= 1.f) { // Consume resources and get required buildings. Map<Integer, Building.BuildingType> buildingsRequired = new HashMap<Integer, Building.BuildingType>(); for(Building b : this.buildings) { BuildingType requiredBuilding = b.consumeResources(this.resourcesMap); // Don't do anything if none required. if(requiredBuilding != BuildingType.NONE) { buildingsRequired.put(b.getId(), requiredBuilding); } } if(this.buildingStackRequired.isEmpty()) { this.buildingStackRequired.push(buildingsRequired); } // Spawn buildings. spawnBuildings(); // Project buildings on the tilemap. BuildingProjector.project(this.buildings, this.tilemap); // Update CityStats this.cityStats.update(this.buildings); } // Do the time substraction here. if(!this.gameSpeedGui.isInPause() && this.simulationSpeedTimer.asSeconds() >= 1.f) { this.simulationSpeedTimer = Time.sub(this.simulationSpeedTimer, Time.getSeconds(1.f)); } // Update the tilemap. 
this.tilemap.update(); if(this.checkbox1.isChecked()) { this.zoneDrawingGui.update(dt, this.window, this.zoneMap, this.tileSelector); this.zoneMapLayer.update(); } //Update stats this.gameSpeedGui.update(dt); this.statsGui.setMoney(this.cityStats.getMoney()); this.statsGui.setPopulation(this.cityStats.getPopulation()); this.tileSelector.update(); } /** * Renders all the simulation. */ public void render() { this.window.clear(Color.WHITE); ///////////// this.window.draw(this.tilemap); if(this.checkbox1.isChecked()) { this.window.draw(this.zoneMapLayer); this.window.draw(this.zoneDrawingGui); } this.window.draw(this.tileSelector); this.window.draw(this.statsGui); this.window.draw(checkbox1); this.window.draw(gameSpeedGui); if(this.displayTileInfo) this.window.draw(tileInfoGui); ///////////// this.window.display(); } /** * Returns the window. * @return the window used by the simulation */ public RenderWindow getWindow() { return this.window; } /** * Handles the event. * @param event : the JSFML event to handle */ public void handleEvent(Event event) { if(event.type == Event.Type.MOUSE_BUTTON_RELEASED && event.asMouseButtonEvent().button == Mouse.Button.MIDDLE) { this.displayTileInfo = !this.displayTileInfo; } this.checkbox1.handleEvent(event); if(this.checkbox1.isChecked()) this.zoneDrawingGui.handleEvent(event); this.gameSpeedGui.handleEvent(event); } }
CityBuilderJSFML/src/sim/Sim.java
package sim; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import org.jsfml.graphics.Color; import org.jsfml.graphics.IntRect; import org.jsfml.graphics.RenderWindow; import org.jsfml.system.Time; import org.jsfml.system.Vector2f; import org.jsfml.system.Vector2i; import org.jsfml.window.Mouse; import org.jsfml.window.VideoMode; import org.jsfml.window.event.Event; import graphics.Tile.TileType; import graphics.BuildingProjector; import graphics.FontManager; import graphics.TextureManager; import graphics.Tile; import graphics.TileMap; import graphics.ZoneMapLayer; import gui.CheckBox; import gui.GameSpeedGui; import gui.StatsGui; import gui.TileInfoGui; import gui.TileSelector; import gui.ZoneDrawingGui; import maths.Distance; import world.Building; import world.Building.BuildingType; import world.CityStats; import world.Need; import world.Resource; import world.ResourcesMap; import world.ResourcesStack; import world.Zone; import world.Zone.ZoneClass; import world.ZoneMap; /** * Contains init, update and render. */ public class Sim { // Constants. protected static final Vector2i TILEMAP_SIZE = new Vector2i(80, 45); protected static final Vector2f TILE_SIZE = new Vector2f(16.f, 16.f); // Attributes. protected RenderWindow window; protected TileMap tilemap; protected List<ArrayList<Tile>> tiles; protected ResourcesMap resourcesMap; protected ResourcesMap cachedResourceMap; protected List<Building> buildings; protected CityStats cityStats; protected TextureManager textureManager; protected FontManager fontManager; protected StatsGui statsGui; protected TileSelector tileSelector; protected TileInfoGui tileInfoGui; protected boolean displayTileInfo; protected Stack<Map<Integer, Building.BuildingType>> buildingStackRequired; protected CheckBox checkbox1; protected ZoneMap zoneMap; protected ZoneMapLayer zoneMapLayer; protected ZoneDrawingGui zoneDrawingGui; protected GameSpeedGui gameSpeedGui; protected Time simulationSpeedTimer; /** * Constructor * @param width : width of the window * @param height : height of the window * @param title : title of the window */ public Sim(int width, int height, String title) { this.window = new RenderWindow(new VideoMode(width, height), title); this.displayTileInfo = false; } /** * Inits the simulation. */ public void init() { // Inits the tiles array. this.tiles = new ArrayList<ArrayList<Tile>>(); for(int i = 0 ; i < TILEMAP_SIZE.y ; ++i) { ArrayList<Tile> row = new ArrayList<Tile>(); for(int j = 0 ; j < TILEMAP_SIZE.x ; ++j) { row.add(new Tile(TileType.TERRAIN_GRASS, new Vector2i(j, i))); } this.tiles.add(row); } // Instanciate the TextureManager this.textureManager = new TextureManager(); // Instanciate the fontManager this.fontManager = new FontManager(); //Instanciate the GUI this.statsGui = new StatsGui(textureManager, fontManager); this.tileSelector = new TileSelector(this.window, this.textureManager, TILEMAP_SIZE, TILE_SIZE); // Create the resources map. this.resourcesMap = new ResourcesMap(TILEMAP_SIZE); // Clone the resources map this.cachedResourceMap = this.resourcesMap.cloneResourcesMap(); // Create the buildings list. this.buildings = new ArrayList<Building>(); // Create a checkbox this.checkbox1 = new CheckBox(10, 100 , this.textureManager, this.fontManager , "Afficher les zones", 0); // Create the city stats. 
this.cityStats = new CityStats(); // Create the zoneMap this.zoneMap = new ZoneMap(TILEMAP_SIZE.x, TILEMAP_SIZE.y); // Create the game speed GUI this.gameSpeedGui = new GameSpeedGui(textureManager, fontManager, this.window.getSize().x - 80, 20); // Create the zoneMapLayer this.zoneMapLayer = new ZoneMapLayer(this.zoneMap); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.FREE, new Color(12, 52, 30, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.COMMERCIAL, new Color(125, 193, 129, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.CULTURAL, new Color(51, 153, 255, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.INDUSTRY, new Color(227, 168, 87, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.ROAD, new Color(220, 220, 220, 170)); this.zoneMapLayer.addTypeColor(Zone.ZoneClass.RESIDENTIAL, new Color(70, 0, 0, 170)); // Houses. this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(31, 20))); /*this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(33, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(35, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(37, 20))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(31, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(33, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(35, 23))); this.buildings.add(new Building(BuildingType.HOUSE, new Vector2i(37, 23)));*/ // Generator. //this.buildings.add(new Building(BuildingType.GENERATOR, new Vector2i(39, 21))); // Water station. //this.buildings.add(new Building(BuildingType.HYDROLIC_STATION, new Vector2i(39, 23))); // Grossery store //this.buildings.add(new Building(BuildingType.GROCERY_STORE, new Vector2i(40, 21))); // Roads this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(30, 20))); this.buildings.add(new Building(BuildingType.ANTENNA_4G, new Vector2i(35, 20))); // Inits the tilemap. this.tilemap = new TileMap(TILEMAP_SIZE, TILE_SIZE); this.tilemap.addTypeColor(TileType.TERRAIN_GRASS, new Color(0, 70, 0)); this.tilemap.addTypeColor(TileType.BUILDING_HOUSE, new Color(70, 0, 0)); this.tilemap.addTypeColor(TileType.BUILDING_ROAD, new Color(190, 190, 190)); this.tilemap.addTypeColor(TileType.BUILDING_GENERATOR, new Color(227, 168, 87)); this.tilemap.addTypeColor(TileType.BUILDING_HYDROLIC_STATION, new Color(51, 153, 255)); this.tilemap.addTypeColor(TileType.BUILDING_SUPERMARKET, new Color(125, 193, 129)); this.tilemap.addTypeColor(TileType.BUILDING_ROAD, new Color(220, 220, 220)); this.tilemap.addTypeColor(TileType.BUILDING_ANTENNA_4G, new Color(63, 63, 63)); this.tilemap.setTiles(this.tiles); // The stack of the maps which contains the required buildings of everyone. this.buildingStackRequired = new Stack<Map<Integer, Building.BuildingType>>(); // Instanciate the tileInfoGui this.tileInfoGui = new TileInfoGui(this.tiles, this.fontManager); // Instanciate the zone drawing GUI. this.zoneDrawingGui = new ZoneDrawingGui(this.textureManager, this.fontManager); // Building spawn timer. this.simulationSpeedTimer = Time.ZERO; } /** * Spawns the new buildings. * * TODO: Separate the algorithm in sub-functions. */ public void spawnBuildings() { // Look into the required buildings stack. if(this.buildingStackRequired.empty()) return; // The map collecting the required buildings. Map<Integer, Building.BuildingType> buildingsRequired = this.buildingStackRequired.peek(); // First count the required buildings. 
Map<Building.BuildingType, Integer> buildingCounts = new HashMap<Building.BuildingType, Integer>(); for(Map.Entry<Integer, Building.BuildingType> entry : buildingsRequired.entrySet()) { Building.BuildingType buildingType = entry.getValue(); // Do not count NONE. if(buildingType == Building.BuildingType.NONE) continue; if(buildingCounts.containsKey(buildingType)) { Integer count = buildingCounts.get(buildingType); count = new Integer(count.intValue() + 1); buildingCounts.put(buildingType, count); } else { buildingCounts.put(buildingType, 1); } } // Get the most required. Map.Entry<Building.BuildingType, Integer> maxEntry = null; for(Map.Entry<Building.BuildingType, Integer> entry : buildingCounts.entrySet()) { if(maxEntry == null || entry.getValue() > maxEntry.getValue()) { maxEntry = entry; } } // We have a building type. if(maxEntry != null) { Building.BuildingType buildingType = maxEntry.getKey(); Vector2i position = new Vector2i(0, 0); // Now get the position of everyone asking for that building type. for(Map.Entry<Integer, Building.BuildingType> entry : buildingsRequired.entrySet()) { Building.BuildingType btype = entry.getValue(); if(btype == buildingType) { Building building = null; // Get the building. for(Building b : this.buildings) { if(b.getId() == entry.getKey()) { building = b; break; } } // Add its position. if(building != null) { Vector2i centerPosition = new Vector2i(building.getHitbox().left + building.getHitbox().width / 2, building.getHitbox().top + building.getHitbox().height / 2); position = Vector2i.add(position, centerPosition); } } } // Compute the average position, aka the center of the search area. Vector2i centerOfSearchArea = new Vector2i((int)(position.x / maxEntry.getValue()), (int)(position.y / maxEntry.getValue())); // Get the further building from the average position, to compute the radius of the search area. float radius = 0.f; for(Map.Entry<Integer, Building.BuildingType> entry : buildingsRequired.entrySet()) { Building.BuildingType btype = entry.getValue(); if(btype == buildingType) { Building building = null; // Get the building. for(Building b : this.buildings) { if(b.getId() == entry.getKey()) { building = b; break; } } // Add its position. if(building != null) { Vector2i centerPosition = new Vector2i(building.getHitbox().left + building.getHitbox().width / 2, building.getHitbox().top + building.getHitbox().height / 2); float distance = (float)Distance.euclidean(centerOfSearchArea, centerPosition); if(distance > radius) radius = distance; } } } // Create a fake building. Building requiredBuilding = new Building(maxEntry.getKey(), new Vector2i(0, 0)); // We may need to expand the radius. radius = Math.max(radius, requiredBuilding.getRange()); // We use squared radius and squared euclidean distance for performance. double squaredRadius = Math.pow(radius, 2); // Map of the considered positions with the number of requiring building in range. HashMap<Vector2i, Integer> candidatesPositions = new HashMap<Vector2i, Integer>(); // Map of the positions where it lacks resources only with the number of requiring building in range. HashMap<Vector2i, Integer> candidatesPositionsLackingResources = new HashMap<Vector2i, Integer>(); // Missing resources for the required building. HashMap<Resource.ResourceType, Integer> missingResources = new HashMap<Resource.ResourceType, Integer>(); // Initiates missing resources to 0. for(Resource.ResourceType rtype : Resource.ResourceType.values()) missingResources.put(rtype, 0); // Check all resource map in square range. 
for(int x = Math.max(0, centerOfSearchArea.x - (int)radius) ; x < Math.min(resourcesMap.getSize().x, centerOfSearchArea.x + radius + 1) ; ++x) { for(int y = Math.max(0, centerOfSearchArea.y - (int)radius) ; y < Math.min(resourcesMap.getSize().y, centerOfSearchArea.y + radius + 1) ; ++y) { // Check only in radius. if(Distance.squaredEuclidean(centerOfSearchArea, new Vector2i(x, y)) <= squaredRadius) { // Check collision with other buildings. boolean collide = false; IntRect candidateHitbox = new IntRect(x, y, requiredBuilding.getHitbox().width, requiredBuilding.getHitbox().height); if(candidateHitbox.left < 0 || candidateHitbox.top < 0 || candidateHitbox.left + candidateHitbox.width >= TILEMAP_SIZE.x || candidateHitbox.top + candidateHitbox.height >= TILEMAP_SIZE.y) collide = true; for(Building b : this.buildings) { if(candidateHitbox.intersection(b.getHitbox()) != null) collide = true; } if(collide) { // This position is not suitable. continue; } // Check zone compatibility. boolean validZone = true; for(int rx = x ; rx < Math.min(x + requiredBuilding.getHitbox().width, TILEMAP_SIZE.x) ; rx++) { for(int ry = y ; ry < Math.min(y + requiredBuilding.getHitbox().height, TILEMAP_SIZE.y) ; ry++) { Zone zone = this.zoneMap.getZoneMap().get(ry).get(rx); // check if the zone is suitable for(ZoneClass zoneBuilding : requiredBuilding.getZoneClasses()) { if(!zone.getType().equals(zoneBuilding)) { validZone = false; }else { validZone = true; break; } } // if the building contain the free zone it's ok if(requiredBuilding.getZoneClasses().contains(ZoneClass.FREE)) { validZone = true; } // if isn't a valid zone break if(!validZone) break; } // if isn't a valid zone break if(!validZone) break; } if(!validZone) { // This zone is not suitable continue; } // Get the resources available for the building. ResourcesStack rstack = resourcesMap.getResources(x, y); for(int rx = x ; rx < Math.min(x + requiredBuilding.getHitbox().width, TILEMAP_SIZE.x) ; rx++) { for(int ry = y ; ry < Math.min(y + requiredBuilding.getHitbox().height, TILEMAP_SIZE.y) ; ry++) { rstack.add(resourcesMap.getResources(rx, ry)); } } // Check if they satisfy the needs. boolean allNeedsSatisfied = true; for(Need n : requiredBuilding.getNeeds()) { float minAmount = n.amount * n.fillFactor; // If one need is not satisfied to its minimum, we quit. if(rstack.get(n.type) < minAmount) { allNeedsSatisfied = false; int count = missingResources.get(n.type).intValue(); count += 1; missingResources.put(n.type, count); } } // Check how many buildings (which required the building construction) are in range of the required building. int inRange = 0; for(Map.Entry<Integer, Building.BuildingType> buildingRequiredEntry : buildingsRequired.entrySet()) { if(buildingRequiredEntry.getValue() == requiredBuilding.getType()) { Building building = null; // Get the building. for(Building b : this.buildings) { if(b.getId() == buildingRequiredEntry.getKey()) { building = b; break; } } // Check if in range. if(building != null) { Vector2i buildingCenter = new Vector2i(building.getHitbox().left + building.getHitbox().width / 2, building.getHitbox().top + building.getHitbox().height / 2); int distance = (int)Distance.euclidean(buildingCenter, new Vector2i(x, y)); if(distance < requiredBuilding.getRange()) { inRange++; } } } } // Add to the candidates positions if all resources are available. 
if(allNeedsSatisfied) candidatesPositions.put(new Vector2i(x, y), inRange); else candidatesPositionsLackingResources.put(new Vector2i(x, y), inRange); } } } // Check the position which reach the most buildings AND is the closer to the center of the search area. Map.Entry<Vector2i, Integer> bestPosition = null; double mindistance = Double.MAX_VALUE; for(Map.Entry<Vector2i, Integer> entry : candidatesPositions.entrySet()) { if((bestPosition == null) || (entry.getValue() >= bestPosition.getValue() && mindistance > Distance.euclidean(entry.getKey(), centerOfSearchArea))) { bestPosition = entry; mindistance = Distance.euclidean(bestPosition.getKey(), centerOfSearchArea); } } // Add the building to the position. if(bestPosition != null) { this.buildings.add(new Building(maxEntry.getKey(), bestPosition.getKey())); // We spawned the building, so get it out of the stack. this.buildingStackRequired.pop(); System.out.println("Spawning : " + maxEntry.getKey().toString() + " @ " + bestPosition.getKey().x + ", " + bestPosition.getKey().y); System.out.println("\tdistance to CoSA: " + Distance.euclidean(bestPosition.getKey(), centerOfSearchArea)); System.out.println("\tefficiency: " + bestPosition.getValue() + "/" + maxEntry.getValue()); } else { // Get the most rare resource. Map.Entry<Resource.ResourceType, Integer> mostRareResourceEntry = null; for(Map.Entry<Resource.ResourceType, Integer> entry : missingResources.entrySet()) { if(mostRareResourceEntry == null || entry.getValue() > mostRareResourceEntry.getValue()) { mostRareResourceEntry = entry; } } Resource.ResourceType rareResource = mostRareResourceEntry.getKey(); // Every building asking for the current building type should ask for its pre-requisite. Map<Integer, Building.BuildingType> prerequisiteBuildingMap = new HashMap<Integer, Building.BuildingType>(); for(Map.Entry<Integer, Building.BuildingType> entry : buildingsRequired.entrySet()) { if(requiredBuilding.getType() == entry.getValue()) prerequisiteBuildingMap.put(entry.getKey(), Building.getBuildingTypeGenerating(rareResource)); } this.buildingStackRequired.push(prerequisiteBuildingMap); System.out.println("No suitable position found for : " + maxEntry.getKey().toString()); System.out.println("Most rare resource : " + rareResource.toString()); System.out.println("Asking to spawn : " + Building.getBuildingTypeGenerating(rareResource).toString()); } } } /** * Spawn road with the zone map */ public void spawnRoad() { boolean collisionFlag = false; // run the map for(int y = 0 ; y < this.zoneMap.getSize().y ; y++) { for(int x = 0 ; x < this.zoneMap.getSize().x ; x++) { // reset the collision flag collisionFlag = false; // check if a zoneType is road if(this.zoneMap.getZoneMap().get(y).get(x).getType().equals(ZoneClass.ROAD)) { // check if not building in this zone for(int i = 0 ; i < this.buildings.size() ; i++) { // if building stop if(this.buildings.get(i).getHitbox().contains(x, y)) { collisionFlag = true; break; } } // we spawn the road if(!collisionFlag) { this.buildings.add(new Building(BuildingType.ROAD, new Vector2i(x,y))); System.out.println("pop !"); } } } } this.zoneDrawingGui.setNewRoadAdded(false); } /** * Updates all the simulation. * @param dt : frame of time to use */ public void update(Time dt) { // Update the simulation timer. 
this.simulationSpeedTimer = Time.add(this.simulationSpeedTimer, Time.mul(dt, this.gameSpeedGui.getSpeedCoeff())); // Spawn road if(!this.gameSpeedGui.isInPause() && this.simulationSpeedTimer.asSeconds() >= 1.f) { if(this.zoneDrawingGui.newRoadAdded()) { spawnRoad(); } // Reset the resources. this.resourcesMap.reset(); // Generate resources. for(Building b : this.buildings) { b.generateResources(this.resourcesMap); } // Clone the resourceMap this.cachedResourceMap = this.resourcesMap.cloneResourcesMap(); } // We update tile infos after generate. if(this.displayTileInfo) this.tileInfoGui.update(this.cachedResourceMap, this.tileSelector, this.buildings); if(!this.gameSpeedGui.isInPause() && this.simulationSpeedTimer.asSeconds() >= 1.f) { // Consume resources and get required buildings. Map<Integer, Building.BuildingType> buildingsRequired = new HashMap<Integer, Building.BuildingType>(); for(Building b : this.buildings) { BuildingType requiredBuilding = b.consumeResources(this.resourcesMap); // Don't do anything if none required. if(requiredBuilding != BuildingType.NONE) { buildingsRequired.put(b.getId(), requiredBuilding); } } if(this.buildingStackRequired.isEmpty()) { this.buildingStackRequired.push(buildingsRequired); } // Spawn buildings. spawnBuildings(); // Project buildings on the tilemap. BuildingProjector.project(this.buildings, this.tilemap); // Update CityStats this.cityStats.update(this.buildings); } // Do the time substraction here. if(!this.gameSpeedGui.isInPause() && this.simulationSpeedTimer.asSeconds() >= 1.f) { this.simulationSpeedTimer = Time.sub(this.simulationSpeedTimer, Time.getSeconds(1.f)); } // Update the tilemap. this.tilemap.update(); if(this.checkbox1.isChecked()) { this.zoneDrawingGui.update(dt, this.window, this.zoneMap, this.tileSelector); this.zoneMapLayer.update(); } //Update stats this.gameSpeedGui.update(dt); this.statsGui.setMoney(this.cityStats.getMoney()); this.statsGui.setPopulation(this.cityStats.getPopulation()); this.tileSelector.update(); } /** * Renders all the simulation. */ public void render() { this.window.clear(Color.WHITE); ///////////// this.window.draw(this.tilemap); if(this.checkbox1.isChecked()) { this.window.draw(this.zoneMapLayer); this.window.draw(this.zoneDrawingGui); } this.window.draw(this.tileSelector); this.window.draw(this.statsGui); this.window.draw(checkbox1); this.window.draw(gameSpeedGui); if(this.displayTileInfo) this.window.draw(tileInfoGui); ///////////// this.window.display(); } /** * Returns the window. * @return the window used by the simulation */ public RenderWindow getWindow() { return this.window; } /** * Handles the event. * @param event : the JSFML event to handle */ public void handleEvent(Event event) { if(event.type == Event.Type.MOUSE_BUTTON_RELEASED && event.asMouseButtonEvent().button == Mouse.Button.MIDDLE) { this.displayTileInfo = !this.displayTileInfo; } this.checkbox1.handleEvent(event); if(this.checkbox1.isChecked()) this.zoneDrawingGui.handleEvent(event); this.gameSpeedGui.handleEvent(event); } }
Creation of the 'countBuildingsPerType' method.
CityBuilderJSFML/src/sim/Sim.java
Creation of the 'countBuildingsPerType' method.
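The commit in this record extracts countBuildingsPerType, which tallies the requested building types into a HashMap using an explicit containsKey/put pair. As a hedged illustration only (the BuildingType constants below are stand-ins, not the full enum from world.Building), the same counting idiom can be written with Map.merge:

import java.util.HashMap;
import java.util.Map;

public class BuildingTypeCountSketch {

    // Stand-in for world.Building.BuildingType; only a few constants are shown.
    enum BuildingType { NONE, HOUSE, ROAD, GENERATOR }

    // Counts how many requesters asked for each building type, skipping NONE,
    // mirroring the behaviour of countBuildingsPerType in the record above.
    static Map<BuildingType, Integer> countPerType(Map<Integer, BuildingType> required) {
        Map<BuildingType, Integer> counts = new HashMap<>();
        for (BuildingType type : required.values()) {
            if (type == BuildingType.NONE) {
                continue;
            }
            counts.merge(type, 1, Integer::sum); // replaces the containsKey/put pair
        }
        return counts;
    }

    public static void main(String[] args) {
        Map<Integer, BuildingType> required = new HashMap<>();
        required.put(1, BuildingType.HOUSE);
        required.put(2, BuildingType.HOUSE);
        required.put(3, BuildingType.ROAD);
        required.put(4, BuildingType.NONE);
        System.out.println(countPerType(required)); // e.g. {HOUSE=2, ROAD=1}
    }
}

Functionally this matches the record's method; it only avoids the explicit new Integer(count + 1) boxing and reads a little tighter.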
Java
mit
55e66f0c3fd32116f8a60109179022d76a113aad
0
flesire/ontrack,nemerosa/ontrack,flesire/ontrack,flesire/ontrack,nemerosa/ontrack,flesire/ontrack,nemerosa/ontrack,flesire/ontrack,nemerosa/ontrack,nemerosa/ontrack
package net.nemerosa.ontrack.extension.svn.client; import net.nemerosa.ontrack.extension.svn.db.SVNRepository; import net.nemerosa.ontrack.extension.svn.support.SVNLogEntryCollector; import net.nemerosa.ontrack.tx.Transaction; import net.nemerosa.ontrack.tx.TransactionService; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.tmatesoft.svn.core.*; import org.tmatesoft.svn.core.auth.BasicAuthenticationManager; import org.tmatesoft.svn.core.internal.io.dav.DAVRepositoryFactory; import org.tmatesoft.svn.core.internal.io.svn.SVNRepositoryFactoryImpl; import org.tmatesoft.svn.core.wc.*; import java.util.*; import java.util.regex.Pattern; @Component public class SVNClientImpl implements SVNClient { private final TransactionService transactionService; @Autowired public SVNClientImpl(TransactionService transactionService) { this.transactionService = transactionService; // Repository factories SVNRepositoryFactoryImpl.setup(); DAVRepositoryFactory.setup(); } @Override public boolean exists(SVNRepository repository, SVNURL url, SVNRevision revision) { // Tries to gets information try { SVNInfo info = getWCClient(repository).doInfo(url, revision, revision); return info != null; } catch (SVNException ex) { return false; } } @Override public long getRepositoryRevision(SVNRepository repository, SVNURL url) { try { SVNInfo info = getWCClient(repository).doInfo(url, SVNRevision.HEAD, SVNRevision.HEAD); return info.getCommittedRevision().getNumber(); } catch (SVNException e) { throw translateSVNException(e); } } @Override public void log(SVNRepository repository, SVNURL url, SVNRevision pegRevision, SVNRevision startRevision, SVNRevision stopRevision, boolean stopOnCopy, boolean discoverChangedPaths, long limit, boolean includeMergedRevisions, ISVNLogEntryHandler isvnLogEntryHandler) { try { getLogClient(repository).doLog(url, null, pegRevision, startRevision, stopRevision, stopOnCopy, discoverChangedPaths, includeMergedRevisions, limit, null, isvnLogEntryHandler); } catch (SVNException e) { throw translateSVNException(e); } } @Override public boolean isTrunkOrBranch(SVNRepository repository, String path) { return isTrunk(path) || isBranch(repository, path); } @Override public List<Long> getMergedRevisions(SVNRepository repository, SVNURL url, long revision) { // Checks that the URL exists at both R-1 and R SVNRevision rm1 = SVNRevision.create(revision - 1); SVNRevision r = SVNRevision.create(revision); boolean existRM1 = exists(repository, url, rm1); boolean existR = exists(repository, url, r); try { // Both revisions must be valid in order to get some merges in between if (existRM1 && existR) { // Gets the changes in merge information SVNDiffClient diffClient = getDiffClient(repository); @SuppressWarnings("unchecked") Map<SVNURL, SVNMergeRangeList> before = diffClient.doGetMergedMergeInfo(url, rm1); @SuppressWarnings("unchecked") Map<SVNURL, SVNMergeRangeList> after = diffClient.doGetMergedMergeInfo(url, r); // Gets the difference between the two merge informations Map<SVNURL, SVNMergeRangeList> change; if (after != null && before != null) { change = new HashMap<>(); for (Map.Entry<SVNURL, SVNMergeRangeList> entry : after.entrySet()) { SVNURL source = entry.getKey(); SVNMergeRangeList afterMergeRangeList = entry.getValue(); SVNMergeRangeList beforeMergeRangeList = before.get(source); if (beforeMergeRangeList != null) { SVNMergeRangeList changeRangeList = 
afterMergeRangeList.diff(beforeMergeRangeList, false); if (!changeRangeList.isEmpty()) { change.put(source, changeRangeList); } } else { change.put(source, afterMergeRangeList); } } } else { change = after; } if (change == null || change.isEmpty()) { return Collections.emptyList(); } else { SVNLogEntryCollector collector = new SVNLogEntryCollector(); for (Map.Entry<SVNURL, SVNMergeRangeList> entry : change.entrySet()) { SVNURL source = entry.getKey(); SVNMergeRangeList mergeRangeList = entry.getValue(); SVNMergeRange[] mergeRanges = mergeRangeList.getRanges(); for (SVNMergeRange mergeRange : mergeRanges) { SVNRevision endRevision = SVNRevision.create(mergeRange.getEndRevision()); SVNRevision startRevision = SVNRevision.create(mergeRange.getStartRevision()); log(repository, source, endRevision, startRevision, endRevision, true, false, 0, false, collector); } } List<Long> revisions = new ArrayList<>(); for (SVNLogEntry entry : collector.getEntries()) { revisions.add(entry.getRevision()); } return revisions; } } else { // One of the revisions (R-1 or R) is missing return Collections.emptyList(); } } catch (SVNException ex) { throw translateSVNException(ex); } } @Override public boolean isTagOrBranch(SVNRepository repository, String path) { return isTag(repository, path) || isBranch(repository, path); } @Override public boolean isTag(SVNRepository repository, String path) { return isPathOK(repository.getTagPattern(), path); } private boolean isBranch(SVNRepository repository, String path) { return isPathOK(repository.getBranchPattern(), path); } private boolean isPathOK(String pattern, String path) { return org.apache.commons.lang.StringUtils.isNotBlank(pattern) && Pattern.matches(pattern, path); } private boolean isTrunk(String path) { return isPathOK(".+/trunk", path); } private SVNClientException translateSVNException(SVNException e) { return new SVNClientException(e); } protected SVNWCClient getWCClient(SVNRepository repository) { return getClientManager(repository).getWCClient(); } protected SVNLogClient getLogClient(SVNRepository repository) { return getClientManager(repository).getLogClient(); } protected SVNDiffClient getDiffClient(SVNRepository repository) { return getClientManager(repository).getDiffClient(); } protected SVNClientManager getClientManager(final SVNRepository repository) { // Gets the current transaction Transaction transaction = transactionService.get(); if (transaction == null) { throw new IllegalStateException("All SVN calls must be part of a SVN transaction"); } // Gets the client manager return transaction .getResource( SVNSession.class, repository.getId(), () -> { // Creates the client manager for SVN SVNClientManager clientManager = SVNClientManager.newInstance(); // Authentication (if needed) String svnUser = repository.getConfiguration().getUser(); String svnPassword = repository.getConfiguration().getPassword(); if (StringUtils.isNotBlank(svnUser) && StringUtils.isNotBlank(svnPassword)) { clientManager.setAuthenticationManager(new BasicAuthenticationManager(svnUser, svnPassword)); } // OK return new SVNSessionImpl(clientManager); } ) .getClientManager(); } }
ontrack-extension-svn/src/main/java/net/nemerosa/ontrack/extension/svn/client/SVNClientImpl.java
package net.nemerosa.ontrack.extension.svn.client; import net.nemerosa.ontrack.extension.svn.db.SVNRepository; import net.nemerosa.ontrack.extension.svn.support.SVNLogEntryCollector; import net.nemerosa.ontrack.tx.Transaction; import net.nemerosa.ontrack.tx.TransactionService; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.tmatesoft.svn.core.*; import org.tmatesoft.svn.core.auth.BasicAuthenticationManager; import org.tmatesoft.svn.core.wc.*; import java.util.*; import java.util.regex.Pattern; @Component public class SVNClientImpl implements SVNClient { private final TransactionService transactionService; @Autowired public SVNClientImpl(TransactionService transactionService) { this.transactionService = transactionService; } @Override public boolean exists(SVNRepository repository, SVNURL url, SVNRevision revision) { // Tries to gets information try { SVNInfo info = getWCClient(repository).doInfo(url, revision, revision); return info != null; } catch (SVNException ex) { return false; } } @Override public long getRepositoryRevision(SVNRepository repository, SVNURL url) { try { SVNInfo info = getWCClient(repository).doInfo(url, SVNRevision.HEAD, SVNRevision.HEAD); return info.getCommittedRevision().getNumber(); } catch (SVNException e) { throw translateSVNException(e); } } @Override public void log(SVNRepository repository, SVNURL url, SVNRevision pegRevision, SVNRevision startRevision, SVNRevision stopRevision, boolean stopOnCopy, boolean discoverChangedPaths, long limit, boolean includeMergedRevisions, ISVNLogEntryHandler isvnLogEntryHandler) { try { getLogClient(repository).doLog(url, null, pegRevision, startRevision, stopRevision, stopOnCopy, discoverChangedPaths, includeMergedRevisions, limit, null, isvnLogEntryHandler); } catch (SVNException e) { throw translateSVNException(e); } } @Override public boolean isTrunkOrBranch(SVNRepository repository, String path) { return isTrunk(path) || isBranch(repository, path); } @Override public List<Long> getMergedRevisions(SVNRepository repository, SVNURL url, long revision) { // Checks that the URL exists at both R-1 and R SVNRevision rm1 = SVNRevision.create(revision - 1); SVNRevision r = SVNRevision.create(revision); boolean existRM1 = exists(repository, url, rm1); boolean existR = exists(repository, url, r); try { // Both revisions must be valid in order to get some merges in between if (existRM1 && existR) { // Gets the changes in merge information SVNDiffClient diffClient = getDiffClient(repository); @SuppressWarnings("unchecked") Map<SVNURL, SVNMergeRangeList> before = diffClient.doGetMergedMergeInfo(url, rm1); @SuppressWarnings("unchecked") Map<SVNURL, SVNMergeRangeList> after = diffClient.doGetMergedMergeInfo(url, r); // Gets the difference between the two merge informations Map<SVNURL, SVNMergeRangeList> change; if (after != null && before != null) { change = new HashMap<>(); for (Map.Entry<SVNURL, SVNMergeRangeList> entry : after.entrySet()) { SVNURL source = entry.getKey(); SVNMergeRangeList afterMergeRangeList = entry.getValue(); SVNMergeRangeList beforeMergeRangeList = before.get(source); if (beforeMergeRangeList != null) { SVNMergeRangeList changeRangeList = afterMergeRangeList.diff(beforeMergeRangeList, false); if (!changeRangeList.isEmpty()) { change.put(source, changeRangeList); } } else { change.put(source, afterMergeRangeList); } } } else { change = after; } if (change == null || change.isEmpty()) { return 
Collections.emptyList(); } else { SVNLogEntryCollector collector = new SVNLogEntryCollector(); for (Map.Entry<SVNURL, SVNMergeRangeList> entry : change.entrySet()) { SVNURL source = entry.getKey(); SVNMergeRangeList mergeRangeList = entry.getValue(); SVNMergeRange[] mergeRanges = mergeRangeList.getRanges(); for (SVNMergeRange mergeRange : mergeRanges) { SVNRevision endRevision = SVNRevision.create(mergeRange.getEndRevision()); SVNRevision startRevision = SVNRevision.create(mergeRange.getStartRevision()); log(repository, source, endRevision, startRevision, endRevision, true, false, 0, false, collector); } } List<Long> revisions = new ArrayList<>(); for (SVNLogEntry entry : collector.getEntries()) { revisions.add(entry.getRevision()); } return revisions; } } else { // One of the revisions (R-1 or R) is missing return Collections.emptyList(); } } catch (SVNException ex) { throw translateSVNException(ex); } } @Override public boolean isTagOrBranch(SVNRepository repository, String path) { return isTag(repository, path) || isBranch(repository, path); } @Override public boolean isTag(SVNRepository repository, String path) { return isPathOK(repository.getTagPattern(), path); } private boolean isBranch(SVNRepository repository, String path) { return isPathOK(repository.getBranchPattern(), path); } private boolean isPathOK(String pattern, String path) { return org.apache.commons.lang.StringUtils.isNotBlank(pattern) && Pattern.matches(pattern, path); } private boolean isTrunk(String path) { return isPathOK(".+/trunk", path); } private SVNClientException translateSVNException(SVNException e) { return new SVNClientException(e); } protected SVNWCClient getWCClient(SVNRepository repository) { return getClientManager(repository).getWCClient(); } protected SVNLogClient getLogClient(SVNRepository repository) { return getClientManager(repository).getLogClient(); } protected SVNDiffClient getDiffClient(SVNRepository repository) { return getClientManager(repository).getDiffClient(); } protected SVNClientManager getClientManager(final SVNRepository repository) { // Gets the current transaction Transaction transaction = transactionService.get(); if (transaction == null) { throw new IllegalStateException("All SVN calls must be part of a SVN transaction"); } // Gets the client manager return transaction .getResource( SVNSession.class, repository.getId(), () -> { // Creates the client manager for SVN SVNClientManager clientManager = SVNClientManager.newInstance(); // Authentication (if needed) String svnUser = repository.getConfiguration().getUser(); String svnPassword = repository.getConfiguration().getPassword(); if (StringUtils.isNotBlank(svnUser) && StringUtils.isNotBlank(svnPassword)) { clientManager.setAuthenticationManager(new BasicAuthenticationManager(svnUser, svnPassword)); } // OK return new SVNSessionImpl(clientManager); } ) .getClientManager(); } }
SVN: initialisation of SVN repository factories
ontrack-extension-svn/src/main/java/net/nemerosa/ontrack/extension/svn/client/SVNClientImpl.java
SVN: initialisation of SVN repository factories
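The SVN commit above adds just two constructor calls, SVNRepositoryFactoryImpl.setup() and DAVRepositoryFactory.setup(), so that SVNKit can resolve svn:// and http(s):// URLs before any client call is made. Below is a minimal sketch of the same bootstrap outside the Spring component; the file:// line is an assumption added for completeness and is not part of this commit:

import org.tmatesoft.svn.core.internal.io.dav.DAVRepositoryFactory;
import org.tmatesoft.svn.core.internal.io.fs.FSRepositoryFactory;
import org.tmatesoft.svn.core.internal.io.svn.SVNRepositoryFactoryImpl;

public final class SvnKitBootstrap {

    private SvnKitBootstrap() {
        // static utility, no instances
    }

    // Registers SVNKit's repository factories; call once at application startup.
    public static void setup() {
        DAVRepositoryFactory.setup();     // http:// and https:// (WebDAV) -- as in the commit
        SVNRepositoryFactoryImpl.setup(); // svn:// and svn+ssh:// -- as in the commit
        FSRepositoryFactory.setup();      // file:// -- assumed extra, not added by this commit
    }
}

Without this registration, SVNKit typically cannot create a repository object for the corresponding URL scheme, which is presumably why the setup calls were moved into the constructor of the Spring-managed SVNClientImpl.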
Java
mit
2ae58cfc073bcf8a7fae04cc2438d47b566a11ad
0
StevenUpForever/LeetCode_Java,StevenUpForever/LeetCode_Java
package data_structure; import java.util.HashMap; public class LRUCache { //TAG: Google //TAG: Facebook //TAG: Microsoft //TAG: Amazon //TAG: Uber //TAG: Data structure //Difficulty: Hard /** 146. LRU Cache Design and implement a data structure for Least Recently Used (LRU) cache. It should support the following operations: get and put. get(key) - Get the value (will always be positive) of the key if the key exists in the cache, otherwise return -1. put(key, value) - Set or insert the value if the key is not already present. When the cache reached its capacity, it should invalidate the least recently used item before inserting a new item. Follow up: Could you do both operations in O(1) time complexity? Example: LRUCache cache = new LRUCache( 2 capacity ); cache.put(1, 1); cache.put(2, 2); cache.get(1); // returns 1 cache.put(3, 3); // evicts key 2 cache.get(2); // returns -1 (not found) cache.put(4, 4); // evicts key 1 cache.get(1); // returns -1 (not found) cache.get(3); // returns 3 cache.get(4); // returns 4 */ /* * Solution: * Use Doubly linkedList, used when delete LRU and insert * Use HashMap for get O(1) */ private HashMap<Integer, DoublyListNode> map; private int capacity; private DoublyListNode head; private DoublyListNode tail; public LRUCache(int capacity) { map = new HashMap<>(); this.capacity = capacity; head = new DoublyListNode(0, 0); tail = new DoublyListNode(0, 0); head.pre = null; head.next = tail; tail.pre = head; tail.next = null; } public int get(int key) { if (!map.containsKey(key)) return -1; DoublyListNode node = map.get(key); //Get key point: move this node to 1st in list, this is the most recent used one //First remove current node and link pre with next removeNode(node); //Second insert it at 1st place insertNodeAtFirst(node); return node.val; } public void put(int key, int value) { /* Put key point 1: put this node into 1st place 1. if this node existed 1. replace the value 2. delete and move to first place 2. if not existed, alloc a new node and move to first place decrease capacity if no capacity (-1) delete tail node */ if (map.containsKey(key)) { DoublyListNode node = map.get(key); node.val = value; removeNode(node); insertNodeAtFirst(node); } else { DoublyListNode newNode = new DoublyListNode(key, value); map.put(key, newNode); insertNodeAtFirst(newNode); capacity--; //Put key point 2: check if over capacity, if so delete most least used node, which at the end of the list if (capacity < 0) { //Remove node key from map as well map.remove(tail.pre.key); removeNode(tail.pre); capacity++; } } } private void removeNode(DoublyListNode node) { node.pre.next = node.next; node.next.pre = node.pre; } private void insertNodeAtFirst(DoublyListNode node) { DoublyListNode sec = head.next; head.next = node; node.pre = head; node.next = sec; sec.pre = node; } class DoublyListNode { public int key; public int val; public DoublyListNode next; public DoublyListNode pre; public DoublyListNode(int key, int val) { this.key = key; this.val = val; } } }
src/data_structure/LRUCache.java
package data_structure; import java.util.HashMap; public class LRUCache { //TAG: Google //TAG: Uber //TAG: Data structure //Difficulty: Hard /** 146. LRU Cache Design and implement a data structure for Least Recently Used (LRU) cache. It should support the following operations: get and put. get(key) - Get the value (will always be positive) of the key if the key exists in the cache, otherwise return -1. put(key, value) - Set or insert the value if the key is not already present. When the cache reached its capacity, it should invalidate the least recently used item before inserting a new item. Follow up: Could you do both operations in O(1) time complexity? Example: LRUCache cache = new LRUCache( 2 capacity ); cache.put(1, 1); cache.put(2, 2); cache.get(1); // returns 1 cache.put(3, 3); // evicts key 2 cache.get(2); // returns -1 (not found) cache.put(4, 4); // evicts key 1 cache.get(1); // returns -1 (not found) cache.get(3); // returns 3 cache.get(4); // returns 4 */ /** * Solution: * Use Doubly linkedList, used when delete LRU and insert * Use HashMap for get O(1) */ private HashMap<Integer, DoublyListNode> map; private int capacity; private DoublyListNode head; private DoublyListNode tail; public LRUCache(int capacity) { map = new HashMap<>(); this.capacity = capacity; head = new DoublyListNode(0, 0); tail = new DoublyListNode(0, 0); head.pre = null; head.next = tail; tail.pre = head; tail.next = null; } public int get(int key) { if (!map.containsKey(key)) return -1; DoublyListNode node = map.get(key); //Get key point: move this node to 1st in list, this is the most recent used one //First remove current node and link pre with next removeNode(node); //Second insert it at 1st place insertNodeAtFirst(node); return node.val; } public void put(int key, int value) { /* Put key point 1: put this node into 1st place 1. if this node existed 1. replace the value 2. delete and move to first place 2. if not existed, alloc a new node and move to first place */ if (map.containsKey(key)) { DoublyListNode node = map.get(key); node.val = value; removeNode(node); insertNodeAtFirst(node); } else { DoublyListNode newNode = new DoublyListNode(key, value); map.put(key, newNode); insertNodeAtFirst(newNode); capacity--; //Put key point 2: check if over capacity, if so delete most least used node, which at the end of the list if (capacity < 0) { //Remove node key from map as well map.remove(tail.pre.key); removeNode(tail.pre); capacity++; } } } private void removeNode(DoublyListNode node) { node.pre.next = node.next; node.next.pre = node.pre; } private void insertNodeAtFirst(DoublyListNode node) { DoublyListNode sec = head.next; head.next = node; node.pre = head; node.next = sec; sec.pre = node; } class DoublyListNode { public int key; public int val; public DoublyListNode next; public DoublyListNode pre; public DoublyListNode(int key, int val) { this.key = key; this.val = val; } } }
146. LRU Cache
src/data_structure/LRUCache.java
146. LRU Cache
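The LRUCache record's own javadoc already spells out the expected call-and-return trace; the short driver below simply replays it against the class defined above (package and class name are taken from the record, the demo class itself is hypothetical):

package data_structure;

public class LRUCacheDemo {
    public static void main(String[] args) {
        LRUCache cache = new LRUCache(2);  // capacity of 2

        cache.put(1, 1);
        cache.put(2, 2);
        System.out.println(cache.get(1));  // 1
        cache.put(3, 3);                   // evicts key 2
        System.out.println(cache.get(2));  // -1 (not found)
        cache.put(4, 4);                   // evicts key 1
        System.out.println(cache.get(1));  // -1 (not found)
        System.out.println(cache.get(3));  // 3
        System.out.println(cache.get(4));  // 4
    }
}

Both get and put stay O(1): the HashMap locates the node, and the doubly linked list splices it to the front (or drops tail.pre on eviction) with pointer updates only.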
Java
mit
4c920717118c1eb98566e06acbe18b07546a5ac2
0
BlockScore/blockscore-java
package com.blockscore.models; import com.blockscore.models.base.BasicResponse; import com.fasterxml.jackson.annotation.JsonProperty; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Date; import java.util.GregorianCalendar; /** * Candidate model. */ public class Candidate extends BasicResponse { @NotNull @JsonProperty("name_first") private String firstName; @Nullable @JsonProperty("name_middle") private String middleName; @NotNull @JsonProperty("name_last") private String lastName; @Nullable @JsonProperty("note") private String note; @Nullable @JsonProperty("ssn") private String socialSecurityNumber; @Nullable @JsonProperty("passport") private String passport; @Nullable @JsonProperty("date_of_birth") private Date dateOfBirth; @NotNull @JsonProperty("address_street1") private String addressStreet1; @Nullable @JsonProperty("address_street2") private String addressStreet2; @NotNull @JsonProperty("address_city") private String addressCity; @NotNull @JsonProperty("address_subdivision") private String addressSubdivision; @NotNull @JsonProperty("address_postal_code") private String addressPostalCode; @NotNull @JsonProperty("address_country_code") private String addressCountryCode; /** * Sets the legal first name of the customer. * @param firstName First name. * @return this. */ @NotNull public Candidate setFirstName(@NotNull final String firstName) { this.firstName = firstName; return this; } /** * Sets the legal middle name of the customer. * @param middleName Middle name. * @return this. */ @NotNull public Candidate setMiddleName(@NotNull final String middleName) { this.middleName = middleName; return this; } /** * Sets the legal last name. * @param lastName Last name. * @return this. */ @NotNull public Candidate setLastName(@NotNull final String lastName) { this.lastName = lastName; return this; } /** * You can store additional information about the candidate here such as your internal system's * identifier for this individual. This will allow you to keep track of them. * @param note Note to store. * @return this. */ @NotNull public Candidate setNote(@Nullable final String note) { this.note = note; return this; } /** * Can be either the last 4 digits of the US Social Security Number or the whole SSN. * @param ssn SSN to use. * @return this. */ @NotNull public Candidate setSSN(@Nullable final String ssn) { this.socialSecurityNumber = ssn; return this; } /** * Sets the passport number of the individual being verified. is only used for verifying non-US customers. * @param passport Passport data. * @return this. */ @NotNull public Candidate setPassport(@Nullable final String passport) { this.passport = passport; return this; } /** * Set the date of birth of your candidate. * @param dateOfBirth Date of birth * @return this. */ @NotNull public Candidate setDateOfBirth(@Nullable final Date dateOfBirth) { if (dateOfBirth == null) { return this; } this.dateOfBirth = new Date(dateOfBirth.getTime()); return this; } /** * Sets the primary street address for this person. * @param address Primary street address. */ public Candidate setAddress(@NotNull final Address address) { this.addressStreet1 = address.getStreet1(); this.addressStreet2 = address.getStreet2(); this.addressCity = address.getCity(); this.addressSubdivision = address.getSubdivision(); this.addressPostalCode = address.getPostalCode(); this.addressCountryCode = address.getCountryCode(); return this; } /** * The legal first name of the customer. * @return First name. 
*/ @Nullable public String getFirstName() { return firstName; } /** * The legal middle name of the customer. * @return Middle name. */ @Nullable public String getMiddleName() { return middleName; } /** * Gets the legal last name of the customer. * @return Last name. */ @Nullable public String getLastName() { return lastName; } /** * You can store additional information about the candidate here such as your internal system's * identifier for this individual. This will allow you to keep track of them. * @return Note stored. */ @Nullable public String getNote() { return note; } /** * Gets either the 4 digits of the US Social Security Number or the whole SSN. * @return SSN */ @Nullable public String getSSN() { return socialSecurityNumber; } /** * The passport number of the individual being verified. is only used for verifying non-US customers. * @return Passport number. */ @Nullable public String getPassport() { return passport; } /** * The date of birth of your candidate. * @return Date of birth. */ @Nullable public Date getDateOfBirth() { if (dateOfBirth == null) { return null; } return new Date(dateOfBirth.getTime()); } /** * Gets the primary street address for this person. * @return Address. */ @Nullable public Address getAddress() { Address addressObject = new Address(addressStreet1, addressStreet2, addressCity, addressSubdivision, addressPostalCode, addressCountryCode); return addressObject; } }
src/main/java/com/blockscore/models/Candidate.java
package com.blockscore.models; import com.blockscore.models.base.BasicResponse; import com.fasterxml.jackson.annotation.JsonProperty; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Date; /** * Watch list candidate model. * Created by Tony Dieppa on 9/30/14. */ public class Candidate extends BasicResponse { @Nullable @JsonProperty("note") private String mNote; @Nullable @JsonProperty("ssn") private String mSSN; @Nullable @JsonProperty("passport") private String mPassport; @Nullable @JsonProperty("date_of_birth") private Date mDateOfBirth; @NotNull @JsonProperty("first_name") private String mFirstName; @Nullable @JsonProperty("middle_name") private String mMiddleName; @NotNull @JsonProperty("last_name") private String mLastName; @Nullable @JsonProperty("address_street1") private String mStreet1; @Nullable @JsonProperty("address_street2") private String mStreet2; @Nullable @JsonProperty("address_city") private String mCity; @Nullable @JsonProperty("address_state") private String mState; @Nullable @JsonProperty("address_postal_code") private String mPostalCode; @Nullable @JsonProperty("address_country_code") private String mCountryCode; /** * An additional field which can be used for arbitrary storage. is typically used for * storing your internal identifiers for customer. * @return Note stored. */ @Nullable public String getNote() { return mNote; } /** * Sets an additional field which can be used for arbitrary storage. is typically used for * storing your internal identifiers for customer. * @param note Note to store. * @return this. */ @NotNull public Candidate setNote(@Nullable final String note) { mNote = note; return this; } /** * Gets either the 4 digits of the US Social Security Number or the whole SSN. * @return SSN */ @Nullable public String getSSN() { return mSSN; } /** * Can be either the last 4 digits of the US Social Security Number or the whole SSN. * @param ssn SSN to use. * @return this. */ @NotNull public Candidate setSSN(@Nullable final String ssn) { mSSN = ssn; return this; } /** * The passport number of the individual being verified. is only used for verifying non-US customers. * @return Passport number. */ @Nullable public String getPassport() { return mPassport; } /** * Sets the passport number of the individual being verified. is only used for verifying non-US customers. * @param passport Passport data. * @return this. */ @NotNull public Candidate setPassport(@Nullable final String passport) { mPassport = passport; return this; } /** * The date of birth of your candidate. * @return Date of birth. */ @Nullable public Date getDateOfBirth() { if (mDateOfBirth == null) { return null; } return new Date(mDateOfBirth.getTime()); } /** * Set the date of birth of your candidate. * @param dateOfBirth Date of birth * @return this. */ @NotNull public Candidate setDateOfBirth(@Nullable final Date dateOfBirth) { if (dateOfBirth == null) { return this; } mDateOfBirth = new Date(dateOfBirth.getTime()); return this; } /** * The legal first name of the customer. * @return First name. */ @NotNull public String getFirstName() { return mFirstName; } /** * Sets the legal first name of the customer. * @param firstName First name. * @return this. */ @NotNull public Candidate setFirstName(@NotNull final String firstName) { mFirstName = firstName; return this; } /** * The legal middle name of the customer. * @return Middle name. 
*/ @Nullable public String getMiddleName() { return mMiddleName; } /** * Sets the legal middle name of the customer. * @param middleName Middle name. * @return this. */ @NotNull public Candidate setMiddleName(@NotNull final String middleName) { mMiddleName = middleName; return this; } /** * Gets the legal last name of the customer. * @return Last name. */ @NotNull public String getLastName() { return mLastName; } /** * Sets the legal last name. * @param lastName Last name. * @return this. */ @NotNull public Candidate setLastName(@NotNull final String lastName) { mLastName = lastName; return this; } /** * The primary street address of the customer. This is automatically normalized. * @return Street address. */ @Nullable public String getStreet1() { return mStreet1; } /** * Sets the primary street address of the customer. This is automatically normalized. * @param street1 Street address. * @return this. */ @NotNull public Candidate setStreet1(@NotNull final String street1) { mStreet1 = street1; return this; } /** * The second address line typically used for apartment or suite numbers. This is automatically normalized. * @return Street address (line 2) */ @Nullable public String getStreet2() { return mStreet2; } /** * Sets the second address line typically used for apartment or suite numbers. This is automatically normalized. * @param street2 Street address (line 2) * @return this. */ @NotNull public Candidate setStreet2(@NotNull final String street2) { mStreet2 = street2; return this; } /** * The city name of the customer. This is automatically normalized. * @return City name. */ @Nullable public String getCity() { return mCity; } /** * Sets the city name of the customer. This is automatically normalized. * @param city City name. * @return this. */ @NotNull public Candidate setCity(@NotNull final String city) { mCity = city; return this; } /** * Gets the state of the customer. Should be of the FIPS code form. For example California would be CA. * @return State */ @Nullable public String getState() { return mState; } /** * Sets the state of the customer. Should be of the FIPS code form. For example California would be CA. * @param state State * @return this. */ @NotNull public Candidate setState(@Nullable final String state) { mState = state; return this; } /** * Gets the postal code. * @return Postal code. */ @Nullable public String getPostalCode() { return mPostalCode; } /** * Sets the postal code. * @param postalCode Postal code. * @return this. */ @NotNull public Candidate setPostalCode(@Nullable final String postalCode) { mPostalCode = postalCode; return this; } /** * The country of the customer. Should be of the ISO code form. * @return Country code. */ @Nullable public String getCountryCode() { return mCountryCode; } /** * Sets the country of the customer. Should be of the ISO code form. * @param countryCode Country code. * @return this. */ @NotNull public Candidate setCountryCode(@Nullable final String countryCode) { mCountryCode = countryCode; return this; } }
Updating Candidate to v4.0

* Also removes androidisms
src/main/java/com/blockscore/models/Candidate.java
Updating Candidate to v4.0 * Also removes androidisms
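For anyone skimming the diff above: the v4.0 Candidate replaces the flat per-field address setters with chained setters plus a single Address value object, and the six-argument Address constructor and its getters are the ones invoked by getAddress/setAddress in the new file (Address appears to live in the same com.blockscore.models package, since Candidate uses it without an import). The following is a minimal usage sketch, not part of the repository: the wrapper class name and all sample values are illustrative, and actually submitting the populated Candidate through the BlockScore client is out of scope for this model class and therefore omitted.

import com.blockscore.models.Address;
import com.blockscore.models.Candidate;

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

public final class CandidateSketch {
    public static void main(String[] args) {
        // Argument order mirrors the constructor call in Candidate#getAddress:
        // street1, street2, city, subdivision, postal code, country code.
        Address address = new Address("123 Main St", "Apt 4", "Anytown", "CA", "90001", "US");

        Date dateOfBirth = new GregorianCalendar(1980, Calendar.AUGUST, 23).getTime();

        Candidate candidate = new Candidate()
                .setFirstName("John")
                .setMiddleName("Pearce")
                .setLastName("Doe")
                .setDateOfBirth(dateOfBirth)
                .setSSN("0000")                  // last four digits or the full SSN
                .setNote("internal-id-12345")    // arbitrary storage, e.g. your own identifier
                .setAddress(address);

        // The populated model would now be handed to whatever client call performs
        // the verification; that API is not shown in this file and is omitted here.
        System.out.println(candidate.getFirstName() + " " + candidate.getLastName());
    }
}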
Java
mit
2307070e26469e15afee56c41bd3e699d3632087
0
SpongePowered/Sponge,SpongePowered/Sponge,SpongePowered/Sponge,SpongePowered/SpongeCommon,SpongePowered/SpongeCommon
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.common.event.tracking.context; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.LinkedListMultimap; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import net.minecraft.block.Block; import net.minecraft.block.BlockEventData; import net.minecraft.block.state.IBlockState; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.math.BlockPos; import net.minecraft.world.WorldServer; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.data.Transaction; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.world.BlockChangeFlag; import org.spongepowered.api.world.BlockChangeFlags; import org.spongepowered.asm.util.PrettyPrinter; import org.spongepowered.common.block.SpongeBlockSnapshot; import org.spongepowered.common.block.SpongeBlockSnapshotBuilder; import org.spongepowered.common.bridge.tileentity.TileEntityBridge; import org.spongepowered.common.bridge.world.WorldServerBridge; import org.spongepowered.common.event.tracking.IPhaseState; import org.spongepowered.common.event.tracking.PhaseContext; import org.spongepowered.common.event.tracking.PhaseTracker; import org.spongepowered.common.event.tracking.TrackingUtil; import org.spongepowered.common.mixin.core.world.WorldServerAccessor; import org.spongepowered.common.mixin.core.world.chunk.ChunkMixin; import org.spongepowered.common.world.BlockChange; import org.spongepowered.common.world.SpongeBlockChangeFlag; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.BiConsumer; import javax.annotation.Nullable; @SuppressWarnings("rawtypes") public final class MultiBlockCaptureSupplier implements ICaptureSupplier { public static final boolean PRINT_TRANSACTIONS = Boolean.parseBoolean(System.getProperty("sponge.debugBlockTransactions", "false")); @Nullable 
private LinkedListMultimap<BlockPos, SpongeBlockSnapshot> multimap; @Nullable private ListMultimap<BlockPos, BlockEventData> scheduledEvents; @Nullable private List<SpongeBlockSnapshot> snapshots; @Nullable private LinkedHashMap<WorldServer, SpongeProxyBlockAccess.Proxy> processingWorlds; @Nullable private Set<BlockPos> usedBlocks; private int transactionIndex = -1; // These are used to keep track of which snapshot is being referred to as "most recent change" private int snapshotIndex = -1; // so that we can appropriately cancel or discard or apply specific event transactions // We made BlockTransaction a Node and this is a pseudo LinkedList due to the nature of needing // to be able to track what block states exist at the time of the transaction while other transactions // are processing (because future transactions performing logic based on what exists at that state, // will potentially get contaminated information based on the last transaction prior to transaction // processing). Example: When starting to perform neighbor notifications during piston movement, one // can feasibly see that the block state is changed already without being able to get the appropriate // block state. @Nullable private BlockTransaction tail; @Nullable private BlockTransaction head; public MultiBlockCaptureSupplier() { } /** * Captures the provided {@link BlockSnapshot} into a {@link Multimap} backed collection. * The premise is that each {@link BlockPos} normally has a single {@link BlockChange}, * with the exceptions of certain few cases where multiple changes can occur for the same * position. The larger issue is that while the multiple changes are tracked, the desired * flag of changes does not result in a valid {@link BlockChange}, and therefor an invalid * {@link ChangeBlockEvent} is generated, potentially leading to duplication bugs with * protection plugins. As a result, the consuming {@link BlockSnapshot} is placed into * a {@link ListMultimap} keyed by the {@link BlockPos}, and if there are multiple snapshots * per {@link BlockPos}, has multiple changes will be {@code true}, and this method * will return {@code true}. * * @param snapshot The snapshot being captured * @param newState The most current new IBlockState to calculate the BlockChange flag * @return True if the block position has previously not been modified or captured yet */ public boolean put(final BlockSnapshot snapshot, final IBlockState newState) { // Start by figuring out the backing snapshot. In all likelyhood, we could just cast, but we want to be safe final SpongeBlockSnapshot backingSnapshot = getBackingSnapshot(snapshot); // Get the key of the block position, we know this is a pure block pos and not a mutable one too. final BlockPos blockPos = backingSnapshot.getBlockPos(); if (this.usedBlocks == null) { // Means we have a first usage. All three fields are null // At this point, we know we have not captured anything and // can just populate the normal list. this.usedBlocks = new HashSet<>(); this.usedBlocks.add(blockPos); this.addSnapshot(backingSnapshot); return true; } // This isn't our first rodeo... final boolean added = this.usedBlocks.add(blockPos); // add it to the set of positions already used and use the boolean if (this.multimap != null) { // Means we've already got multiple changes per position once before. // Likewise, the used blocks, snapshots and multimap will NOT be null. // more fasts, we know we have multiple block positions. 
// And we can find out if this is the first time we if (added) { // If the position hasn't been captured yet, that means we need to add it as an original // snapshot being changed, for the list usage. this.addSnapshot(backingSnapshot); } // we don't have to this.multimap.put(blockPos, backingSnapshot); // If the position is duplicated, we need to update the original snapshot of the now incoming block change // in relation to the original state (so if a block was set to air, then afterwards set to piston head, it should go from break to modify) if (!added) { associateBlockChangeForPosition(newState, blockPos); } return added; } // We have not yet checked if this incoming snapshot is a duplicate position if (!added) { // Ok, means we have a multi change on a same position, now to use the multimap // for the first time. this.multimap = LinkedListMultimap.create(); // LinkedListMultimap is insertion order respective, so the backed lists per // Now to populate it from the previously used list of snapshots... for (final SpongeBlockSnapshot existing : this.snapshots) { // Ignore snapshots potentially being null, it will never be null at this point. this.multimap.put(existing.getBlockPos(), existing); } // And place the snapshot into the multimap. this.multimap.put(blockPos, backingSnapshot); // Now we can re-evaluate the modified block position // If the position is duplicated, we need to update the original snapshot of the now incoming block change // in relation to the original state (so if a block was set to air, then afterwards set to piston head, it should go from break to modify) associateBlockChangeForPosition(newState, blockPos); return false; } // At this point, we haven't captured the block position yet. // and we can check if the list is null. this.addSnapshot(backingSnapshot); // And this is the only time that we return true, if we have not caught multiple transactions per position before. return true; } private void addSnapshot(final SpongeBlockSnapshot backingSnapshot) { if (this.snapshots == null) { this.snapshots = new ArrayList<>(); } this.snapshots.add(backingSnapshot); this.snapshotIndex++; } /** * Associates the desired block state {@link BlockChange} in comparison to the * already guaranteed original {@link SpongeBlockSnapshot} for proper event * creation when multiple block changes exist for the provided {@link BlockPos}. * * <p>Note: This method <strong>requires</strong> that {@link #multimap} is not * {@code null}, otherwise it will cause an NPE.</p> * * @param newState The incoming block change to compare to change * @param blockPos The block position to get the backing list from the multimap */ @SuppressWarnings("unchecked") private void associateBlockChangeForPosition(final IBlockState newState, final BlockPos blockPos) { final List<SpongeBlockSnapshot> list = this.multimap.get(blockPos); if (list != null && !list.isEmpty()) { final SpongeBlockSnapshot originalSnapshot = list.get(0); final PhaseContext<?> peek = PhaseTracker.getInstance().getCurrentContext(); final IBlockState currentState = (IBlockState) originalSnapshot.getState(); originalSnapshot.blockChange = ((IPhaseState) peek.state).associateBlockChangeWithSnapshot(peek, newState, newState.getBlock(), currentState, originalSnapshot, currentState.getBlock()); } } /** * Gets an <b>unmodifiable</b> {@link List} of the original * {@link BlockSnapshot}s being changed for their respective * {@link BlockPos Block Positions}. 
The list is self updating * and the {@link BlockSnapshot}s themselves are self updating * based on the current processes within the PhaseTracker's * {@link IPhaseState} the game is processing. The reasons for * this list to be unmodifiable except by this object are as follows: * <ul> * <li>Submitted {@link BlockSnapshot}s are to be added by the * {@link #put(BlockSnapshot, IBlockState)} method.</li> * <li>Adding multiple {@link BlockSnapshot}s per {@link BlockPos} * results in an internal restructuring of storage such that a * {@link Multimap} is created to keep track of intermediary * {@link BlockSnapshot}s. By this nature, the list cannot be modified * except by this capture object.</li> * <li>Removing a {@link BlockSnapshot} is only applicable via * {@link #prune(BlockSnapshot)} or {@link #clear()}. This is to * allow sanity checking for multimap purposes and garbage cleanup * when necessary.</li> * <li>The creation of {@link ChangeBlockEvent}s requires a * {@link Transaction} to be created, and plugins are only * exposed the {@link Transaction#getOriginal()} as the first * {@link BlockSnapshot} that would exist in this list. Intermediary * {@link BlockSnapshot} changes for that postiion are internally * utilized to process physics, but are not exposed to the event.</li> * </ul> * * @return An unmodifiable list of first block originals being changed */ public final List<SpongeBlockSnapshot> get() { return this.snapshots == null ? Collections.emptyList() : Collections.unmodifiableList(this.snapshots); } public final void prune(final BlockSnapshot snapshot) { if (this.isEmpty()) { throw new IllegalStateException("Unexpected pruning on an empty capture object for position " + snapshot.getPosition()); } // Start by figuring out the backing snapshot. In all likelyhood, we could just cast, but we want to be safe final SpongeBlockSnapshot backingSnapshot = getBackingSnapshot(snapshot); // Get the key of the block position, we know this is a pure block pos and not a mutable one too. final BlockPos blockPos = backingSnapshot.getBlockPos(); // Check if we have a multi-pos if (this.multimap != null) { pruneFromMulti(backingSnapshot, blockPos); return; } pruneSingle(backingSnapshot, blockPos); if (this.head != null) { pruneTransaction(getBackingSnapshot(snapshot)); } } private void pruneSingle(final SpongeBlockSnapshot backingSnapshot, final BlockPos blockPos) { if (this.usedBlocks == null) { // means we didn't actually capture??? throw new IllegalStateException("Expected to remove a single block change that was supposed to be captured...."); } if (this.snapshots == null) { // also means we didn't capture.... wtf is going on at this point? 
throw new IllegalStateException("Expected to remove a single block change that was supposed to be captured...."); } this.usedBlocks.remove(blockPos); this.snapshots.remove(backingSnapshot); // Should be the same snapshot used } private void pruneFromMulti(final SpongeBlockSnapshot backingSnapshot, final BlockPos blockPos) { final List<SpongeBlockSnapshot> snapshots = this.multimap.get(blockPos); if (snapshots != null) { for (final Iterator<SpongeBlockSnapshot> iterator = snapshots.iterator(); iterator.hasNext(); ) { final SpongeBlockSnapshot next = iterator.next(); if (next.getState().equals(backingSnapshot.getState())) { iterator.remove(); break; } } // If the list view is now empty, we need to prune the position from the multimap if (snapshots.isEmpty()) { this.multimap.removeAll(blockPos); // And then prune the snapshot from the list of firsts for (final Iterator<SpongeBlockSnapshot> firsts = this.snapshots.iterator(); firsts.hasNext(); ) { final SpongeBlockSnapshot next = firsts.next(); if (next.equals(backingSnapshot)) { firsts.remove(); // And if it's been found, remove the position from the used blocks as well. this.usedBlocks.remove(blockPos); break; } } if (this.snapshots.isEmpty()) { this.multimap = null; } } } } private SpongeBlockSnapshot getBackingSnapshot(final BlockSnapshot snapshot) { final SpongeBlockSnapshot backingSnapshot; if (!(snapshot instanceof SpongeBlockSnapshot)) { backingSnapshot = SpongeBlockSnapshotBuilder.pooled().from(snapshot).build(); } else { backingSnapshot = (SpongeBlockSnapshot) snapshot; } return backingSnapshot; } /** * Returns {@code true} if there are no captured objects. * * @return {@code true} if empty */ @Override public final boolean isEmpty() { return (this.snapshots == null || this.snapshots.isEmpty()) && this.head == null; } /** * If not empty, activates the {@link BiConsumer} then clears all captures. * The catch with this is that as the underlying list is guaranteed * to be the first {@link BlockSnapshot} change, multiple changes can exist * for an individual {@link BlockPos}, such that the multi-map needs to * provide said information to the consumer. * * <p>The first {@link List} parameter is identical to having been * built from the first elements of each list from the second parameter * of {@link Map} where {@link Map#values()} returns the insertion order * preserved {@link List} of {@link BlockSnapshot}s for the backed * {@link BlockPos}. In simpler terms, the {@link List} is built from * the first elements of each list from the {@link Map}s values.</p> * * @param consumer The consumer to activate */ @SuppressWarnings("UnstableApiUsage") // Guava marks Multimaps.asMap as Beta features public final void acceptAndClearIfNotEmpty(final BiConsumer<List<? extends BlockSnapshot>, Map<BlockPos, List<BlockSnapshot>>> consumer) { if (this.multimap != null) { final List<? extends BlockSnapshot> blockSnapshots = get(); // Since multimaps provide a view when asMap is called, we need to recreate the collection // of the map to pass into the consumer final Map<BlockPos, List<SpongeBlockSnapshot>> view = Multimaps.asMap(this.multimap); final Map<BlockPos, List<BlockSnapshot>> map = new LinkedHashMap<>(view.size()); for (final Map.Entry<BlockPos, List<SpongeBlockSnapshot>> entryView : view.entrySet()) { map.put(entryView.getKey(), new ArrayList<>(entryView.getValue())); } this.multimap.clear(); // Clean captured lists before they get potentially contaminated by processing. 
consumer.accept(blockSnapshots, map); // Accept the list and map } } /* Begin the more enhanced block tracking. This is only used by states that absolutely need to be able to track certain changes that involve more "physics" related transactions, such as neighbor notification tracking, tile entity tracking, and normally, intermediary transaction tracking. Because of these states, we need to envelop the information relating to: - The most recent block change, if it has been a change that was applied - The most recent tile entity being captured - The most recent neighbor notification in the order in which it is being applied to in comparison with the most recent block change In some rare cases, some block changes may take place after a neighbor notification is submitted, or a tile entity is being replaced. To acommodate this, when such cases arise, we attempt to snapshot any potential transactions that may take place prior to their blocks being changed, allowing us to take full snapshots of tile entities in the event a complete restoration is required. This is achieved through captureNeighborNotification and logTileChange. */ private void logTransaction(final BlockTransaction transaction) { if (this.tail != null) { this.tail.next = transaction; } else { this.head = transaction; } transaction.previous = this.tail; this.tail = transaction; } private void pruneTransaction(final SpongeBlockSnapshot snapshot) { if (this.head == null) { return; } for (BlockTransaction transaction = this.head; transaction != null; transaction = transaction.next) { if (transaction.equalsSnapshot(snapshot)) { final BlockTransaction previous = transaction.previous; final BlockTransaction next = transaction.next; if (previous == null) { this.head = next; } else { previous.next = next; transaction.previous = null; } if (next == null) { this.tail = previous; } else { next.previous = previous; transaction.next = null; } } } } public void captureNeighborNotification( final WorldServerBridge mixinWorldServer, final IBlockState notifyState, final BlockPos notifyPos, final Block sourceBlock, final BlockPos sourcePos) { final int transactionIndex = ++this.transactionIndex; final IBlockState actualSourceState = ((WorldServer) mixinWorldServer).getBlockState(sourcePos); final BlockTransaction.NeighborNotification notification = new BlockTransaction.NeighborNotification(transactionIndex, this.snapshotIndex, mixinWorldServer, notifyState, notifyPos, sourceBlock, sourcePos, actualSourceState); notification.enqueueChanges(mixinWorldServer.bridge$getProxyAccess(), this); logTransaction(notification); } /** * Specifically called by {@link ChunkMixin#bridge$setBlockState(BlockPos, IBlockState, IBlockState, BlockChangeFlag)} while it is preparing * various transactional aspects, such as potential tile entity removals, replacements, etc. Specifically should never be called outside * of that reaction since {@link BlockTransaction#enqueueChanges(SpongeProxyBlockAccess, MultiBlockCaptureSupplier)} * does not get called automatically, it is called prior to queueing potential tile replacements, and prior to calling to * {@link #logTileChange(WorldServerBridge, BlockPos, TileEntity, TileEntity)} in the event a tile entity is going to be removed. 
* * @param originalBlockSnapshot The original snapshot being changed * @param newState The new state * @param flags The change flags * @return The constructed transaction */ public BlockTransaction.ChangeBlock logBlockChange(final SpongeBlockSnapshot originalBlockSnapshot, final IBlockState newState, final BlockChangeFlag flags) { this.put(originalBlockSnapshot, newState); // Always update the snapshot index before the block change is tracked final int transactionIndex = ++this.transactionIndex; final BlockTransaction.ChangeBlock changeBlock = new BlockTransaction.ChangeBlock(transactionIndex, this.snapshotIndex, originalBlockSnapshot, newState, (SpongeBlockChangeFlag) flags); logTransaction(changeBlock); return changeBlock; } public void logTileChange( final WorldServerBridge mixinWorldServer, final BlockPos pos, @Nullable final TileEntity oldTile, @Nullable final TileEntity newTile) { final WorldServer world = (WorldServer) mixinWorldServer; final IBlockState current = world.getBlockState(pos); if (this.tail instanceof BlockTransaction.ChangeBlock) { final BlockTransaction.ChangeBlock changeBlock = (BlockTransaction.ChangeBlock) this.tail; if (oldTile != null && newTile == null && changeBlock.queueBreak) { if (changeBlock.queuedRemoval == oldTile) { return; // Duplicate requests need to be silenced because multiple attempts to assure a tile is removed can be made // during breaking blocks. } changeBlock.queuedRemoval = oldTile; if (changeBlock.queueTileSet == null) { mixinWorldServer.bridge$getProxyAccess().queueRemoval(oldTile); } else { // Make sure the new tile entity has the correct position changeBlock.queueTileSet.setPos(pos); mixinWorldServer.bridge$getProxyAccess().queueReplacement(changeBlock.queueTileSet, changeBlock.queuedRemoval); mixinWorldServer.bridge$getProxyAccess().unmarkRemoval(pos, oldTile); } return; } } if (newTile != null && this.tail != null) { // Double check previous changes, if there's a remove tile entity, and previous to that, a change block, and this is an add tile entity, // well, we need to flip the ChangeBlock to avoid doing a breakBlock logic boolean isSame = false; for (BlockTransaction prevChange = this.tail; prevChange != null; prevChange = prevChange.previous) { if (prevChange instanceof BlockTransaction.ChangeBlock) { final BlockTransaction.ChangeBlock changeBlock = (BlockTransaction.ChangeBlock) prevChange; isSame = changeBlock.queuedRemoval == newTile; if (isSame) { changeBlock.ignoreBreakBlockLogic = true; changeBlock.queuedRemoval = null; ((TileEntityBridge) newTile).bridge$setCaptured(false); break; } } } if (isSame) { if (mixinWorldServer.bridge$getProxyAccess().isTileQueuedForRemoval(pos, newTile)) { mixinWorldServer.bridge$getProxyAccess().unmarkRemoval(pos, newTile); } return; } } final int transactionIndex = ++this.transactionIndex; if (oldTile != null) { final SpongeBlockSnapshot snapshot = mixinWorldServer.bridge$createSnapshotWithEntity(current, pos, BlockChangeFlags.NONE, oldTile); this.put(snapshot, current); if (newTile != null) { // replacing a tile. 
snapshot.blockChange = BlockChange.MODIFY; final BlockTransaction.ReplaceTileEntity transaction = new BlockTransaction.ReplaceTileEntity(transactionIndex, this.snapshotIndex, newTile, oldTile, snapshot); logTransaction(transaction); transaction.enqueueChanges(mixinWorldServer.bridge$getProxyAccess(),this); return; } // Removing the tile snapshot.blockChange = BlockChange.BREAK; final BlockTransaction.RemoveTileEntity transaction = new BlockTransaction.RemoveTileEntity(transactionIndex, this.snapshotIndex, oldTile, snapshot); transaction.enqueueChanges(mixinWorldServer.bridge$getProxyAccess(), this); logTransaction(transaction); return; } if (newTile != null) { final SpongeBlockSnapshot snapshot = mixinWorldServer.bridge$createSnapshotWithEntity(current, pos, BlockChangeFlags.NONE, newTile); snapshot.blockChange = BlockChange.PLACE; final BlockTransaction.AddTileEntity transaction = new BlockTransaction.AddTileEntity(transactionIndex, this.snapshotIndex, newTile, snapshot); transaction.enqueueChanges(mixinWorldServer.bridge$getProxyAccess(), this); logTransaction(transaction); } } void queuePreviousStates(final BlockTransaction transaction) { if (this.head != null) { if (transaction == this.head) { return; } for (BlockTransaction prevChange = this.head; prevChange != null; prevChange = prevChange.next) { if (transaction.appliedPreChange) { // Short circuit. It will not have already applied changes to the previous // changes until it at least applies them to the first entry (head). return; } transaction.provideUnchangedStates(prevChange); } } } public void cancelTransaction(final BlockSnapshot original) { if (this.tail == null) { return; } final SpongeBlockSnapshot snapshot = (SpongeBlockSnapshot) original; final BlockPos blockPos = snapshot.getBlockPos(); snapshot.getWorldServer().ifPresent(worldServer -> { for (BlockTransaction prevChange = this.tail; prevChange != null; prevChange = prevChange.previous) { if (!prevChange.isCancelled) { prevChange.cancel(worldServer, blockPos, ((WorldServerBridge) worldServer).bridge$getProxyAccess()); } } }); } public void clear() { if (this.multimap != null) { this.multimap.clear(); this.multimap = null; } if (this.snapshots != null) { this.snapshots.clear(); this.snapshots = null; } if (this.usedBlocks != null) { this.usedBlocks.clear(); } if (this.scheduledEvents != null) { this.scheduledEvents.clear(); } this.snapshotIndex = -1; this.transactionIndex = -1; } public void restoreOriginals() { if (this.snapshots != null && !this.snapshots.isEmpty()) { for (final SpongeBlockSnapshot original : Lists.reverse(this.snapshots)) { original.restore(true, BlockChangeFlags.NONE); } this.clear(); } } public Optional<Transaction<BlockSnapshot>> createTransaction(final SpongeBlockSnapshot snapshot) { final Optional<WorldServer> maybeWorld = snapshot.getWorldServer(); if (!maybeWorld.isPresent()) { return Optional.empty(); } final WorldServer worldServer = maybeWorld.get(); final BlockPos blockPos = snapshot.getBlockPos(); final IBlockState newState = worldServer.getBlockState(blockPos); // Because enhanced tracking requires handling very specific proxying of block states // so, the requests for the actual states sometimes may cause issues with mods and their // extended state handling logic if what the world sees is different from what our tracker // saw, so, we have to just provide the new state (extended states are calculated anyways). final IBlockState newActualState = this.head != null ? 
newState : newState.getActualState(worldServer, blockPos); final BlockSnapshot newSnapshot = ((WorldServerBridge) worldServer).bridge$createSnapshot(newState, newActualState, blockPos, BlockChangeFlags.NONE); // Up until this point, we can create a default Transaction if (this.multimap != null) { // But we need to check if there's any intermediary block changes... // And because multi is true, we can be sure the multimap is populated at least somewhere. final List<SpongeBlockSnapshot> intermediary = this.multimap.get(blockPos); if (!intermediary.isEmpty() && intermediary.size() > 1) { // We need to make a carbon copy of the list since it's technically a key view list // within the multimap, so, if the multimap is cleared, at the very least, the list will // not be cleared. Likewise, we also need to skip over the first element since the snapshots // list will have that element anyways (we don't want to be providing duplicate snapshots // for plugins to witness and come to expect that they are intermediary states, when they're still the original positions final ImmutableList.Builder<SpongeBlockSnapshot> builder = ImmutableList.builder(); boolean movedPastFirst = false; for (final Iterator<SpongeBlockSnapshot> iterator = intermediary.iterator(); iterator.hasNext(); ) { if (!movedPastFirst) { iterator.next(); movedPastFirst = true; continue; } builder.add(iterator.next()); } return Optional.of(new Transaction<>(snapshot, newSnapshot, builder.build())); } } return Optional.of(new Transaction<>(snapshot, newSnapshot)); } public boolean trackEvent(final BlockPos pos, final BlockEventData blockEventData) { if (this.usedBlocks != null && this.usedBlocks.contains(pos)) { if (this.scheduledEvents == null) { this.scheduledEvents = LinkedListMultimap.create(); } this.scheduledEvents.put(pos.toImmutable(), blockEventData); return true; } return false; } public ListMultimap<BlockPos, BlockEventData> getScheduledEvents() { return this.scheduledEvents == null || this.scheduledEvents.isEmpty() ? ImmutableListMultimap.of() : ArrayListMultimap.create(this.scheduledEvents); } @SuppressWarnings("ReturnInsideFinallyBlock") public boolean processTransactions(final List<Transaction<BlockSnapshot>> transactions, final PhaseContext<?> phaseContext, final boolean noCancelledTransactions, final ListMultimap<BlockPos, BlockEventData> scheduledEvents, final int currentDepth) { final IPhaseState phaseState = phaseContext.state; int targetIndex = 0; if (this.tail == null) { boolean hasEvents = false; if (!scheduledEvents.isEmpty()) { hasEvents = true; } for (final Transaction<BlockSnapshot> transaction : transactions) { if (!transaction.isValid()) { continue; } TrackingUtil.performTransactionProcess(transaction, phaseContext, currentDepth); if (hasEvents) { final SpongeBlockSnapshot original = (SpongeBlockSnapshot) transaction.getOriginal(); original.getWorldServer().ifPresent(worldServer -> { final WorldServerAccessor accessor = (WorldServerAccessor) worldServer; final WorldServer.ServerBlockEventList queue = accessor.getBlockEventQueueForSponge()[accessor.getBlockEventCacheIndexForSponge()]; for (final BlockEventData blockEventData : scheduledEvents.get(original.getBlockPos())) { boolean equals = false; for (final BlockEventData eventData : queue) { if (eventData.equals(blockEventData)) { equals = true; break; } } if (!equals) { queue.add(blockEventData); } } }); } } return noCancelledTransactions; } Transaction<BlockSnapshot> eventTransaction = transactions.isEmpty() ? 
null : transactions.get(targetIndex); try { // now to clear this suppliers information before we start proceeding final BlockTransaction head = this.head; this.head = null; this.tail = null; for (BlockTransaction transaction = head; transaction != null; ) { if (transaction.snapshotIndex > targetIndex) { targetIndex++; eventTransaction = transactions.get(targetIndex); } if (eventTransaction != null && !eventTransaction.isValid()) { final BlockTransaction next = transaction.next; transaction.next = null; transaction.previous = null; transaction = next; continue; } final Optional<WorldServerBridge> maybeWorld = transaction.getWorldServer(); final BlockTransaction derp = transaction; try (@SuppressWarnings("try") final SpongeProxyBlockAccess access = maybeWorld.map( WorldServerBridge::bridge$getProxyAccess).map(proxy -> proxy.switchTo(derp)).orElse(null); final SpongeProxyBlockAccess.Proxy ignored = maybeWorld.map(transaction::getProxy).orElse(null)){ final PrettyPrinter printer; if (PRINT_TRANSACTIONS) { printer = new PrettyPrinter(60).add("Debugging BlockTransaction").centre().hr() .addWrapped(60, "This is a process printout of the information passed along from the Proxy and the world.") .add() .add("Proxy Container:"); } else { printer = null; } if (transaction.blocksNotAffected != null) { transaction.blocksNotAffected.forEach((pos, block) -> { if (PRINT_TRANSACTIONS) { printer.addWrapped(120, " %s : %s, %s", "UnaffectedBlock", pos, block); } if (access != null) { access.proceed(pos, block, false); } }); } if (transaction.tilesAtTransaction != null) { transaction.tilesAtTransaction.forEach((pos, tile) -> { if (PRINT_TRANSACTIONS) { printer.addWrapped(120, " %s : %s, %s", "UnaffectedTile", pos, tile == null ? "null" : ((TileEntityBridge) tile).bridge$getPrettyPrinterString()); } if (access != null) { access.pushTile(pos, tile); } }); } if (PRINT_TRANSACTIONS) { if (access != null) { access.addToPrinter(printer); } transaction.addToPrinter(printer); printer.print(System.err); } transaction.process(eventTransaction, phaseState, phaseContext, currentDepth); } catch (final Exception e) { final PrettyPrinter printer = new PrettyPrinter(60).add("Exception while trying to apply transaction").centre().hr() .addWrapped(60, "BlockTransactions failing to process can lead to unintended consequences. 
If the exception is *directly* coming from Sponge's code, please report to Sponge.") .add(); maybeWorld.map(WorldServerBridge::bridge$getProxyAccess).ifPresent(access -> access.addToPrinter(printer)); transaction.addToPrinter(printer); printer.add(); printer .add("Exception: ") .add(e) .trace(System.err); } maybeWorld.map(WorldServerBridge::bridge$getProxyAccess).ifPresent(transaction::postProcessBlocksAffected); // Clean up final BlockTransaction next = transaction.next; transaction.next = null; transaction.previous = null; transaction = next; } } finally { clearProxies(); resetTransactionLinks(); } return noCancelledTransactions; } @SuppressWarnings("RedundantCast") void getProxyOrCreate(final WorldServerBridge mixinWorldServer) { if (this.processingWorlds == null) { this.processingWorlds = new LinkedHashMap<>(); } SpongeProxyBlockAccess.Proxy existing = this.processingWorlds.get((WorldServer) mixinWorldServer); if (existing == null) { existing = mixinWorldServer.bridge$getProxyAccess().pushProxy(); this.processingWorlds.put((WorldServer) mixinWorldServer, existing); } } @Override public int hashCode() { return Objects.hashCode(this.snapshots); } @Override public boolean equals(@Nullable final Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } final MultiBlockCaptureSupplier other = (MultiBlockCaptureSupplier) obj; return Objects.equals(this.multimap, other.multimap); } @Override public String toString() { return com.google.common.base.MoreObjects.toStringHelper(this) .add("Captured", this.snapshots == null ? 0 : this.snapshots.size()) .add("Head", this.head == null ? "null" : this.head) .toString(); } public void clearProxies() { if (this.processingWorlds == null || this.processingWorlds.isEmpty()) { return; } for (final Map.Entry<WorldServer, SpongeProxyBlockAccess.Proxy> entry : this.processingWorlds.entrySet()) { try { entry.getValue().close(); } catch (final Exception e) { PhaseTracker.getInstance().printMessageWithCaughtException("Forcibly Closing Proxy", "Proxy Access could not be popped", e); } } this.processingWorlds.clear(); } public boolean hasTransactions() { return this.head != null; } public boolean hasBlocksCaptured() { return !(this.snapshots == null || this.snapshots.isEmpty()); } public void reset() { if (this.multimap != null) { // shouldn't but whatever, it's the end of a phase. this.multimap = null; } if (this.scheduledEvents != null) { this.scheduledEvents = null; } if (this.snapshots != null) { this.snapshots = null; } if (this.usedBlocks != null) { this.usedBlocks = null; } this.clearProxies(); this.transactionIndex = -1; this.snapshotIndex = -1; if (this.head != null) { resetTransactionLinks(); } } private void resetTransactionLinks() { for (BlockTransaction transaction = this.head; transaction != null; ) { final BlockTransaction next = transaction.next; transaction.previous = null; transaction.next = null; transaction = next; } this.head = null; this.tail = null; } }
src/main/java/org/spongepowered/common/event/tracking/context/MultiBlockCaptureSupplier.java
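The Javadoc on put(BlockSnapshot, IBlockState) above spells out the core capture strategy: keep a flat list holding the first snapshot per position and only materialize a LinkedListMultimap once some position is captured a second time, with the return value reporting whether the position was new. The sketch below is a stripped-down, generic rendering of that lazy list-to-multimap promotion using Guava's LinkedListMultimap (already on the classpath here); the class and member names (PerKeyCapture, keyOf, originals) are illustrative placeholders rather than Sponge API, and the real supplier layers BlockChange classification, transaction linking and world proxying on top of this.

import com.google.common.collect.LinkedListMultimap;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

final class PerKeyCapture<K, V> {

    private final Function<V, K> keyOf;            // how to re-key existing captures on promotion
    private final Set<K> usedKeys = new HashSet<>();
    private final List<V> firsts = new ArrayList<>();
    private LinkedListMultimap<K, V> multimap;     // created lazily on the first duplicate key

    PerKeyCapture(Function<V, K> keyOf) {
        this.keyOf = keyOf;
    }

    /** Returns true if this key had not been captured before (mirrors the contract of put above). */
    boolean put(K key, V value) {
        boolean added = usedKeys.add(key);
        if (added) {
            firsts.add(value);                     // the first capture per key doubles as the "original"
        }
        if (!added && multimap == null) {
            // First duplicate ever seen: promote the flat list into an
            // insertion-ordered multimap so intermediary captures are kept per key.
            multimap = LinkedListMultimap.create();
            for (V existing : firsts) {
                multimap.put(keyOf.apply(existing), existing);
            }
        }
        if (multimap != null) {
            multimap.put(key, value);
        }
        return added;
    }

    /** Unmodifiable view of the first capture per key, like MultiBlockCaptureSupplier#get. */
    List<V> originals() {
        return Collections.unmodifiableList(firsts);
    }
}

Instantiated as new PerKeyCapture<BlockPos, SpongeBlockSnapshot>(SpongeBlockSnapshot::getBlockPos), the sketch would reproduce the put semantics described above: true for a never-before-seen position, false once a position starts accumulating intermediary snapshots.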
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.common.event.tracking.context; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.LinkedListMultimap; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import net.minecraft.block.Block; import net.minecraft.block.BlockEventData; import net.minecraft.block.state.IBlockState; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.math.BlockPos; import net.minecraft.world.WorldServer; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.data.Transaction; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.world.BlockChangeFlag; import org.spongepowered.api.world.BlockChangeFlags; import org.spongepowered.asm.util.PrettyPrinter; import org.spongepowered.common.block.SpongeBlockSnapshot; import org.spongepowered.common.block.SpongeBlockSnapshotBuilder; import org.spongepowered.common.bridge.tileentity.TileEntityBridge; import org.spongepowered.common.bridge.world.WorldServerBridge; import org.spongepowered.common.event.tracking.IPhaseState; import org.spongepowered.common.event.tracking.PhaseContext; import org.spongepowered.common.event.tracking.PhaseTracker; import org.spongepowered.common.event.tracking.TrackingUtil; import org.spongepowered.common.mixin.core.world.WorldServerAccessor; import org.spongepowered.common.mixin.core.world.chunk.ChunkMixin; import org.spongepowered.common.world.BlockChange; import org.spongepowered.common.world.SpongeBlockChangeFlag; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.BiConsumer; import javax.annotation.Nullable; @SuppressWarnings("rawtypes") public final class MultiBlockCaptureSupplier implements ICaptureSupplier { public static final boolean PRINT_TRANSACTIONS = Boolean.valueOf(System.getProperty("sponge.debugBlockTransactions", "false")); @Nullable 
private LinkedListMultimap<BlockPos, SpongeBlockSnapshot> multimap; @Nullable private ListMultimap<BlockPos, BlockEventData> scheduledEvents; @Nullable private List<SpongeBlockSnapshot> snapshots; @Nullable private LinkedHashMap<WorldServer, SpongeProxyBlockAccess.Proxy> processingWorlds; @Nullable private Set<BlockPos> usedBlocks; private int transactionIndex = -1; // These are used to keep track of which snapshot is being referred to as "most recent change" private int snapshotIndex = -1; // so that we can appropriately cancel or discard or apply specific event transactions // We made BlockTransaction a Node and this is a pseudo LinkedList due to the nature of needing // to be able to track what block states exist at the time of the transaction while other transactions // are processing (because future transactions performing logic based on what exists at that state, // will potentially get contaminated information based on the last transaction prior to transaction // processing). Example: When starting to perform neighbor notifications during piston movement, one // can feasibly see that the block state is changed already without being able to get the appropriate // block state. @Nullable private BlockTransaction tail; @Nullable private BlockTransaction head; public MultiBlockCaptureSupplier() { } /** * Captures the provided {@link BlockSnapshot} into a {@link Multimap} backed collection. * The premise is that each {@link BlockPos} normally has a single {@link BlockChange}, * with the exceptions of certain few cases where multiple changes can occur for the same * position. The larger issue is that while the multiple changes are tracked, the desired * flag of changes does not result in a valid {@link BlockChange}, and therefor an invalid * {@link ChangeBlockEvent} is generated, potentially leading to duplication bugs with * protection plugins. As a result, the consuming {@link BlockSnapshot} is placed into * a {@link ListMultimap} keyed by the {@link BlockPos}, and if there are multiple snapshots * per {@link BlockPos}, has multiple changes will be {@code true}, and this method * will return {@code true}. * * @param snapshot The snapshot being captured * @param newState The most current new IBlockState to calculate the BlockChange flag * @return True if the block position has previously not been modified or captured yet */ public boolean put(final BlockSnapshot snapshot, final IBlockState newState) { // Start by figuring out the backing snapshot. In all likelyhood, we could just cast, but we want to be safe final SpongeBlockSnapshot backingSnapshot = getBackingSnapshot(snapshot); // Get the key of the block position, we know this is a pure block pos and not a mutable one too. final BlockPos blockPos = backingSnapshot.getBlockPos(); if (this.usedBlocks == null) { // Means we have a first usage. All three fields are null // At this point, we know we have not captured anything and // can just populate the normal list. this.usedBlocks = new HashSet<>(); this.usedBlocks.add(blockPos); this.addSnapshot(backingSnapshot); return true; } // This isn't our first rodeo... final boolean added = this.usedBlocks.add(blockPos); // add it to the set of positions already used and use the boolean if (this.multimap != null) { // Means we've already got multiple changes per position once before. // Likewise, the used blocks, snapshots and multimap will NOT be null. // more fasts, we know we have multiple block positions. 
// And we can find out if this is the first time we if (added) { // If the position hasn't been captured yet, that means we need to add it as an original // snapshot being changed, for the list usage. this.addSnapshot(backingSnapshot); } // we don't have to this.multimap.put(blockPos, backingSnapshot); // If the position is duplicated, we need to update the original snapshot of the now incoming block change // in relation to the original state (so if a block was set to air, then afterwards set to piston head, it should go from break to modify) if (!added) { associateBlockChangeForPosition(newState, blockPos); } return added; } // We have not yet checked if this incoming snapshot is a duplicate position if (!added) { // Ok, means we have a multi change on a same position, now to use the multimap // for the first time. this.multimap = LinkedListMultimap.create(); // LinkedListMultimap is insertion order respective, so the backed lists per // Now to populate it from the previously used list of snapshots... for (final SpongeBlockSnapshot existing : this.snapshots) { // Ignore snapshots potentially being null, it will never be null at this point. this.multimap.put(existing.getBlockPos(), existing); } // And place the snapshot into the multimap. this.multimap.put(blockPos, backingSnapshot); // Now we can re-evaluate the modified block position // If the position is duplicated, we need to update the original snapshot of the now incoming block change // in relation to the original state (so if a block was set to air, then afterwards set to piston head, it should go from break to modify) associateBlockChangeForPosition(newState, blockPos); return false; } // At this point, we haven't captured the block position yet. // and we can check if the list is null. this.addSnapshot(backingSnapshot); // And this is the only time that we return true, if we have not caught multiple transactions per position before. return true; } private void addSnapshot(final SpongeBlockSnapshot backingSnapshot) { if (this.snapshots == null) { this.snapshots = new ArrayList<>(); } this.snapshots.add(backingSnapshot); this.snapshotIndex++; } /** * Associates the desired block state {@link BlockChange} in comparison to the * already guaranteed original {@link SpongeBlockSnapshot} for proper event * creation when multiple block changes exist for the provided {@link BlockPos}. * * <p>Note: This method <strong>requires</strong> that {@link #multimap} is not * {@code null}, otherwise it will cause an NPE.</p> * * @param newState The incoming block change to compare to change * @param blockPos The block position to get the backing list from the multimap */ @SuppressWarnings("unchecked") private void associateBlockChangeForPosition(final IBlockState newState, final BlockPos blockPos) { final List<SpongeBlockSnapshot> list = this.multimap.get(blockPos); if (list != null && !list.isEmpty()) { final SpongeBlockSnapshot originalSnapshot = list.get(0); final PhaseContext<?> peek = PhaseTracker.getInstance().getCurrentContext(); final IBlockState currentState = (IBlockState) originalSnapshot.getState(); originalSnapshot.blockChange = ((IPhaseState) peek.state).associateBlockChangeWithSnapshot(peek, newState, newState.getBlock(), currentState, originalSnapshot, currentState.getBlock()); } } /** * Gets an <b>unmodifiable</b> {@link List} of the original * {@link BlockSnapshot}s being changed for their respective * {@link BlockPos Block Positions}. 
The list is self updating * and the {@link BlockSnapshot}s themselves are self updating * based on the current processes within the PhaseTracker's * {@link IPhaseState} the game is processing. The reasons for * this list to be unmodifiable except by this object are as follows: * <ul> * <li>Submitted {@link BlockSnapshot}s are to be added by the * {@link #put(BlockSnapshot, IBlockState)} method.</li> * <li>Adding multiple {@link BlockSnapshot}s per {@link BlockPos} * results in an internal restructuring of storage such that a * {@link Multimap} is created to keep track of intermediary * {@link BlockSnapshot}s. By this nature, the list cannot be modified * except by this capture object.</li> * <li>Removing a {@link BlockSnapshot} is only applicable via * {@link #prune(BlockSnapshot)} or {@link #clear()}. This is to * allow sanity checking for multimap purposes and garbage cleanup * when necessary.</li> * <li>The creation of {@link ChangeBlockEvent}s requires a * {@link Transaction} to be created, and plugins are only * exposed the {@link Transaction#getOriginal()} as the first * {@link BlockSnapshot} that would exist in this list. Intermediary * {@link BlockSnapshot} changes for that postiion are internally * utilized to process physics, but are not exposed to the event.</li> * </ul> * * @return An unmodifiable list of first block originals being changed */ public final List<SpongeBlockSnapshot> get() { return this.snapshots == null ? Collections.emptyList() : Collections.unmodifiableList(this.snapshots); } public final void prune(final BlockSnapshot snapshot) { if (this.isEmpty()) { throw new IllegalStateException("Unexpected pruning on an empty capture object for position " + snapshot.getPosition()); } // Start by figuring out the backing snapshot. In all likelyhood, we could just cast, but we want to be safe final SpongeBlockSnapshot backingSnapshot = getBackingSnapshot(snapshot); // Get the key of the block position, we know this is a pure block pos and not a mutable one too. final BlockPos blockPos = backingSnapshot.getBlockPos(); // Check if we have a multi-pos if (this.multimap != null) { pruneFromMulti(backingSnapshot, blockPos); return; } pruneSingle(backingSnapshot, blockPos); if (this.head != null) { pruneTransaction(getBackingSnapshot(snapshot)); } } private void pruneSingle(final SpongeBlockSnapshot backingSnapshot, final BlockPos blockPos) { if (this.usedBlocks == null) { // means we didn't actually capture??? throw new IllegalStateException("Expected to remove a single block change that was supposed to be captured...."); } if (this.snapshots == null) { // also means we didn't capture.... wtf is going on at this point? 
throw new IllegalStateException("Expected to remove a single block change that was supposed to be captured...."); } this.usedBlocks.remove(blockPos); this.snapshots.remove(backingSnapshot); // Should be the same snapshot used } private void pruneFromMulti(final SpongeBlockSnapshot backingSnapshot, final BlockPos blockPos) { final List<SpongeBlockSnapshot> snapshots = this.multimap.get(blockPos); if (snapshots != null) { for (final Iterator<SpongeBlockSnapshot> iterator = snapshots.iterator(); iterator.hasNext(); ) { final SpongeBlockSnapshot next = iterator.next(); if (next.getState().equals(backingSnapshot.getState())) { iterator.remove(); break; } } // If the list view is now empty, we need to prune the position from the multimap if (snapshots.isEmpty()) { this.multimap.removeAll(blockPos); // And then prune the snapshot from the list of firsts for (final Iterator<SpongeBlockSnapshot> firsts = this.snapshots.iterator(); firsts.hasNext(); ) { final SpongeBlockSnapshot next = firsts.next(); if (next.equals(backingSnapshot)) { firsts.remove(); // And if it's been found, remove the position from the used blocks as well. this.usedBlocks.remove(blockPos); break; } } if (this.snapshots.isEmpty()) { this.multimap = null; } } } } private SpongeBlockSnapshot getBackingSnapshot(final BlockSnapshot snapshot) { final SpongeBlockSnapshot backingSnapshot; if (!(snapshot instanceof SpongeBlockSnapshot)) { backingSnapshot = SpongeBlockSnapshotBuilder.pooled().from(snapshot).build(); } else { backingSnapshot = (SpongeBlockSnapshot) snapshot; } return backingSnapshot; } /** * Returns {@code true} if there are no captured objects. * * @return {@code true} if empty */ @Override public final boolean isEmpty() { return (this.snapshots == null || this.snapshots.isEmpty()) && this.head == null; } /** * If not empty, activates the {@link BiConsumer} then clears all captures. * The catch with this is that as the underlying list is guaranteed * to be the first {@link BlockSnapshot} change, multiple changes can exist * for an individual {@link BlockPos}, such that the multi-map needs to * provide said information to the consumer. * * <p>The first {@link List} parameter is identical to having been * built from the first elements of each list from the second parameter * of {@link Map} where {@link Map#values()} returns the insertion order * preserved {@link List} of {@link BlockSnapshot}s for the backed * {@link BlockPos}. In simpler terms, the {@link List} is built from * the first elements of each list from the {@link Map}s values.</p> * * @param consumer The consumer to activate */ @SuppressWarnings("UnstableApiUsage") // Guava marks Multimaps.asMap as Beta features public final void acceptAndClearIfNotEmpty(final BiConsumer<List<? extends BlockSnapshot>, Map<BlockPos, List<BlockSnapshot>>> consumer) { if (this.multimap != null) { final List<? extends BlockSnapshot> blockSnapshots = get(); // Since multimaps provide a view when asMap is called, we need to recreate the collection // of the map to pass into the consumer final Map<BlockPos, List<SpongeBlockSnapshot>> view = Multimaps.asMap(this.multimap); final Map<BlockPos, List<BlockSnapshot>> map = new LinkedHashMap<>(view.size()); for (final Map.Entry<BlockPos, List<SpongeBlockSnapshot>> entryView : view.entrySet()) { map.put(entryView.getKey(), new ArrayList<>(entryView.getValue())); } this.multimap.clear(); // Clean captured lists before they get potentially contaminated by processing. 
consumer.accept(blockSnapshots, map); // Accept the list and map } } /* Begin the more enhanced block tracking. This is only used by states that absolutely need to be able to track certain changes that involve more "physics" related transactions, such as neighbor notification tracking, tile entity tracking, and normally, intermediary transaction tracking. Because of these states, we need to envelop the information relating to: - The most recent block change, if it has been a change that was applied - The most recent tile entity being captured - The most recent neighbor notification in the order in which it is being applied to in comparison with the most recent block change In some rare cases, some block changes may take place after a neighbor notification is submitted, or a tile entity is being replaced. To accommodate this, when such cases arise, we attempt to snapshot any potential transactions that may take place prior to their blocks being changed, allowing us to take full snapshots of tile entities in the event a complete restoration is required. This is achieved through captureNeighborNotification and logTileChange. */ private void logTransaction(final BlockTransaction transaction) { if (this.tail != null) { this.tail.next = transaction; } else { this.head = transaction; } transaction.previous = this.tail; this.tail = transaction; } private void pruneTransaction(final SpongeBlockSnapshot snapshot) { if (this.head == null) { return; } for (BlockTransaction transaction = this.head; transaction != null; transaction = transaction.next) { if (transaction.equalsSnapshot(snapshot)) { final BlockTransaction previous = transaction.previous; final BlockTransaction next = transaction.next; if (previous == null) { this.head = next; } else { previous.next = next; transaction.previous = null; } if (next == null) { this.tail = previous; } else { next.previous = previous; transaction.next = null; } } } } public void captureNeighborNotification( final WorldServerBridge mixinWorldServer, final IBlockState notifyState, final BlockPos notifyPos, final Block sourceBlock, final BlockPos sourcePos) { final int transactionIndex = ++this.transactionIndex; final IBlockState actualSourceState = ((WorldServer) mixinWorldServer).getBlockState(sourcePos); final BlockTransaction.NeighborNotification notification = new BlockTransaction.NeighborNotification(transactionIndex, this.snapshotIndex, mixinWorldServer, notifyState, notifyPos, sourceBlock, sourcePos, actualSourceState); notification.enqueueChanges(mixinWorldServer.bridge$getProxyAccess(), this); logTransaction(notification); } /** * Specifically called by {@link ChunkMixin#bridge$setBlockState(BlockPos, IBlockState, IBlockState, BlockChangeFlag)} while it is preparing * various transactional aspects, such as potential tile entity removals, replacements, etc. Specifically should never be called outside * of that reaction since {@link BlockTransaction#enqueueChanges(SpongeProxyBlockAccess, MultiBlockCaptureSupplier)} * does not get called automatically, it is called prior to queueing potential tile replacements, and prior to calling to * {@link #logTileChange(WorldServerBridge, BlockPos, TileEntity, TileEntity)} in the event a tile entity is going to be removed.
* * @param originalBlockSnapshot The original snapshot being changed * @param newState The new state * @param flags The change flags * @return The constructed transaction */ public BlockTransaction.ChangeBlock logBlockChange(final SpongeBlockSnapshot originalBlockSnapshot, final IBlockState newState, final BlockChangeFlag flags) { this.put(originalBlockSnapshot, newState); // Always update the snapshot index before the block change is tracked final int transactionIndex = ++this.transactionIndex; final BlockTransaction.ChangeBlock changeBlock = new BlockTransaction.ChangeBlock(transactionIndex, this.snapshotIndex, originalBlockSnapshot, newState, (SpongeBlockChangeFlag) flags); logTransaction(changeBlock); return changeBlock; } public void logTileChange( final WorldServerBridge mixinWorldServer, final BlockPos pos, @Nullable final TileEntity oldTile, @Nullable final TileEntity newTile) { final WorldServer world = (WorldServer) mixinWorldServer; final IBlockState current = world.getBlockState(pos); if (this.tail instanceof BlockTransaction.ChangeBlock) { final BlockTransaction.ChangeBlock changeBlock = (BlockTransaction.ChangeBlock) this.tail; if (oldTile != null && newTile == null && changeBlock.queueBreak) { if (changeBlock.queuedRemoval == oldTile) { return; // Duplicate requests need to be silenced because multiple attempts to assure a tile is removed can be made // during breaking blocks. } changeBlock.queuedRemoval = oldTile; if (changeBlock.queueTileSet == null) { mixinWorldServer.bridge$getProxyAccess().queueRemoval(oldTile); } else { // Make sure the new tile entity has the correct position changeBlock.queueTileSet.setPos(pos); mixinWorldServer.bridge$getProxyAccess().queueReplacement(changeBlock.queueTileSet, changeBlock.queuedRemoval); mixinWorldServer.bridge$getProxyAccess().unmarkRemoval(pos, oldTile); } return; } } if (newTile != null && this.tail != null) { // Double check previous changes, if there's a remove tile entity, and previous to that, a change block, and this is an add tile entity, // well, we need to flip the ChangeBlock to avoid doing a breakBlock logic boolean isSame = false; for (BlockTransaction prevChange = this.tail; prevChange != null; prevChange = prevChange.previous) { if (prevChange instanceof BlockTransaction.ChangeBlock) { final BlockTransaction.ChangeBlock changeBlock = (BlockTransaction.ChangeBlock) prevChange; isSame = changeBlock.queuedRemoval == newTile; if (isSame) { changeBlock.ignoreBreakBlockLogic = true; changeBlock.queuedRemoval = null; ((TileEntityBridge) newTile).bridge$setCaptured(false); break; } } } if (isSame) { if (mixinWorldServer.bridge$getProxyAccess().isTileQueuedForRemoval(pos, newTile)) { mixinWorldServer.bridge$getProxyAccess().unmarkRemoval(pos, newTile); } return; } } final int transactionIndex = ++this.transactionIndex; if (oldTile != null) { final SpongeBlockSnapshot snapshot = mixinWorldServer.bridge$createSnapshotWithEntity(current, pos, BlockChangeFlags.NONE, oldTile); this.put(snapshot, current); if (newTile != null) { // replacing a tile. 
snapshot.blockChange = BlockChange.MODIFY; final BlockTransaction.ReplaceTileEntity transaction = new BlockTransaction.ReplaceTileEntity(transactionIndex, this.snapshotIndex, newTile, oldTile, snapshot); logTransaction(transaction); transaction.enqueueChanges(mixinWorldServer.bridge$getProxyAccess(),this); return; } // Removing the tile snapshot.blockChange = BlockChange.BREAK; final BlockTransaction.RemoveTileEntity transaction = new BlockTransaction.RemoveTileEntity(transactionIndex, this.snapshotIndex, oldTile, snapshot); transaction.enqueueChanges(mixinWorldServer.bridge$getProxyAccess(), this); logTransaction(transaction); return; } if (newTile != null) { final SpongeBlockSnapshot snapshot = mixinWorldServer.bridge$createSnapshotWithEntity(current, pos, BlockChangeFlags.NONE, newTile); snapshot.blockChange = BlockChange.PLACE; final BlockTransaction.AddTileEntity transaction = new BlockTransaction.AddTileEntity(transactionIndex, this.snapshotIndex, newTile, snapshot); transaction.enqueueChanges(mixinWorldServer.bridge$getProxyAccess(), this); logTransaction(transaction); } } void queuePreviousStates(final BlockTransaction transaction) { if (this.head != null) { if (transaction == this.head) { return; } for (BlockTransaction prevChange = this.head; prevChange != null; prevChange = prevChange.next) { if (transaction.appliedPreChange) { // Short circuit. It will not have already applied changes to the previous // changes until it at least applies them to the first entry (head). return; } transaction.provideUnchangedStates(prevChange); } } } public void cancelTransaction(final BlockSnapshot original) { if (this.tail == null) { return; } final SpongeBlockSnapshot snapshot = (SpongeBlockSnapshot) original; final BlockPos blockPos = snapshot.getBlockPos(); snapshot.getWorldServer().ifPresent(worldServer -> { for (BlockTransaction prevChange = this.tail; prevChange != null; prevChange = prevChange.previous) { if (!prevChange.isCancelled) { prevChange.cancel(worldServer, blockPos, ((WorldServerBridge) worldServer).bridge$getProxyAccess()); } } }); } public void clear() { if (this.multimap != null) { this.multimap.clear(); this.multimap = null; } if (this.snapshots != null) { this.snapshots.clear(); this.snapshots = null; } if (this.usedBlocks != null) { this.usedBlocks.clear(); } if (this.scheduledEvents != null) { this.scheduledEvents.clear(); } this.snapshotIndex = -1; this.transactionIndex = -1; } public void restoreOriginals() { if (this.snapshots != null && !this.snapshots.isEmpty()) { for (final SpongeBlockSnapshot original : Lists.reverse(this.snapshots)) { original.restore(true, BlockChangeFlags.NONE); } this.clear(); } } public Optional<Transaction<BlockSnapshot>> createTransaction(final SpongeBlockSnapshot snapshot) { final Optional<WorldServer> maybeWorld = snapshot.getWorldServer(); if (!maybeWorld.isPresent()) { return Optional.empty(); } final WorldServer worldServer = maybeWorld.get(); final BlockPos blockPos = snapshot.getBlockPos(); final IBlockState newState = worldServer.getBlockState(blockPos); // Because enhanced tracking requires handling very specific proxying of block states // so, the requests for the actual states sometimes may cause issues with mods and their // extended state handling logic if what the world sees is different from what our tracker // saw, so, we have to just provide the new state (extended states are calculated anyways). final IBlockState newActualState = this.head != null ? 
newState : newState.getActualState(worldServer, blockPos); final BlockSnapshot newSnapshot = ((WorldServerBridge) worldServer).bridge$createSnapshot(newState, newActualState, blockPos, BlockChangeFlags.NONE); // Up until this point, we can create a default Transaction if (this.multimap != null) { // But we need to check if there's any intermediary block changes... // And because multi is true, we can be sure the multimap is populated at least somewhere. final List<SpongeBlockSnapshot> intermediary = this.multimap.get(blockPos); if (!intermediary.isEmpty() && intermediary.size() > 1) { // We need to make a carbon copy of the list since it's technically a key view list // within the multimap, so, if the multimap is cleared, at the very least, the list will // not be cleared. Likewise, we also need to skip over the first element since the snapshots // list will have that element anyways (we don't want to be providing duplicate snapshots // for plugins to witness and come to expect that they are intermediary states, when they're still the original positions final ImmutableList.Builder<SpongeBlockSnapshot> builder = ImmutableList.builder(); boolean movedPastFirst = false; for (final Iterator<SpongeBlockSnapshot> iterator = intermediary.iterator(); iterator.hasNext(); ) { if (!movedPastFirst) { iterator.next(); movedPastFirst = true; continue; } builder.add(iterator.next()); } return Optional.of(new Transaction<>(snapshot, newSnapshot, builder.build())); } } return Optional.of(new Transaction<>(snapshot, newSnapshot)); } public boolean trackEvent(final BlockPos pos, final BlockEventData blockEventData) { if (this.usedBlocks != null && this.usedBlocks.contains(pos)) { if (this.scheduledEvents == null) { this.scheduledEvents = LinkedListMultimap.create(); } this.scheduledEvents.put(pos.toImmutable(), blockEventData); return true; } return false; } public ListMultimap<BlockPos, BlockEventData> getScheduledEvents() { return this.scheduledEvents == null || this.scheduledEvents.isEmpty() ? ImmutableListMultimap.of() : ArrayListMultimap.create(this.scheduledEvents); } @SuppressWarnings("ReturnInsideFinallyBlock") public boolean processTransactions(final List<Transaction<BlockSnapshot>> transactions, final PhaseContext<?> phaseContext, final boolean noCancelledTransactions, final ListMultimap<BlockPos, BlockEventData> scheduledEvents, final int currentDepth) { final IPhaseState phaseState = phaseContext.state; int targetIndex = 0; if (this.tail == null) { boolean hasEvents = false; if (!scheduledEvents.isEmpty()) { hasEvents = true; } for (final Transaction<BlockSnapshot> transaction : transactions) { if (!transaction.isValid()) { continue; } TrackingUtil.performTransactionProcess(transaction, phaseContext, currentDepth); if (hasEvents) { final SpongeBlockSnapshot original = (SpongeBlockSnapshot) transaction.getOriginal(); original.getWorldServer().ifPresent(worldServer -> { final WorldServerAccessor accessor = (WorldServerAccessor) worldServer; final WorldServer.ServerBlockEventList queue = accessor.getBlockEventQueueForSponge()[accessor.getBlockEventCacheIndexForSponge()]; for (final BlockEventData blockEventData : scheduledEvents.get(original.getBlockPos())) { boolean equals = false; for (final BlockEventData eventData : queue) { if (eventData.equals(blockEventData)) { equals = true; break; } } if (!equals) { queue.add(blockEventData); } } }); } } return noCancelledTransactions; } Transaction<BlockSnapshot> eventTransaction = transactions.isEmpty() ? 
null : transactions.get(targetIndex); try { // now to clear this suppliers information before we start proceeding final BlockTransaction head = this.head; this.head = null; this.tail = null; for (BlockTransaction transaction = head; transaction != null; ) { if (transaction.snapshotIndex > targetIndex) { targetIndex++; eventTransaction = transactions.get(targetIndex); } if (eventTransaction != null && !eventTransaction.isValid()) { final BlockTransaction next = transaction.next; transaction.next = null; transaction.previous = null; transaction = next; continue; } final Optional<WorldServerBridge> maybeWorld = transaction.getWorldServer(); final BlockTransaction derp = transaction; try (@SuppressWarnings("try") final SpongeProxyBlockAccess access = maybeWorld.map( WorldServerBridge::bridge$getProxyAccess).map(proxy -> proxy.switchTo(derp)).orElse(null); final SpongeProxyBlockAccess.Proxy ignored = maybeWorld.map(transaction::getProxy).orElse(null)){ final PrettyPrinter printer; if (PRINT_TRANSACTIONS) { printer = new PrettyPrinter(60).add("Debugging BlockTransaction").centre().hr() .addWrapped(60, "This is a process printout of the information passed along from the Proxy and the world.") .add() .add("Proxy Container:"); } else { printer = null; } if (transaction.blocksNotAffected != null) { transaction.blocksNotAffected.forEach((pos, block) -> { if (PRINT_TRANSACTIONS) { printer.addWrapped(120, " %s : %s, %s", "UnaffectedBlock", pos, block); } if (access != null) { access.proceed(pos, block, false); } }); } if (transaction.tilesAtTransaction != null) { transaction.tilesAtTransaction.forEach((pos, tile) -> { if (PRINT_TRANSACTIONS) { printer.addWrapped(120, " %s : %s, %s", "UnaffectedTile", pos, tile == null ? "null" : ((TileEntityBridge) tile).bridge$getPrettyPrinterString()); } if (access != null) { access.pushTile(pos, tile); } }); } if (PRINT_TRANSACTIONS) { if (access != null) { access.addToPrinter(printer); } transaction.addToPrinter(printer); printer.print(System.err); } transaction.process(eventTransaction, phaseState, phaseContext, currentDepth); } catch (final Exception e) { final PrettyPrinter printer = new PrettyPrinter(60).add("Exception while trying to apply transaction").centre().hr() .addWrapped(60, "BlockTransactions failing to process can lead to unintended consequences. 
If the exception is *directly* coming from Sponge's code, please report to Sponge.") .add(); maybeWorld.map(WorldServerBridge::bridge$getProxyAccess).ifPresent(access -> access.addToPrinter(printer)); transaction.addToPrinter(printer); printer.add(); printer .add("Exception: ") .add(e) .trace(System.err); } maybeWorld.map(WorldServerBridge::bridge$getProxyAccess).ifPresent(transaction::postProcessBlocksAffected); // Clean up final BlockTransaction next = transaction.next; transaction.next = null; transaction.previous = null; transaction = next; } } finally { if (this.processingWorlds == null) { return noCancelledTransactions; } for (final Map.Entry<WorldServer, SpongeProxyBlockAccess.Proxy> entry : this.processingWorlds.entrySet()) { try { entry.getValue().close(); } catch (final Exception e) { PhaseTracker.getInstance().printMessageWithCaughtException("Forcibly Closing Proxy", "Proxy Access could not be popped", e); } } this.processingWorlds.clear(); for (BlockTransaction transaction = this.head; transaction != null; ) { final BlockTransaction next = transaction.next; transaction.previous = null; transaction.next = null; transaction = next; } this.head = null; this.tail = null; } return noCancelledTransactions; } @SuppressWarnings("RedundantCast") void getProxyOrCreate(final WorldServerBridge mixinWorldServer) { if (this.processingWorlds == null) { this.processingWorlds = new LinkedHashMap<>(); } SpongeProxyBlockAccess.Proxy existing = this.processingWorlds.get((WorldServer) mixinWorldServer); if (existing == null) { existing = mixinWorldServer.bridge$getProxyAccess().pushProxy(); this.processingWorlds.put((WorldServer) mixinWorldServer, existing); } } @Override public int hashCode() { return Objects.hashCode(this.snapshots); } @Override public boolean equals(@Nullable final Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } final MultiBlockCaptureSupplier other = (MultiBlockCaptureSupplier) obj; return Objects.equals(this.multimap, other.multimap); } @Override public String toString() { return com.google.common.base.MoreObjects.toStringHelper(this) .add("Captured", this.snapshots == null ? 0 : this.snapshots.size()) .add("Head", this.head == null ? "null" : this.head) .toString(); } public void clearProxies() { if (this.processingWorlds == null || this.processingWorlds.isEmpty()) { return; } for (final Map.Entry<WorldServer, SpongeProxyBlockAccess.Proxy> entry : this.processingWorlds.entrySet()) { try { entry.getValue().close(); } catch (final Exception e) { PhaseTracker.getInstance().printMessageWithCaughtException("Forcibly Closing Proxy", "Proxy Access could not be popped", e); } } } public boolean hasTransactions() { return this.head != null; } public boolean hasBlocksCaptured() { return !(this.snapshots == null || this.snapshots.isEmpty()); } public void reset() { if (this.multimap != null) { // shouldn't but whatever, it's the end of a phase. this.multimap.clear(); this.multimap = null; } if (this.scheduledEvents != null) { this.scheduledEvents.clear(); } if (this.snapshots != null) { this.snapshots.clear(); this.snapshots = null; } if (this.usedBlocks != null) { this.usedBlocks.clear(); } this.clearProxies(); this.transactionIndex = -1; this.snapshotIndex = -1; if (this.head != null) { this.head = null; this.tail = null; for (BlockTransaction transaction = this.head; transaction != null; ) { final BlockTransaction next = transaction.next; transaction.previous = null; transaction.next = null; transaction = next; } } } }
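The logTransaction and pruneTransaction methods in the record above maintain an intrusive doubly-linked list of BlockTransaction nodes through the head, tail, next and previous fields. A minimal, self-contained sketch of that bookkeeping pattern follows; TransactionList and Node are hypothetical stand-ins for illustration only, not types from the record.

public final class TransactionList {
    // Hypothetical stand-in for BlockTransaction: each node carries its own links.
    static final class Node {
        Node previous;
        Node next;
        final String payload;
        Node(final String payload) { this.payload = payload; }
    }

    private Node head;
    private Node tail;

    // Same shape as logTransaction: append at the tail, or start the list if empty.
    void append(final Node node) {
        if (this.tail != null) {
            this.tail.next = node;
        } else {
            this.head = node;
        }
        node.previous = this.tail;
        this.tail = node;
    }

    // Same shape as the splice step in pruneTransaction: unlink a node and clear its links.
    void unlink(final Node node) {
        final Node previous = node.previous;
        final Node next = node.next;
        if (previous == null) {
            this.head = next;
        } else {
            previous.next = next;
        }
        if (next == null) {
            this.tail = previous;
        } else {
            next.previous = previous;
        }
        node.previous = null;
        node.next = null;
    }
}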
Inline micro optimize some bits in MultiBlockCaptureSupplier. (#2636) Signed-off-by: Gabriel Harris-Rouquette <[email protected]>
src/main/java/org/spongepowered/common/event/tracking/context/MultiBlockCaptureSupplier.java
Inline micro optimize some bits in MultiBlockCaptureSupplier. (#2636)
Java
mit
de402da80adaec172d10baed525d2ce171cf8fa5
0
razreg/ubongo,razreg/ubongo,razreg/ubongo,razreg/ubongo,razreg/ubongo
package ubongo.common.constants; public class MachineConstants { public static final String SERVER_FALLBACK = "nova.cs.tau.ac.il"; public static final String BASE_UNIT_REQUEST = "Unit task"; public static final String KILL_TASK_REQUEST = "Kill unit task"; public static final String UPDATE_TASK_REQUEST = "Update task status"; public static String GET_MACHINE_PERFORMANCE = "Get machine performance request"; public static final String INPUT_DIR_SUFFIX = "_ubongo_in"; public static final String OUTPUT_DIR_SUFFIX = "_ubongo_out"; public static final String ARG_SERVER = "server"; public static final String ARG_DIR = "base_dir"; public static final String ARG_UNITS = "units_dir"; public static final String ARG_QUERIES_PATH = "queries"; public static final String ARG_CONFIG_PATH = "config"; }
infra/common/src/main/java/ubongo/common/constants/MachineConstants.java
package ubongo.common.constants; public class MachineConstants { public static final String SERVER_FALLBACK = "nova.cs.tau.ac.il"; public static final String BASE_UNIT_REQUEST = "Base unit task"; public static final String KILL_TASK_REQUEST = "Kill unit task"; public static final String UPDATE_TASK_REQUEST = "Update task status"; public static String GET_MACHINE_PERFORMANCE = "Get machine performance request"; public static final String INPUT_DIR_SUFFIX = "_ubongo_in"; public static final String OUTPUT_DIR_SUFFIX = "_ubongo_out"; public static final String ARG_SERVER = "server"; public static final String ARG_DIR = "base_dir"; public static final String ARG_UNITS = "units_dir"; public static final String ARG_QUERIES_PATH = "queries"; public static final String ARG_CONFIG_PATH = "config"; }
Machine changes
infra/common/src/main/java/ubongo/common/constants/MachineConstants.java
Machine changes
Java
mit
721c6d9a903e0224d80d0059b8fdcaf3cdaa353b
0
bitcoin-solutions/multibit-hd,oscarguindzberg/multibit-hd,akonring/multibit-hd-modified,akonring/multibit-hd-modified,bitcoin-solutions/multibit-hd,bitcoin-solutions/multibit-hd,akonring/multibit-hd-modified,oscarguindzberg/multibit-hd,oscarguindzberg/multibit-hd
package org.multibit.hd.ui.views.wizards.welcome; import com.google.common.base.Optional; import com.google.common.base.Strings; import net.miginfocom.swing.MigLayout; import org.multibit.hd.core.api.seed_phrase.SeedPhraseSize; import org.multibit.hd.ui.events.view.ViewEvents; import org.multibit.hd.ui.i18n.MessageKey; import org.multibit.hd.ui.views.components.*; import org.multibit.hd.ui.views.components.enter_seed_phrase.EnterSeedPhraseModel; import org.multibit.hd.ui.views.components.enter_seed_phrase.EnterSeedPhraseView; import org.multibit.hd.ui.views.components.select_file.SelectFileModel; import org.multibit.hd.ui.views.components.select_file.SelectFileView; import org.multibit.hd.ui.views.wizards.AbstractWizard; import org.multibit.hd.ui.views.wizards.AbstractWizardPanelView; import org.multibit.hd.ui.views.wizards.WizardButton; import javax.swing.*; /** * <p>Wizard to provide the following to UI:</p> * <ul> * <li>Restore wallet from backup</li> * </ul> * * @since 0.0.1 *   */ public class RestoreWalletBackupPanelView extends AbstractWizardPanelView<WelcomeWizardModel, RestoreWalletBackupPanelModel> { private ModelAndView<SelectFileModel, SelectFileView> selectFileMaV; private ModelAndView<EnterSeedPhraseModel, EnterSeedPhraseView> enterSeedPhraseMaV; /** * @param wizard The wizard managing the states * @param panelName The panel name to filter events from components */ public RestoreWalletBackupPanelView(AbstractWizard<WelcomeWizardModel> wizard, String panelName) { super(wizard.getWizardModel(), panelName, MessageKey.RESTORE_WALLET_BACKUP_TITLE); PanelDecorator.addExitCancelPreviousNext(this, wizard); } @Override public void newPanelModel() { // Component models selectFileMaV = Components.newSelectFileMaV(getPanelName()); enterSeedPhraseMaV = Components.newEnterSeedPhraseMaV(getPanelName(),false); RestoreWalletBackupPanelModel panelModel = new RestoreWalletBackupPanelModel( getPanelName(), selectFileMaV.getModel(), enterSeedPhraseMaV.getModel() ); setPanelModel(panelModel); getWizardModel().setBackupLocationSelectFileModel(selectFileMaV.getModel()); getWizardModel().setRestoreWalletEnterSeedPhraseModel(enterSeedPhraseMaV.getModel()); } @Override public JPanel newWizardViewPanel() { JPanel panel = Panels.newPanel(new MigLayout( "fillx,insets 0", // Layout constraints "[][]", // Column constraints "[][][]" // Row constraints )); panel.add(Panels.newRestoreFromBackup(), "span 2,grow,wrap"); panel.add(enterSeedPhraseMaV.getView().newComponentPanel(), "span 2,wrap"); panel.add(Labels.newSelectFolder()); panel.add(selectFileMaV.getView().newComponentPanel(), "grow,wrap"); return panel; } @Override public void updateFromComponentModels(Optional componentModel) { // Do nothing we have a direct reference // Enable the "next" button if the backup location is present and the seed phrase has a valid size boolean backupLocationPresent = !Strings.isNullOrEmpty(selectFileMaV.getModel().getValue()); boolean seedPhraseSizeValid = SeedPhraseSize.isValid(enterSeedPhraseMaV.getModel().getValue().size()); boolean result = backupLocationPresent && seedPhraseSizeValid; ViewEvents.fireWizardButtonEnabledEvent( getPanelName(), WizardButton.NEXT, result ); } }
mbhd-swing/src/main/java/org/multibit/hd/ui/views/wizards/welcome/RestoreWalletBackupPanelView.java
package org.multibit.hd.ui.views.wizards.welcome; import com.google.common.base.Optional; import com.google.common.base.Strings; import net.miginfocom.swing.MigLayout; import org.multibit.hd.core.api.seed_phrase.SeedPhraseSize; import org.multibit.hd.ui.events.view.ViewEvents; import org.multibit.hd.ui.i18n.MessageKey; import org.multibit.hd.ui.views.components.*; import org.multibit.hd.ui.views.components.enter_seed_phrase.EnterSeedPhraseModel; import org.multibit.hd.ui.views.components.enter_seed_phrase.EnterSeedPhraseView; import org.multibit.hd.ui.views.components.select_file.SelectFileModel; import org.multibit.hd.ui.views.components.select_file.SelectFileView; import org.multibit.hd.ui.views.wizards.AbstractWizard; import org.multibit.hd.ui.views.wizards.AbstractWizardPanelView; import org.multibit.hd.ui.views.wizards.WizardButton; import javax.swing.*; /** * <p>Wizard to provide the following to UI:</p> * <ul> * <li>Restore wallet from backup</li> * </ul> * * @since 0.0.1 *   */ public class RestoreWalletBackupPanelView extends AbstractWizardPanelView<WelcomeWizardModel, RestoreWalletBackupPanelModel> { private ModelAndView<SelectFileModel, SelectFileView> selectFileMaV; private ModelAndView<EnterSeedPhraseModel, EnterSeedPhraseView> enterSeedPhraseMaV; /** * @param wizard The wizard managing the states * @param panelName The panel name to filter events from components */ public RestoreWalletBackupPanelView(AbstractWizard<WelcomeWizardModel> wizard, String panelName) { super(wizard.getWizardModel(), panelName, MessageKey.RESTORE_WALLET_BACKUP_TITLE); PanelDecorator.addExitCancelPreviousNext(this, wizard); } @Override public void newPanelModel() { // Component models selectFileMaV = Components.newSelectFileMaV(getPanelName()); enterSeedPhraseMaV = Components.newEnterSeedPhraseMaV(getPanelName(),false); RestoreWalletBackupPanelModel panelModel = new RestoreWalletBackupPanelModel( getPanelName(), selectFileMaV.getModel(), enterSeedPhraseMaV.getModel() ); setPanelModel(panelModel); getWizardModel().setBackupLocationSelectFileModel(selectFileMaV.getModel()); } @Override public JPanel newWizardViewPanel() { JPanel panel = Panels.newPanel(new MigLayout( "fillx,insets 0", // Layout constraints "[][]", // Column constraints "[][][]" // Row constraints )); panel.add(Panels.newRestoreFromBackup(), "span 2,grow,wrap"); panel.add(enterSeedPhraseMaV.getView().newComponentPanel(), "span 2,wrap"); panel.add(Labels.newSelectFolder()); panel.add(selectFileMaV.getView().newComponentPanel(), "grow,wrap"); return panel; } @Override public void updateFromComponentModels(Optional componentModel) { // Do nothing we have a direct reference // Enable the "next" button if the backup location is present and the seed phrase has a valid size boolean backupLocationPresent = !Strings.isNullOrEmpty(selectFileMaV.getModel().getValue()); boolean seedPhraseSizeValid = SeedPhraseSize.isValid(enterSeedPhraseMaV.getModel().getValue().size()); boolean result = backupLocationPresent && seedPhraseSizeValid; ViewEvents.fireWizardButtonEnabledEvent( getPanelName(), WizardButton.NEXT, result ); } }
Track the seed phrase during restore from backup
mbhd-swing/src/main/java/org/multibit/hd/ui/views/wizards/welcome/RestoreWalletBackupPanelView.java
Track the seed phrase during restore from backup
Java
epl-1.0
67ee6b730bcffa1881c3d99b3aaf89d414607ec7
0
sdirix/emf2web,sdirix/emf2web,sdirix/emf2web
package org.eclipse.emf.ecp.emf2web.wizard.pages; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IWorkspace; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.Path; import org.eclipse.emf.ecp.emf2web.wizard.ViewModelExportWizard; import org.eclipse.jface.fieldassist.ControlDecoration; import org.eclipse.jface.fieldassist.FieldDecoration; import org.eclipse.jface.fieldassist.FieldDecorationRegistry; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.jface.wizard.IWizard; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.dialogs.ElementTreeSelectionDialog; import org.eclipse.ui.model.BaseWorkbenchContentProvider; import org.eclipse.ui.model.WorkbenchLabelProvider; public class ModelPathsPage extends WizardPage { private Text ecoremodelText; private Text genmodelText; private Button ecoreBrowse; private Button genBrowse; private IFile ecoreModel; private IFile genModel; private ControlDecoration ecoreControlDecoration; private ControlDecoration genControlDecoration; private Button projectSettingsButton; private Combo combo; private Group grpProjectSettings; private Text projectSettingsText; private Label projectSettingsLabel; private boolean createNewProject = false; private IProject selectedProject = null; private String projectName = ""; private ControlDecoration projectControlDecoration; /** * Create the wizard. */ public ModelPathsPage(IFile ecoreModel, IFile genModel) { super("wizardPage"); setTitle("ECP Model Exporter"); setDescription("Select the models and the project to export to"); this.ecoreModel = ecoreModel; this.genModel = genModel; } public boolean getCreateNewProject() { return createNewProject; } public IProject getSelectedProject() { return selectedProject; } public String getProjectName() { return projectName; } /** * Create contents of the wizard. 
* * @param parent */ public void createControl(Composite parent) { Composite container = new Composite(parent, SWT.NULL); FieldDecoration errorFieldDecoration = FieldDecorationRegistry .getDefault().getFieldDecoration( FieldDecorationRegistry.DEC_ERROR); Image errorImage = errorFieldDecoration.getImage(); FieldDecoration warningFieldDecoration = FieldDecorationRegistry .getDefault().getFieldDecoration( FieldDecorationRegistry.DEC_WARNING); Image warningImage = warningFieldDecoration.getImage(); setControl(container); container.setLayout(new GridLayout(2, false)); Label lblEmfEcoreModel = new Label(container, SWT.NONE); lblEmfEcoreModel.setText("EMF Ecore Model"); new Label(container, SWT.NONE); ecoremodelText = new Text(container, SWT.BORDER); ecoremodelText.addModifyListener(new EcoremodelTextModifyListener()); ecoremodelText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); ecoreControlDecoration = new ControlDecoration(ecoremodelText, SWT.LEFT | SWT.TOP); ecoreControlDecoration.setDescriptionText("Please enter a valid file"); ecoreControlDecoration.setImage(errorImage); ecoreControlDecoration.hide(); ecoreBrowse = new Button(container, SWT.NONE); ecoreBrowse.addSelectionListener(new EcoreBrowseSelectionListener()); ecoreBrowse.setText("Browse"); Label lblEmfEcoreGen = new Label(container, SWT.NONE); lblEmfEcoreGen.setText("EMF Ecore Gen Model (optional)"); new Label(container, SWT.NONE); genmodelText = new Text(container, SWT.BORDER); genmodelText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); genControlDecoration = new ControlDecoration(genmodelText, SWT.LEFT | SWT.TOP); genControlDecoration.setDescriptionText("Please enter a valid file"); genControlDecoration.setImage(errorImage); genControlDecoration.hide(); genBrowse = new Button(container, SWT.NONE); genBrowse.addSelectionListener(new EcoreBrowseSelectionListener()); genBrowse.setText("Browse"); new Label(container, SWT.NONE); new Label(container, SWT.NONE); grpProjectSettings = new Group(container, SWT.NONE); grpProjectSettings.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 2, 1)); grpProjectSettings.setText("Project Settings"); grpProjectSettings.setLayout(new GridLayout(2, false)); Composite composite = new Composite(grpProjectSettings, SWT.NONE); composite.setLayoutData(new GridData(SWT.LEFT, SWT.CENTER, false, false, 2, 1)); composite.setBounds(0, 0, 64, 64); composite.setLayout(new GridLayout(2, false)); Label lblNewLabel = new Label(composite, SWT.NONE); lblNewLabel.setText("Action:"); combo = new Combo(composite, SWT.NONE); combo.addSelectionListener(new ComboSelectionListener()); combo.setItems(new String[] { "Update existing Project", "Create new Project" }); combo.select(0); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); projectSettingsLabel = new Label(grpProjectSettings, SWT.NONE); projectSettingsLabel.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 1, 1)); projectSettingsLabel.setBounds(0, 0, 55, 15); projectSettingsLabel.setText("Select Project"); new Label(grpProjectSettings, SWT.NONE); projectSettingsText = new Text(grpProjectSettings, SWT.BORDER); projectSettingsText .addModifyListener(new ProjectSettingsTextModifyListener()); projectSettingsText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); projectControlDecoration = new ControlDecoration(projectSettingsText, SWT.LEFT | SWT.TOP); projectControlDecoration .setDescriptionText("Please enter a valid project name"); 
projectControlDecoration.setImage(errorImage); projectControlDecoration.hide(); projectSettingsButton = new Button(grpProjectSettings, SWT.NONE); projectSettingsButton.setSize(50, 25); projectSettingsButton .addSelectionListener(new BtnNewButtonSelectionListener()); projectSettingsButton.setText("Browse"); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); Label lblNewLabel_1 = new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); init(); } private void init() { if (ecoreModel != null) { ecoremodelText.setText(ecoreModel.getFullPath().toString()); } if (genModel != null) { genmodelText.setText(ecoreModel.getFullPath().toString()); } checkForPageCompletion(); } private void checkForPageCompletion() { boolean pageComplete = true; String message = null; if (!createNewProject) { if (selectedProject == null) { projectControlDecoration.show(); pageComplete = false; } else { projectControlDecoration.hide(); } } else { if (selectedProject != null) { projectControlDecoration.show(); pageComplete = false; }else{ projectControlDecoration.hide(); } } if (ecoreModel == null) { pageComplete = false; } else if (!ecoreModel.exists()) { ecoreControlDecoration.show(); message = "Please enter a valid ecore file"; pageComplete = false; } else { ecoreControlDecoration.hide(); setMessage(null); } if (genModel == null) { // do nothing } else if (!genModel.exists()) { genControlDecoration.show(); } else { genControlDecoration.hide(); } if (isPageComplete() != pageComplete) { setPageComplete(pageComplete); } if (getMessage() == null || !getMessage().equals(message)) { setMessage(message); } } private class EcoreBrowseSelectionListener extends SelectionAdapter { @Override public void widgetSelected(SelectionEvent e) { ElementTreeSelectionDialog dialog = new ElementTreeSelectionDialog( getShell(), new WorkbenchLabelProvider(), new BaseWorkbenchContentProvider()); final String modelType; final String modelExtension; final Text modelText; if (e.getSource() == ecoreBrowse) { modelType = "Ecore Model"; modelExtension = ".ecore"; modelText = ecoremodelText; } else { modelType = "Ecore Gen Model"; modelExtension = ".genmodel"; modelText = genmodelText; } dialog.setTitle(modelType + " Selection"); dialog.setMessage("Select a " + modelType + " from the workspace"); dialog.addFilter(new ViewerFilter() { @Override public boolean select(Viewer viewer, Object parentElement, Object element) { if (element instanceof IFile) { IFile file = (IFile) element; return file.getName().endsWith(modelExtension); } return true; } }); dialog.setInput(ResourcesPlugin.getWorkspace().getRoot()); if (dialog.open() == ElementTreeSelectionDialog.OK) { Object result = dialog.getFirstResult(); if (result instanceof IFile) { IFile file = (IFile) result; modelText.setText(file.getFullPath().toString()); } } } } private class EcoremodelTextModifyListener implements ModifyListener { public void modifyText(ModifyEvent e) { String text = ((Text) e.getSource()).getText(); IPath path = new Path(text); IWorkspace workspace = ResourcesPlugin.getWorkspace(); IFile file = workspace.getRoot().getFile(path); if (e.getSource() == ecoremodelText) { ecoreModel = file; if (getExportWizard() != null && ecoreModel.exists()) { getExportWizard().setEcoreModel(ecoreModel); 
} } else { genModel = file; if (getExportWizard() != null && genModel.exists()) { getExportWizard().setGenModel(genModel); } } checkForPageCompletion(); } } private class BtnNewButtonSelectionListener extends SelectionAdapter { @Override public void widgetSelected(SelectionEvent e) { ElementTreeSelectionDialog dialog = new ElementTreeSelectionDialog( getShell(), new WorkbenchLabelProvider(), new BaseWorkbenchContentProvider()); dialog.setTitle("Select Play Application"); dialog.setMessage("Select your Play Application Project"); dialog.addFilter(new ViewerFilter() { @Override public boolean select(Viewer viewer, Object parentElement, Object element) { if (element instanceof IProject) { return true; } return false; } }); dialog.setInput(ResourcesPlugin.getWorkspace().getRoot()); if (dialog.open() == ElementTreeSelectionDialog.OK) { Object result = dialog.getFirstResult(); if (result instanceof IProject) { IProject project = (IProject) result; selectedProject = project; projectSettingsText.setText(project.getName()); } } } } private class ComboSelectionListener extends SelectionAdapter { @Override public void widgetSelected(SelectionEvent e) { if (combo.getSelectionIndex() == 0) { projectSettingsLabel.setText("Select Project"); createNewProject = false; if (selectedProject != null) { projectSettingsText.setText(selectedProject.getName()); } projectSettingsButton.setEnabled(true); } else if (combo.getSelectionIndex() == 1) { projectSettingsLabel.setText("Enter new Project Name"); createNewProject = true; projectSettingsButton.setEnabled(false); } checkForPageCompletion(); } } private class ProjectSettingsTextModifyListener implements ModifyListener { public void modifyText(ModifyEvent e) { IWorkspace workspace = ResourcesPlugin.getWorkspace(); boolean found = false; for (IProject project : workspace.getRoot().getProjects()) { String searchName = projectSettingsText.getText() != null ? projectSettingsText .getText().trim() : ""; if (searchName.equals(project.getName())) { selectedProject = project; found = true; } } if (!found) { selectedProject = null; } projectName = projectSettingsText.getText(); checkForPageCompletion(); } } private ViewModelExportWizard getExportWizard() { IWizard wizard = getWizard(); if (wizard instanceof ViewModelExportWizard) { return (ViewModelExportWizard) wizard; } else { return null; } } }
org.eclipse.emf.ecp.emf2web/src/org/eclipse/emf/ecp/emf2web/wizard/pages/ModelPathsPage.java
package org.eclipse.emf.ecp.emf2web.wizard.pages; import java.io.File; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IWorkspace; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.Path; import org.eclipse.emf.ecp.emf2web.wizard.ViewModelExportWizard; import org.eclipse.jface.fieldassist.ControlDecoration; import org.eclipse.jface.fieldassist.FieldDecoration; import org.eclipse.jface.fieldassist.FieldDecorationRegistry; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.jface.wizard.IWizard; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.dialogs.ElementTreeSelectionDialog; import org.eclipse.ui.model.BaseWorkbenchContentProvider; import org.eclipse.ui.model.WorkbenchLabelProvider; import org.eclipse.swt.widgets.TabFolder; import org.eclipse.swt.widgets.TabItem; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.widgets.Combo; public class ModelPathsPage extends WizardPage { private Text ecoremodelText; private Text genmodelText; private Button ecoreBrowse; private Button genBrowse; private IFile ecoreModel; private IFile genModel; private ControlDecoration ecoreControlDecoration; private ControlDecoration genControlDecoration; private Button projectSettingsButton; private Combo combo; private Group grpProjectSettings; private Text projectSettingsText; private Label projectSettingsLabel; private boolean createNewProject = false; private IProject selectedProject = null; private String projectName = ""; private ControlDecoration projectControlDecoration; /** * Create the wizard. */ public ModelPathsPage(IFile ecoreModel, IFile genModel) { super("wizardPage"); setTitle("ECP Model Exporter"); setDescription("Select the models and the project to export to"); this.ecoreModel = ecoreModel; this.genModel = genModel; } public boolean getCreateNewProject() { return createNewProject; } public IProject getSelectedProject() { return selectedProject; } public String getProjectName() { return projectName; } /** * Create contents of the wizard. 
* * @param parent */ public void createControl(Composite parent) { Composite container = new Composite(parent, SWT.NULL); FieldDecoration errorFieldDecoration = FieldDecorationRegistry .getDefault().getFieldDecoration( FieldDecorationRegistry.DEC_ERROR); Image errorImage = errorFieldDecoration.getImage(); FieldDecoration warningFieldDecoration = FieldDecorationRegistry .getDefault().getFieldDecoration( FieldDecorationRegistry.DEC_WARNING); Image warningImage = warningFieldDecoration.getImage(); setControl(container); container.setLayout(new GridLayout(2, false)); Label lblEmfEcoreModel = new Label(container, SWT.NONE); lblEmfEcoreModel.setText("EMF Ecore Model"); new Label(container, SWT.NONE); ecoremodelText = new Text(container, SWT.BORDER); ecoremodelText.addModifyListener(new EcoremodelTextModifyListener()); ecoremodelText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); ecoreControlDecoration = new ControlDecoration(ecoremodelText, SWT.LEFT | SWT.TOP); ecoreControlDecoration.setDescriptionText("Please enter a valid file"); ecoreControlDecoration.setImage(errorImage); ecoreControlDecoration.hide(); ecoreBrowse = new Button(container, SWT.NONE); ecoreBrowse.addSelectionListener(new EcoreBrowseSelectionListener()); ecoreBrowse.setText("Browse"); Label lblEmfEcoreGen = new Label(container, SWT.NONE); lblEmfEcoreGen.setText("EMF Ecore Gen Model (optional)"); new Label(container, SWT.NONE); genmodelText = new Text(container, SWT.BORDER); genmodelText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); genControlDecoration = new ControlDecoration(genmodelText, SWT.LEFT | SWT.TOP); genControlDecoration.setDescriptionText("Please enter a valid file"); genControlDecoration.setImage(errorImage); genControlDecoration.hide(); genBrowse = new Button(container, SWT.NONE); genBrowse.addSelectionListener(new EcoreBrowseSelectionListener()); genBrowse.setText("Browse"); new Label(container, SWT.NONE); new Label(container, SWT.NONE); grpProjectSettings = new Group(container, SWT.NONE); grpProjectSettings.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 2, 1)); grpProjectSettings.setText("Project Settings"); grpProjectSettings.setLayout(new GridLayout(2, false)); Composite composite = new Composite(grpProjectSettings, SWT.NONE); composite.setLayoutData(new GridData(SWT.LEFT, SWT.CENTER, false, false, 2, 1)); composite.setBounds(0, 0, 64, 64); composite.setLayout(new GridLayout(2, false)); Label lblNewLabel = new Label(composite, SWT.NONE); lblNewLabel.setText("Action:"); combo = new Combo(composite, SWT.NONE); combo.addSelectionListener(new ComboSelectionListener()); combo.setItems(new String[] { "Update existing Project", "Create new Project" }); combo.select(0); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); projectSettingsLabel = new Label(grpProjectSettings, SWT.NONE); projectSettingsLabel.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 1, 1)); projectSettingsLabel.setBounds(0, 0, 55, 15); projectSettingsLabel.setText("Select Project"); new Label(grpProjectSettings, SWT.NONE); projectSettingsText = new Text(grpProjectSettings, SWT.BORDER); projectSettingsText .addModifyListener(new ProjectSettingsTextModifyListener()); projectSettingsText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); projectControlDecoration = new ControlDecoration(projectSettingsText, SWT.LEFT | SWT.TOP); projectControlDecoration .setDescriptionText("Please enter a valid project name"); 
projectControlDecoration.setImage(errorImage); projectControlDecoration.hide(); projectSettingsButton = new Button(grpProjectSettings, SWT.NONE); projectSettingsButton.setSize(50, 25); projectSettingsButton .addSelectionListener(new BtnNewButtonSelectionListener()); projectSettingsButton.setText("Browse"); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); Label lblNewLabel_1 = new Label(grpProjectSettings, SWT.NONE); new Label(grpProjectSettings, SWT.NONE); init(); } private void init() { if (ecoreModel != null) { ecoremodelText.setText(ecoreModel.getFullPath().toString()); } if (genModel != null) { genmodelText.setText(ecoreModel.getFullPath().toString()); } checkForPageCompletion(); } private void checkForPageCompletion() { boolean pageComplete = true; String message = null; if (!createNewProject) { if (selectedProject == null) { projectControlDecoration.show(); pageComplete = false; } else { projectControlDecoration.hide(); } } else { if (selectedProject != null) { projectControlDecoration.show(); pageComplete = false; }else{ projectControlDecoration.hide(); } } if (ecoreModel == null) { pageComplete = false; } else if (!ecoreModel.exists()) { ecoreControlDecoration.show(); message = "Please enter a valid ecore file"; pageComplete = false; } else { ecoreControlDecoration.hide(); setMessage(null); } if (genModel == null) { // do nothing } else if (!genModel.exists()) { genControlDecoration.show(); } else { genControlDecoration.hide(); } if (isPageComplete() != pageComplete) { setPageComplete(pageComplete); } if (getMessage() == null || !getMessage().equals(message)) { setMessage(message); } } private class EcoreBrowseSelectionListener extends SelectionAdapter { @Override public void widgetSelected(SelectionEvent e) { ElementTreeSelectionDialog dialog = new ElementTreeSelectionDialog( getShell(), new WorkbenchLabelProvider(), new BaseWorkbenchContentProvider()); final String modelType; final String modelExtension; final Text modelText; if (e.getSource() == ecoreBrowse) { modelType = "Ecore Model"; modelExtension = ".ecore"; modelText = ecoremodelText; } else { modelType = "Ecore Gen Model"; modelExtension = ".genmodel"; modelText = genmodelText; } dialog.setTitle(modelType + " Selection"); dialog.setMessage("Select a " + modelType + " from the workspace"); dialog.addFilter(new ViewerFilter() { @Override public boolean select(Viewer viewer, Object parentElement, Object element) { if (element instanceof IFile) { IFile file = (IFile) element; return file.getName().endsWith(modelExtension); } return true; } }); dialog.setInput(ResourcesPlugin.getWorkspace().getRoot()); if (dialog.open() == ElementTreeSelectionDialog.OK) { Object result = dialog.getFirstResult(); if (result instanceof IFile) { IFile file = (IFile) result; modelText.setText(file.getFullPath().toString()); } } } } private class EcoremodelTextModifyListener implements ModifyListener { public void modifyText(ModifyEvent e) { String text = ((Text) e.getSource()).getText(); IPath path = new Path(text); IWorkspace workspace = ResourcesPlugin.getWorkspace(); IFile file = workspace.getRoot().getFile(path); if (e.getSource() == ecoremodelText) { ecoreModel = file; if (getExportWizard() != null && ecoreModel.exists()) { getExportWizard().setEcoreModel(ecoreModel); 
} } else { genModel = file; if (getExportWizard() != null && genModel.exists()) { getExportWizard().setGenModel(genModel); } } checkForPageCompletion(); } } private class BtnNewButtonSelectionListener extends SelectionAdapter { @Override public void widgetSelected(SelectionEvent e) { ElementTreeSelectionDialog dialog = new ElementTreeSelectionDialog( getShell(), new WorkbenchLabelProvider(), new BaseWorkbenchContentProvider()); dialog.setTitle("Select Play Application"); dialog.setMessage("Select your Play Application Project"); dialog.addFilter(new ViewerFilter() { @Override public boolean select(Viewer viewer, Object parentElement, Object element) { if (element instanceof IProject) { return true; } return false; } }); dialog.setInput(ResourcesPlugin.getWorkspace().getRoot()); if (dialog.open() == ElementTreeSelectionDialog.OK) { Object result = dialog.getFirstResult(); if (result instanceof IProject) { IProject project = (IProject) result; selectedProject = project; projectSettingsText.setText(project.getName()); } } } } private class ComboSelectionListener extends SelectionAdapter { @Override public void widgetSelected(SelectionEvent e) { if (combo.getSelectionIndex() == 0) { projectSettingsLabel.setText("Select Project"); createNewProject = false; if (selectedProject != null) { projectSettingsText.setText(selectedProject.getName()); } projectSettingsButton.setEnabled(true); } else if (combo.getSelectionIndex() == 1) { projectSettingsLabel.setText("Enter new Project Name"); createNewProject = true; projectSettingsButton.setEnabled(false); } } } private class ProjectSettingsTextModifyListener implements ModifyListener { public void modifyText(ModifyEvent e) { IWorkspace workspace = ResourcesPlugin.getWorkspace(); boolean found = false; for (IProject project : workspace.getRoot().getProjects()) { String searchName = projectSettingsText.getText() != null ? projectSettingsText .getText().trim() : ""; if (searchName.equals(project.getName())) { selectedProject = project; found = true; } } if (!found) { selectedProject = null; } projectName = projectSettingsText.getText(); checkForPageCompletion(); } } private ViewModelExportWizard getExportWizard() { IWizard wizard = getWizard(); if (wizard instanceof ViewModelExportWizard) { return (ViewModelExportWizard) wizard; } else { return null; } } }
Validate Page after selecting Item in DropDown of ModelPathsPage
org.eclipse.emf.ecp.emf2web/src/org/eclipse/emf/ecp/emf2web/wizard/pages/ModelPathsPage.java
Validate Page after selecting Item in DropDown of ModelPathsPage
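In the ModelPathsPage record above, every input handler funnels into a single checkForPageCompletion() method, and the behavioral change visible in the diff is that the combo's SelectionListener now calls it as well. A minimal sketch of that re-validate-on-every-event pattern, using hypothetical stand-in fields and methods rather than the SWT/JFace widgets from the record:

public final class PageValidationSketch {
    // Hypothetical stand-ins for the page state inspected during validation.
    private String selectedProjectName;
    private String ecorePath;
    private boolean pageComplete;

    // Single validation funnel, called after every state-changing event.
    private void checkForPageCompletion() {
        final boolean projectOk = selectedProjectName != null && !selectedProjectName.trim().isEmpty();
        final boolean ecoreOk = ecorePath != null && ecorePath.endsWith(".ecore");
        this.pageComplete = projectOk && ecoreOk;
    }

    void onProjectNameModified(final String name) {
        this.selectedProjectName = name;
        checkForPageCompletion();
    }

    void onEcorePathModified(final String path) {
        this.ecorePath = path;
        checkForPageCompletion();
    }

    // The kind of handler the commit touches: it changes state, so it must also re-validate.
    void onComboSelected(final boolean createNewProject) {
        if (createNewProject) {
            this.selectedProjectName = null;
        }
        checkForPageCompletion();
    }

    boolean isPageComplete() {
        return pageComplete;
    }
}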
Java
agpl-3.0
3f117bb38ae3f2d8f32721ee6e0f03839c937d3a
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
461c6a9e-2e61-11e5-9284-b827eb9e62be
hello.java
4616e4a2-2e61-11e5-9284-b827eb9e62be
461c6a9e-2e61-11e5-9284-b827eb9e62be
hello.java
461c6a9e-2e61-11e5-9284-b827eb9e62be
Java
agpl-3.0
9cbe2bee845506ed9a040317dcb13060341e0e81
0
Tanaguru/Tanaguru,dzc34/Asqatasun,medsob/Tanaguru,dzc34/Asqatasun,Tanaguru/Tanaguru,Asqatasun/Asqatasun,dzc34/Asqatasun,Asqatasun/Asqatasun,medsob/Tanaguru,Tanaguru/Tanaguru,medsob/Tanaguru,Asqatasun/Asqatasun,Asqatasun/Asqatasun,dzc34/Asqatasun,Asqatasun/Asqatasun,dzc34/Asqatasun,Tanaguru/Tanaguru,medsob/Tanaguru
/* * Tanaguru - Automated webpage assessment * Copyright (C) 2008-2011 Open-S Company * * This file is part of Tanaguru. * * Tanaguru is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * Contact us by mail: open-s AT open-s DOT com */ package org.opens.tanaguru.service.command; import java.io.UnsupportedEncodingException; import java.security.NoSuchAlgorithmException; import java.util.*; import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.opens.tanaguru.contentadapter.AdaptationListener; import org.opens.tanaguru.entity.audit.*; import org.opens.tanaguru.entity.parameterization.Parameter; import org.opens.tanaguru.entity.reference.Test; import org.opens.tanaguru.entity.service.audit.AuditDataService; import org.opens.tanaguru.entity.service.audit.ContentDataService; import org.opens.tanaguru.entity.service.audit.ProcessResultDataService; import org.opens.tanaguru.entity.service.parameterization.ParameterDataService; import org.opens.tanaguru.entity.service.reference.TestDataService; import org.opens.tanaguru.entity.service.subject.WebResourceDataService; import org.opens.tanaguru.entity.subject.Page; import org.opens.tanaguru.entity.subject.Site; import org.opens.tanaguru.entity.subject.WebResource; import org.opens.tanaguru.service.*; import org.opens.tanaguru.util.MD5Encoder; /** * * @author jkowalczyk */ public abstract class AuditCommandImpl implements AuditCommand { private static final Logger LOGGER = Logger.getLogger(AuditCommandImpl.class); public static final String AUDIT_STATUS_IS_LOGGER_STR = "Audit status is"; public static final String WHILE_LOGGER_STR = " while"; public static final String WAS_REQUIRED_LOGGER_STR = " was required"; public static final String TO_LOGGER_STR = " to "; public static final String MS_LOGGER_STR = " ms "; public static final String SSP_TOOK_LOGGER_STR = " SSP took "; public static final String CONSOLIDATING_TOOK_LOGGER_STR = "Consolidating took "; public static final int DEFAULT_ANALYSE_TREATMENT_WINDOW = 10; public static final int DEFAULT_PROCESSING_TREATMENT_WINDOW = 4; public static final int DEFAULT_ADAPTATION_TREATMENT_WINDOW = 4; public static final int DEFAULT_CONSOLIDATION_TREATMENT_WINDOW = 200; private int adaptationTreatmentWindow = DEFAULT_ADAPTATION_TREATMENT_WINDOW; public void setAdaptationTreatmentWindow(int adaptationTreatmentWindow) { this.adaptationTreatmentWindow = adaptationTreatmentWindow; } private int analyseTreatmentWindow = DEFAULT_ANALYSE_TREATMENT_WINDOW; public void setAnalyseTreatmentWindow(int analyseTreatmentWindow) { this.analyseTreatmentWindow = analyseTreatmentWindow; } private int consolidationTreatmentWindow = DEFAULT_CONSOLIDATION_TREATMENT_WINDOW; public void setConsolidationTreatmentWindow(int consolidationTreatmentWindow) { this.consolidationTreatmentWindow = consolidationTreatmentWindow; } private int processingTreatmentWindow = 
DEFAULT_PROCESSING_TREATMENT_WINDOW; public void setProcessingTreatmentWindow(int processingTreatmentWindow) { this.processingTreatmentWindow = processingTreatmentWindow; } private Audit audit; @Override public Audit getAudit() { return audit; } @Override public void setAudit(Audit audit) { this.audit = audit; } // The dataServices private AuditDataService auditDataService; public AuditDataService getAuditDataService() { return auditDataService; } private TestDataService testDataService; public TestDataService getTestDataService() { return testDataService; } private ParameterDataService parameterDataService; public ParameterDataService getParameterDataService() { return parameterDataService; } private WebResourceDataService webResourceDataService; public WebResourceDataService getWebResourceDataService() { return webResourceDataService; } private ContentDataService contentDataService; public ContentDataService getContentDataService() { return contentDataService; } private ProcessResultDataService processResultDataService; public ProcessResultDataService getProcessResultDataService() { return processResultDataService; } // The services private ContentAdapterService contentAdapterService; public ContentAdapterService getContentAdapterService() { return contentAdapterService; } private ProcessorService processorService; public ProcessorService getProcessorService() { return processorService; } private ConsolidatorService consolidatorService; public ConsolidatorService getConsolidatorService() { return consolidatorService; } private AnalyserService analyserService; public AnalyserService getAnalyserService() { return analyserService; } // The listeners private AdaptationListener adaptationListener; public AdaptationListener getAdaptationListener() { return adaptationListener; } /** * * @param paramSet * @param auditDataService * @param testDataService * @param parameterDataService * @param webResourceDataService * @param contentList * @param processResultDataService * @param contentAdapterService * @param processorService * @param consolidatorService * @param analyserService * @param adaptationListener */ public AuditCommandImpl( Set<Parameter> paramSet, AuditDataService auditDataService, TestDataService testDataService, ParameterDataService parameterDataService, WebResourceDataService webResourceDataService, ContentDataService contentDataService, ProcessResultDataService processResultDataService, ContentAdapterService contentAdapterService, ProcessorService processorService, ConsolidatorService consolidatorService, AnalyserService analyserService, AdaptationListener adaptationListener, int adaptationTreatmentWindow, int processingTreatmentWindow, int consolidationTreatmentWindow, int analysisTreatmentWindow) { this.auditDataService = auditDataService; this.testDataService = testDataService; this.parameterDataService = parameterDataService; this.webResourceDataService = webResourceDataService; this.contentDataService = contentDataService; this.processResultDataService = processResultDataService; this.contentAdapterService = contentAdapterService; this.processorService = processorService; this.consolidatorService = consolidatorService; this.analyserService = analyserService; this.adaptationListener = adaptationListener; this.adaptationTreatmentWindow = adaptationTreatmentWindow; this.processingTreatmentWindow = processingTreatmentWindow; initialiseAudit(paramSet); } /** * * @param paramSet * @return */ private Audit initialiseAudit (Set<Parameter> paramSet) { 
Logger.getLogger(this.getClass()).info("initializeAudit"); // the paramSet has to be persisted parameterDataService.saveOrUpdate(paramSet); audit = auditDataService.create(); audit.setTestList(testDataService.getTestListFromParamSet(paramSet)); audit.setParameterSet(paramSet); setStatusToAudit(AuditStatus.INITIALISATION); return audit; } @Override public void adaptContent() { audit = auditDataService.getAuditWithWebResource(audit.getId()); if (!audit.getStatus().equals(AuditStatus.CONTENT_ADAPTING)) { LOGGER.warn( new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR) .append(audit.getStatus()) .append(WHILE_LOGGER_STR) .append(AuditStatus.CONTENT_ADAPTING) .append(WAS_REQUIRED_LOGGER_STR).toString()); return; } // debug tools Date beginProcessDate = null; Date endRetrieveDate = null; Date endProcessDate = null; Date endPersistDate; Long persistenceDuration = Long.valueOf(0); boolean hasCorrectDOM = false; Long i = Long.valueOf(0); Long webResourceId = audit.getSubject().getId(); Long nbOfContent = contentDataService.getNumberOfSSPFromWebResource(audit.getSubject(), HttpStatus.SC_OK); // Some actions have to be realized when the adaptation starts if (adaptationListener != null) { adaptationListener.adaptationStarted(audit); } while (i.compareTo(nbOfContent) < 0) { LOGGER.info( new StringBuilder("Adapting ssp from ") .append(i) .append(TO_LOGGER_STR) .append(i + adaptationTreatmentWindow) .append(" for ") .append(audit.getSubject().getURL()).toString()); if (LOGGER.isDebugEnabled()) { beginProcessDate = Calendar.getInstance().getTime(); } List<Content> contentList = retrieveContentList( webResourceId, i, adaptationTreatmentWindow, beginProcessDate, false, true); if (LOGGER.isDebugEnabled()) { endRetrieveDate = Calendar.getInstance().getTime(); } Set<Content> contentSet = new HashSet<Content>(); contentSet.addAll(contentAdapterService.adaptContent(contentList)); if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Adapting ") .append(contentList.size()) .append(SSP_TOOK_LOGGER_STR) .append(endProcessDate.getTime() - endRetrieveDate.getTime()) .append(MS_LOGGER_STR) .append(contentSet.size()).toString()); } hasCorrectDOM = hasCorrectDOM || hasContentSetAtLeastOneCorrectDOM(contentSet); this.encodeSourceAndPersistContentList(contentSet); if (LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persisting ") .append(contentSet.size()) .append(SSP_TOOK_LOGGER_STR) .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR) .append("for ") .append(audit.getSubject().getURL()).toString()); persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime()); } i = i + adaptationTreatmentWindow; // explicit call of the Gc System.gc(); } LOGGER.debug(new StringBuilder("Application spent ") .append(persistenceDuration) .append(" ms to write in Disk while adapting").toString()); if (hasCorrectDOM) { setStatusToAudit(AuditStatus.PROCESSING); } else { Logger.getLogger(AuditServiceImpl.class).warn("Audit has no corrected DOM"); setStatusToAudit(AuditStatus.ERROR); } // Some actions have to be realized when the adaptation is completed if (adaptationListener != null) { adaptationListener.adaptationCompleted(audit); } } /** * This method retrieves a list of content for a given webResource * from a startValue regarding the ADAPTATION_TREATMENT_WINDOW * * @param webResourceId * @param startValue * @return */ private List<Content> 
retrieveContentList( Long webResourceId, Long startValue, int windowSize, Date beginProcessDate, boolean getContentWithRelatedContent, boolean getContentWithNullDom) { List<Content> contentList = new ArrayList<Content>(); // First we retrieve a list of Ids List<Long> contentIdList = contentDataService.getSSPFromWebResource( webResourceId, HttpStatus.SC_OK, startValue.intValue(), windowSize); // we retrieve each content from its ID and add it to the contentList // that will be returned for (Long id : contentIdList) { Content content; if (getContentWithRelatedContent) { content = contentDataService.readWithRelatedContent(id, true); } else { content = contentDataService.read(id); } if (content != null && ( getContentWithNullDom || (!getContentWithNullDom && content instanceof SSP && StringUtils.isNotEmpty(((SSP)content).getDOM())))) { contentList.add(content); } } if (LOGGER.isDebugEnabled()) { long length = 0; int nbOfResources = 0; for (Content content : contentList) { if (((SSP) content).getSource() != null) { length += ((SSP) content).getDOM().length(); if (getContentWithRelatedContent) { nbOfResources += ((SSP) content).getRelatedContentSet().size(); } } } StringBuilder debugMessage = new StringBuilder("Retrieving ") .append(contentList.size()) .append(SSP_TOOK_LOGGER_STR) .append(Calendar.getInstance().getTime().getTime() - beginProcessDate.getTime()) .append(" ms and working on ") .append(length) .append(" characters"); if (getContentWithRelatedContent) { debugMessage.append(" and "); debugMessage.append(nbOfResources); debugMessage.append(" relatedContent "); } LOGGER.debug(debugMessage.toString()); } return contentList; } /** * * @param contentSet * @return */ private boolean hasContentSetAtLeastOneCorrectDOM(Set<Content> contentSet) { for (Content content : contentSet) { // if one SSP with not empty DOM is encountered, we return true if (content instanceof SSP && !((SSP) content).getDOM().isEmpty()) { return true; } } return false; } /** * Encode Source code and persist the content list * * @param contentSet */ private void encodeSourceAndPersistContentList(Set<Content> contentSet) { for (Content content : contentSet) { if (content instanceof SSP && !((SSP) content).getDOM().isEmpty()) { try { ((SSP) content).setSource(MD5Encoder.MD5(((SSP) content).getSource())); } catch (NoSuchAlgorithmException ex) { LOGGER.warn(ex); } catch (UnsupportedEncodingException ex) { LOGGER.warn(ex); } } contentDataService.saveOrUpdate(content); } } @Override public void process() { audit = auditDataService.getAuditWithTest(audit.getId()); if (!audit.getStatus().equals(AuditStatus.PROCESSING)) { LOGGER.warn( new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR) .append(audit.getStatus()) .append(WHILE_LOGGER_STR) .append(AuditStatus.PROCESSING) .append(WAS_REQUIRED_LOGGER_STR).toString()); return; } // debug tools Date beginProcessDate = null; Date endProcessDate = null; Date endPersistDate; Long persistenceDuration = Long.valueOf(0); Long i = Long.valueOf(0); Long webResourceId = audit.getSubject().getId(); Long nbOfContent = contentDataService.getNumberOfSSPFromWebResource(audit.getSubject(), HttpStatus.SC_OK); Set<ProcessResult> processResultSet = new HashSet<ProcessResult>(); while (i.compareTo(nbOfContent) < 0) { if (LOGGER.isInfoEnabled()) { LOGGER.info( new StringBuilder("Processing from ") .append(i) .append(TO_LOGGER_STR) .append(i+processingTreatmentWindow) .append("for ") .append(audit.getSubject().getURL()).toString()); beginProcessDate = Calendar.getInstance().getTime(); } List<Content> contentList 
= retrieveContentList( webResourceId, i, processingTreatmentWindow, beginProcessDate, true, false); processResultSet.clear(); processResultSet.addAll(processorService.process(contentList, audit.getTestList())); for (ProcessResult processResult : processResultSet) { processResult.setGrossResultAudit(audit); } if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Processing of ") .append(processingTreatmentWindow) .append(" elements took ") .append(endProcessDate.getTime() - beginProcessDate.getTime()) .append(MS_LOGGER_STR) .append("for ") .append(audit.getSubject().getURL()).toString()); } processResultDataService.saveOrUpdate(processResultSet); if (LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persist processing of ") .append(processingTreatmentWindow) .append(" elements took ") .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR) .append("for ") .append(audit.getSubject().getURL()).toString()); persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime()); } i = i + processingTreatmentWindow; System.gc(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Application spent ") .append(persistenceDuration) .append(" ms to write in Disk while processing").toString()); } if (processResultDataService.getNumberOfGrossResultFromAudit(audit) > 0) { setStatusToAudit(AuditStatus.CONSOLIDATION); } else { LOGGER.error("Audit has no gross result"); setStatusToAudit(AuditStatus.ERROR); } } @Override public void consolidate() { audit = auditDataService.getAuditWithTest(audit.getId()); if (!audit.getStatus().equals(AuditStatus.CONSOLIDATION)) { LOGGER.warn( new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR) .append(audit.getStatus()) .append(WHILE_LOGGER_STR) .append(AuditStatus.CONSOLIDATION) .append(WAS_REQUIRED_LOGGER_STR).toString()); return; } // debug tools Date beginProcessDate = null; Date endProcessDate = null; Date endPersistDate; if (LOGGER.isDebugEnabled()) { LOGGER.debug("Consolidation"); beginProcessDate = Calendar.getInstance().getTime(); } if (audit.getSubject() instanceof Page) { consolidate(processResultDataService. getGrossResultFromAudit(audit), audit.getTestList()); if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder(CONSOLIDATING_TOOK_LOGGER_STR) .append(endProcessDate.getTime()-beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } } else if (audit.getSubject() instanceof Site) { if (contentDataService.getNumberOfSSPFromWebResource(audit.getSubject(), HttpStatus.SC_OK) > 20) { List<Test> testList = new ArrayList<Test>(); for (Test test : audit.getTestList()) { testList.add(test); Collection<ProcessResult> prList= (List<ProcessResult>) processResultDataService. getGrossResultFromAuditAndTest(audit, test); consolidate(prList, testList); testList.clear(); } if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder(CONSOLIDATING_TOOK_LOGGER_STR) .append(endProcessDate.getTime()-beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } } else { Collection<ProcessResult> prList= (List<ProcessResult>) processResultDataService. 
getGrossResultFromAudit(audit); consolidate(prList, audit.getTestList()); if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder(CONSOLIDATING_TOOK_LOGGER_STR) .append(endProcessDate.getTime()-beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } } } audit = auditDataService.saveOrUpdate(audit); if (LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persisting Consolidation of the audit took") .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } } /** * * @param prList * @param testList */ private void consolidate(Collection<ProcessResult> prList, Collection<Test> testList) { Set<ProcessResult> processResultSet = new HashSet<ProcessResult>(); if (LOGGER.isDebugEnabled()) { if (testList.size() == 1) { LOGGER.debug( new StringBuilder("Consolidate ") .append(prList.size()) .append(" elements for test ") .append(testList.iterator().next().getCode()).toString()); } else { LOGGER.debug( new StringBuilder("Consolidate ") .append(prList.size()) .append(" elements for ") .append(testList.size()) .append(" tests ").toString()); } } processResultSet.addAll(consolidatorService.consolidate( prList, testList)); if (!processResultSet.isEmpty()) { audit.setStatus(AuditStatus.ANALYSIS); } else { LOGGER.warn("Audit has no net result"); audit.setStatus(AuditStatus.ERROR); } Iterator<ProcessResult> iter = processResultSet.iterator(); Set<ProcessResult> processResultSubset = new HashSet<ProcessResult>(); int i = 0; while (iter.hasNext()) { ProcessResult pr = iter.next(); // To avoid errors with processResult of Site Type in case of page audit if (!(pr.getTest().getScope().getCode().contains("site") && pr.getSubject() instanceof Page)) { pr.setNetResultAudit(audit); processResultSubset.add(pr); i++; if (i % consolidationTreatmentWindow == 0) { if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Persisting Consolidation from ") .append(i) .append(TO_LOGGER_STR) .append(i+consolidationTreatmentWindow).toString()); } processResultDataService.saveOrUpdate(processResultSubset); processResultSubset.clear(); } } } processResultDataService.saveOrUpdate(processResultSubset); processResultSubset.clear(); System.gc(); } @Override public void analyse() { audit = auditDataService.getAuditWithWebResource(audit.getId()); if (!audit.getStatus().equals(AuditStatus.ANALYSIS)) { LOGGER.warn( new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR) .append(audit.getStatus()) .append(WHILE_LOGGER_STR) .append(AuditStatus.ANALYSIS) .append(WAS_REQUIRED_LOGGER_STR).toString()); return ; } // debug tools Date beginProcessDate = null; Date endProcessDate = null; Date endPersistDate; Long persistenceDuration = Long.valueOf(0); WebResource parentWebResource = audit.getSubject(); if (parentWebResource instanceof Page) { analyserService.analyse(parentWebResource, audit); webResourceDataService.saveOrUpdate(parentWebResource); } else if (parentWebResource instanceof Site) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Analysing results of scope site"); beginProcessDate = Calendar.getInstance().getTime(); } analyserService.analyse(parentWebResource, audit); if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Analysing results of scope site took ") .append(endProcessDate.getTime() - beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } webResourceDataService.saveOrUpdate(parentWebResource); if 
(LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persisting Analysis results of scope site ") .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime()); } Long nbOfContent = webResourceDataService.getNumberOfChildWebResource(parentWebResource); Long i = Long.valueOf(0); List<WebResource> webResourceList; if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Analysing ") .append(nbOfContent) .append(" elements ").toString()); } while (i.compareTo(nbOfContent) < 0) { if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Analysing results of scope page from ") .append(i) .append(TO_LOGGER_STR) .append(i + analyseTreatmentWindow).toString()); beginProcessDate = Calendar.getInstance().getTime(); } webResourceList = webResourceDataService.getWebResourceFromItsParent( parentWebResource, i.intValue(), analyseTreatmentWindow); for (WebResource webResource : webResourceList) { if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Analysing results for page ") .append(webResource.getURL()) .append(" took ") .append(endProcessDate.getTime() - beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } analyserService.analyse(webResource,audit); if (LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persisting Analysis results for page ") .append(webResource.getURL()) .append(" took ") .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime()); } } i = i + analyseTreatmentWindow; } } if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Application spent ") .append(persistenceDuration) .append(" ms to write in Disk while analysing").toString()); } setStatusToAudit(AuditStatus.COMPLETED); } /** * Set a new status to the audit instance and persist it * @param auditStatus */ public void setStatusToAudit(AuditStatus auditStatus) { audit.setStatus(auditStatus); audit = auditDataService.saveOrUpdate(audit); } }
engine/tanaguru-engine/src/main/java/org/opens/tanaguru/service/command/AuditCommandImpl.java
/* * Tanaguru - Automated webpage assessment * Copyright (C) 2008-2011 Open-S Company * * This file is part of Tanaguru. * * Tanaguru is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * Contact us by mail: open-s AT open-s DOT com */ package org.opens.tanaguru.service.command; import java.io.UnsupportedEncodingException; import java.security.NoSuchAlgorithmException; import java.util.*; import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.opens.tanaguru.contentadapter.AdaptationListener; import org.opens.tanaguru.entity.audit.*; import org.opens.tanaguru.entity.parameterization.Parameter; import org.opens.tanaguru.entity.reference.Test; import org.opens.tanaguru.entity.service.audit.AuditDataService; import org.opens.tanaguru.entity.service.audit.ContentDataService; import org.opens.tanaguru.entity.service.audit.ProcessResultDataService; import org.opens.tanaguru.entity.service.parameterization.ParameterDataService; import org.opens.tanaguru.entity.service.reference.TestDataService; import org.opens.tanaguru.entity.service.subject.WebResourceDataService; import org.opens.tanaguru.entity.subject.Page; import org.opens.tanaguru.entity.subject.Site; import org.opens.tanaguru.entity.subject.WebResource; import org.opens.tanaguru.service.*; import org.opens.tanaguru.util.MD5Encoder; /** * * @author jkowalczyk */ public abstract class AuditCommandImpl implements AuditCommand { private static final Logger LOGGER = Logger.getLogger(AuditCommandImpl.class); public static final String AUDIT_STATUS_IS_LOGGER_STR = "Audit status is"; public static final String WHILE_LOGGER_STR = " while"; public static final String WAS_REQUIRED_LOGGER_STR = " was required"; public static final String TO_LOGGER_STR = " to "; public static final String MS_LOGGER_STR = " ms "; public static final String SSP_TOOK_LOGGER_STR = " SSP took "; public static final String CONSOLIDATING_TOOK_LOGGER_STR = "Consolidating took "; public static final int DEFAULT_ANALYSE_TREATMENT_WINDOW = 10; public static final int DEFAULT_PROCESSING_TREATMENT_WINDOW = 4; public static final int DEFAULT_ADAPTATION_TREATMENT_WINDOW = 4; public static final int DEFAULT_CONSOLIDATION_TREATMENT_WINDOW = 200; private int adaptationTreatmentWindow = DEFAULT_ADAPTATION_TREATMENT_WINDOW; public void setAdaptationTreatmentWindow(int adaptationTreatmentWindow) { this.adaptationTreatmentWindow = adaptationTreatmentWindow; } private int analyseTreatmentWindow = DEFAULT_ANALYSE_TREATMENT_WINDOW; public void setAnalyseTreatmentWindow(int analyseTreatmentWindow) { this.analyseTreatmentWindow = analyseTreatmentWindow; } private int consolidationTreatmentWindow = DEFAULT_CONSOLIDATION_TREATMENT_WINDOW; public void setConsolidationTreatmentWindow(int consolidationTreatmentWindow) { this.consolidationTreatmentWindow = consolidationTreatmentWindow; } private int processingTreatmentWindow = 
DEFAULT_PROCESSING_TREATMENT_WINDOW; public void setProcessingTreatmentWindow(int processingTreatmentWindow) { this.processingTreatmentWindow = processingTreatmentWindow; } private Audit audit; @Override public Audit getAudit() { return audit; } @Override public void setAudit(Audit audit) { this.audit = audit; } // The dataServices private AuditDataService auditDataService; public AuditDataService getAuditDataService() { return auditDataService; } private TestDataService testDataService; public TestDataService getTestDataService() { return testDataService; } private ParameterDataService parameterDataService; public ParameterDataService getParameterDataService() { return parameterDataService; } private WebResourceDataService webResourceDataService; public WebResourceDataService getWebResourceDataService() { return webResourceDataService; } private ContentDataService contentDataService; public ContentDataService getContentDataService() { return contentDataService; } private ProcessResultDataService processResultDataService; public ProcessResultDataService getProcessResultDataService() { return processResultDataService; } // The services private ContentAdapterService contentAdapterService; public ContentAdapterService getContentAdapterService() { return contentAdapterService; } private ProcessorService processorService; public ProcessorService getProcessorService() { return processorService; } private ConsolidatorService consolidatorService; public ConsolidatorService getConsolidatorService() { return consolidatorService; } private AnalyserService analyserService; public AnalyserService getAnalyserService() { return analyserService; } // The listeners private AdaptationListener adaptationListener; public AdaptationListener getAdaptationListener() { return adaptationListener; } /** * * @param paramSet * @param auditDataService * @param testDataService * @param parameterDataService * @param webResourceDataService * @param contentList * @param processResultDataService * @param contentAdapterService * @param processorService * @param consolidatorService * @param analyserService * @param adaptationListener */ public AuditCommandImpl( Set<Parameter> paramSet, AuditDataService auditDataService, TestDataService testDataService, ParameterDataService parameterDataService, WebResourceDataService webResourceDataService, ContentDataService contentDataService, ProcessResultDataService processResultDataService, ContentAdapterService contentAdapterService, ProcessorService processorService, ConsolidatorService consolidatorService, AnalyserService analyserService, AdaptationListener adaptationListener, int adaptationTreatmentWindow, int processingTreatmentWindow, int consolidationTreatmentWindow, int analysisTreatmentWindow) { this.auditDataService = auditDataService; this.testDataService = testDataService; this.parameterDataService = parameterDataService; this.webResourceDataService = webResourceDataService; this.contentDataService = contentDataService; this.processResultDataService = processResultDataService; this.contentAdapterService = contentAdapterService; this.processorService = processorService; this.consolidatorService = consolidatorService; this.analyserService = analyserService; this.adaptationListener = adaptationListener; this.adaptationTreatmentWindow = adaptationTreatmentWindow; this.processingTreatmentWindow = processingTreatmentWindow; initialiseAudit(paramSet); } /** * * @param paramSet * @return */ private Audit initialiseAudit (Set<Parameter> paramSet) { 
Logger.getLogger(this.getClass()).info("initializeAudit"); // the paramSet has to be persisted parameterDataService.saveOrUpdate(paramSet); audit = auditDataService.create(); audit.setTestList(testDataService.getTestListFromParamSet(paramSet)); audit.setParameterSet(paramSet); setStatusToAudit(AuditStatus.INITIALISATION); return audit; } @Override public void adaptContent() { audit = auditDataService.getAuditWithWebResource(audit.getId()); if (!audit.getStatus().equals(AuditStatus.CONTENT_ADAPTING)) { LOGGER.warn( new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR) .append(audit.getStatus()) .append(WHILE_LOGGER_STR) .append(AuditStatus.CONTENT_ADAPTING) .append(WAS_REQUIRED_LOGGER_STR).toString()); return; } // debug tools Date beginProcessDate = null; Date endRetrieveDate = null; Date endProcessDate = null; Date endPersistDate; Long persistenceDuration = Long.valueOf(0); boolean hasCorrectDOM = false; Long i = Long.valueOf(0); Long webResourceId = audit.getSubject().getId(); Long nbOfContent = contentDataService.getNumberOfSSPFromWebResource(audit.getSubject(), HttpStatus.SC_OK); // Some actions have to be realized when the adaptation starts if (adaptationListener != null) { adaptationListener.adaptationStarted(audit); } while (i.compareTo(nbOfContent) < 0) { if (LOGGER.isDebugEnabled()) { beginProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Adapt ssp from ") .append(i) .append(TO_LOGGER_STR) .append(i + adaptationTreatmentWindow).toString()); } List<Content> contentList = retrieveContentList( webResourceId, i, adaptationTreatmentWindow, beginProcessDate, false, true); if (LOGGER.isDebugEnabled()) { endRetrieveDate = Calendar.getInstance().getTime(); } Set<Content> contentSet = new HashSet<Content>(); contentSet.addAll(contentAdapterService.adaptContent(contentList)); if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Adapting ") .append(contentList.size()) .append(SSP_TOOK_LOGGER_STR) .append(endProcessDate.getTime() - endRetrieveDate.getTime()) .append(MS_LOGGER_STR) .append(contentSet.size()).toString()); } hasCorrectDOM = hasCorrectDOM || hasContentSetAtLeastOneCorrectDOM(contentSet); this.encodeSourceAndPersistContentList(contentSet); if (LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persisting ") .append(contentSet.size()) .append(SSP_TOOK_LOGGER_STR) .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime()); } i = i + adaptationTreatmentWindow; // explicit call of the Gc System.gc(); } LOGGER.debug(new StringBuilder("Application spent ") .append(persistenceDuration) .append(" ms to write in Disk while adapting").toString()); if (hasCorrectDOM) { setStatusToAudit(AuditStatus.PROCESSING); } else { Logger.getLogger(AuditServiceImpl.class).warn("Audit has no corrected DOM"); setStatusToAudit(AuditStatus.ERROR); } // Some actions have to be realized when the adaptation is completed if (adaptationListener != null) { adaptationListener.adaptationCompleted(audit); } } /** * This method retrieves a list of content for a given webResource * from a startValue regarding the ADAPTATION_TREATMENT_WINDOW * * @param webResourceId * @param startValue * @return */ private List<Content> retrieveContentList( Long webResourceId, Long startValue, int windowSize, Date beginProcessDate, boolean 
getContentWithRelatedContent, boolean getContentWithNullDom) { List<Content> contentList = new ArrayList<Content>(); // First we retrieve a list of Ids List<Long> contentIdList = contentDataService.getSSPFromWebResource( webResourceId, HttpStatus.SC_OK, startValue.intValue(), windowSize); LOGGER.debug(new StringBuilder("Retrieved ") .append(contentIdList.size()) .append(" Ids ").toString()); // we retrieve each content from its ID and add it to the contentList // that will be returned for (Long id : contentIdList) { Content content; if (getContentWithRelatedContent) { content = contentDataService.readWithRelatedContent(id, true); } else { content = contentDataService.read(id); } if (content != null && ( getContentWithNullDom || (!getContentWithNullDom && content instanceof SSP && StringUtils.isNotEmpty(((SSP)content).getDOM())))) { contentList.add(content); } } if (LOGGER.isDebugEnabled()) { long length = 0; int nbOfResources = 0; for (Content content : contentList) { if (((SSP) content).getSource() != null) { length += ((SSP) content).getSource().length(); if (getContentWithRelatedContent) { nbOfResources += ((SSP) content).getRelatedContentSet().size(); } } } StringBuilder debugMessage = new StringBuilder("Retrieving ") .append(contentList.size()) .append(SSP_TOOK_LOGGER_STR) .append(Calendar.getInstance().getTime().getTime() - beginProcessDate.getTime()) .append(" ms and working on ") .append(length) .append(" characters"); if (getContentWithRelatedContent) { debugMessage.append(" and "); debugMessage.append(nbOfResources); debugMessage.append(" relatedContent "); } LOGGER.debug(debugMessage.toString()); } return contentList; } /** * * @param contentSet * @return */ private boolean hasContentSetAtLeastOneCorrectDOM(Set<Content> contentSet) { for (Content content : contentSet) { // if one SSP with not empty DOM is encountered, we return true if (content instanceof SSP && !((SSP) content).getDOM().isEmpty()) { return true; } } return false; } /** * Encode Source code and persist the content list * * @param contentSet */ private void encodeSourceAndPersistContentList(Set<Content> contentSet) { for (Content content : contentSet) { if (content instanceof SSP && !((SSP) content).getDOM().isEmpty()) { try { ((SSP) content).setSource(MD5Encoder.MD5(((SSP) content).getSource())); } catch (NoSuchAlgorithmException ex) { LOGGER.warn(ex); } catch (UnsupportedEncodingException ex) { LOGGER.warn(ex); } } contentDataService.saveOrUpdate(content); } } @Override public void process() { audit = auditDataService.getAuditWithTest(audit.getId()); if (!audit.getStatus().equals(AuditStatus.PROCESSING)) { LOGGER.warn( new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR) .append(audit.getStatus()) .append(WHILE_LOGGER_STR) .append(AuditStatus.PROCESSING) .append(WAS_REQUIRED_LOGGER_STR).toString()); return; } // debug tools Date beginProcessDate = null; Date endProcessDate = null; Date endPersistDate; Long persistenceDuration = Long.valueOf(0); Long i = Long.valueOf(0); Long webResourceId = audit.getSubject().getId(); Long nbOfContent = contentDataService.getNumberOfSSPFromWebResource(audit.getSubject(), HttpStatus.SC_OK); Set<ProcessResult> processResultSet = new HashSet<ProcessResult>(); while (i.compareTo(nbOfContent) < 0) { if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Processing from ") .append(i) .append(TO_LOGGER_STR) .append(i+processingTreatmentWindow).toString()); beginProcessDate = Calendar.getInstance().getTime(); } List<Content> contentList = retrieveContentList( webResourceId, i, 
processingTreatmentWindow, beginProcessDate, true, false); processResultSet.clear(); processResultSet.addAll(processorService.process(contentList, audit.getTestList())); for (ProcessResult processResult : processResultSet) { processResult.setGrossResultAudit(audit); } if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Processing of ") .append(processingTreatmentWindow) .append(" elements took ") .append(endProcessDate.getTime() - beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } processResultDataService.saveOrUpdate(processResultSet); if (LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persist processing of ") .append(processingTreatmentWindow) .append(" elements took ") .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime()); } i = i + processingTreatmentWindow; System.gc(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Application spent ") .append(persistenceDuration) .append(" ms to write in Disk while processing").toString()); } if (processResultDataService.getNumberOfGrossResultFromAudit(audit) > 0) { setStatusToAudit(AuditStatus.CONSOLIDATION); } else { LOGGER.error("Audit has no gross result"); setStatusToAudit(AuditStatus.ERROR); } } @Override public void consolidate() { audit = auditDataService.getAuditWithTest(audit.getId()); if (!audit.getStatus().equals(AuditStatus.CONSOLIDATION)) { LOGGER.warn( new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR) .append(audit.getStatus()) .append(WHILE_LOGGER_STR) .append(AuditStatus.CONSOLIDATION) .append(WAS_REQUIRED_LOGGER_STR).toString()); return; } // debug tools Date beginProcessDate = null; Date endProcessDate = null; Date endPersistDate; if (LOGGER.isDebugEnabled()) { LOGGER.debug("Consolidation"); beginProcessDate = Calendar.getInstance().getTime(); } if (audit.getSubject() instanceof Page) { consolidate(processResultDataService. getGrossResultFromAudit(audit), audit.getTestList()); if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder(CONSOLIDATING_TOOK_LOGGER_STR) .append(endProcessDate.getTime()-beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } } else if (audit.getSubject() instanceof Site) { if (contentDataService.getNumberOfSSPFromWebResource(audit.getSubject(), HttpStatus.SC_OK) > 20) { List<Test> testList = new ArrayList<Test>(); for (Test test : audit.getTestList()) { testList.add(test); Collection<ProcessResult> prList= (List<ProcessResult>) processResultDataService. getGrossResultFromAuditAndTest(audit, test); consolidate(prList, testList); testList.clear(); } if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder(CONSOLIDATING_TOOK_LOGGER_STR) .append(endProcessDate.getTime()-beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } } else { Collection<ProcessResult> prList= (List<ProcessResult>) processResultDataService. 
getGrossResultFromAudit(audit); consolidate(prList, audit.getTestList()); if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder(CONSOLIDATING_TOOK_LOGGER_STR) .append(endProcessDate.getTime()-beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } } } audit = auditDataService.saveOrUpdate(audit); if (LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persisting Consolidation of the audit took") .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } } /** * * @param prList * @param testList */ private void consolidate(Collection<ProcessResult> prList, Collection<Test> testList) { Set<ProcessResult> processResultSet = new HashSet<ProcessResult>(); if (LOGGER.isDebugEnabled()) { if (testList.size() == 1) { LOGGER.debug( new StringBuilder("Consolidate ") .append(prList.size()) .append(" elements for test ") .append(testList.iterator().next().getCode()).toString()); } else { LOGGER.debug( new StringBuilder("Consolidate ") .append(prList.size()) .append(" elements for ") .append(testList.size()) .append(" tests ").toString()); } } processResultSet.addAll(consolidatorService.consolidate( prList, testList)); if (!processResultSet.isEmpty()) { audit.setStatus(AuditStatus.ANALYSIS); } else { LOGGER.warn("Audit has no net result"); audit.setStatus(AuditStatus.ERROR); } Iterator<ProcessResult> iter = processResultSet.iterator(); Set<ProcessResult> processResultSubset = new HashSet<ProcessResult>(); int i = 0; while (iter.hasNext()) { ProcessResult pr = iter.next(); // To avoid errors with processResult of Site Type in case of page audit if (!(pr.getTest().getScope().getCode().contains("site") && pr.getSubject() instanceof Page)) { pr.setNetResultAudit(audit); processResultSubset.add(pr); i++; if (i % consolidationTreatmentWindow == 0) { if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Persisting Consolidation from ") .append(i) .append(TO_LOGGER_STR) .append(i+consolidationTreatmentWindow).toString()); } processResultDataService.saveOrUpdate(processResultSubset); processResultSubset.clear(); } } } processResultDataService.saveOrUpdate(processResultSubset); processResultSubset.clear(); System.gc(); } @Override public void analyse() { audit = auditDataService.getAuditWithWebResource(audit.getId()); if (!audit.getStatus().equals(AuditStatus.ANALYSIS)) { LOGGER.warn( new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR) .append(audit.getStatus()) .append(WHILE_LOGGER_STR) .append(AuditStatus.ANALYSIS) .append(WAS_REQUIRED_LOGGER_STR).toString()); return ; } // debug tools Date beginProcessDate = null; Date endProcessDate = null; Date endPersistDate; Long persistenceDuration = Long.valueOf(0); WebResource parentWebResource = audit.getSubject(); if (parentWebResource instanceof Page) { analyserService.analyse(parentWebResource, audit); webResourceDataService.saveOrUpdate(parentWebResource); } else if (parentWebResource instanceof Site) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Analysing results of scope site"); beginProcessDate = Calendar.getInstance().getTime(); } analyserService.analyse(parentWebResource, audit); if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Analysing results of scope site took ") .append(endProcessDate.getTime() - beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } webResourceDataService.saveOrUpdate(parentWebResource); if 
(LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persisting Analysis results of scope site ") .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime()); } Long nbOfContent = webResourceDataService.getNumberOfChildWebResource(parentWebResource); Long i = Long.valueOf(0); List<WebResource> webResourceList; if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Analysing ") .append(nbOfContent) .append(" elements ").toString()); } while (i.compareTo(nbOfContent) < 0) { if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Analysing results of scope page from ") .append(i) .append(TO_LOGGER_STR) .append(i + analyseTreatmentWindow).toString()); beginProcessDate = Calendar.getInstance().getTime(); } webResourceList = webResourceDataService.getWebResourceFromItsParent( parentWebResource, i.intValue(), analyseTreatmentWindow); for (WebResource webResource : webResourceList) { if (LOGGER.isDebugEnabled()) { endProcessDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Analysing results for page ") .append(webResource.getURL()) .append(" took ") .append(endProcessDate.getTime() - beginProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); } analyserService.analyse(webResource,audit); if (LOGGER.isDebugEnabled()) { endPersistDate = Calendar.getInstance().getTime(); LOGGER.debug( new StringBuilder("Persisting Analysis results for page ") .append(webResource.getURL()) .append(" took ") .append(endPersistDate.getTime() - endProcessDate.getTime()) .append(MS_LOGGER_STR).toString()); persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime()); } } i = i + analyseTreatmentWindow; } } if (LOGGER.isDebugEnabled()) { LOGGER.debug( new StringBuilder("Application spent ") .append(persistenceDuration) .append(" ms to write in Disk while analysing").toString()); } setStatusToAudit(AuditStatus.COMPLETED); } /** * Set a new status to the audit instance and persist it * @param auditStatus */ public void setStatusToAudit(AuditStatus auditStatus) { audit.setStatus(auditStatus); audit = auditDataService.saveOrUpdate(audit); } }
tanaguru-engine : update log level
engine/tanaguru-engine/src/main/java/org/opens/tanaguru/service/command/AuditCommandImpl.java
tanaguru-engine : update log level
Java
agpl-3.0
72bf671cbfa5fb059ebb5111df3013f75a895867
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
d0620926-2e60-11e5-9284-b827eb9e62be
hello.java
d05c8776-2e60-11e5-9284-b827eb9e62be
d0620926-2e60-11e5-9284-b827eb9e62be
hello.java
d0620926-2e60-11e5-9284-b827eb9e62be
Java
agpl-3.0
529d108ad344f4086d33a34d8d98e38819d3a3c2
0
tdefilip/opennms,aihua/opennms,rdkgit/opennms,rdkgit/opennms,aihua/opennms,aihua/opennms,tdefilip/opennms,aihua/opennms,aihua/opennms,tdefilip/opennms,rdkgit/opennms,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,rdkgit/opennms,rdkgit/opennms,aihua/opennms,tdefilip/opennms,rdkgit/opennms,aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,tdefilip/opennms,tdefilip/opennms,aihua/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,aihua/opennms,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms
/*
 * This file is part of the OpenNMS(R) Application.
 *
 * OpenNMS(R) is Copyright (C) 2006-2008 The OpenNMS Group, Inc. All rights reserved.
 * OpenNMS(R) is a derivative work, containing both original code, included code and modified
 * code that was published under the GNU General Public License. Copyrights for modified
 * and included code are below.
 *
 * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
 *
 * Modifications:
 *
 * 2008 Oct 22: Use new loadResourceById method. - [email protected]
 *
 * Created: November 8, 2006
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 * For more information contact:
 *      OpenNMS Licensing <[email protected]>
 *      http://www.opennms.org/
 *      http://www.opennms.com/
 */
package org.opennms.web.svclayer.support;

import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.opennms.netmgt.dao.ResourceDao;
import org.opennms.netmgt.model.OnmsResource;
import org.opennms.netmgt.model.OnmsResourceType;
import org.opennms.web.Util;
import org.opennms.web.svclayer.ChooseResourceService;
import org.springframework.beans.factory.InitializingBean;

/**
 *
 * @author <a href="mailto:[email protected]">DJ Gregor</a>
 */
public class DefaultChooseResourceService implements ChooseResourceService, InitializingBean {
    public ResourceDao m_resourceDao;

    public ChooseResourceModel findChildResources(String resourceId, String endUrl) {
        if (resourceId == null) {
            throw new IllegalArgumentException("resourceId parameter may not be null");
        }

        if (endUrl == null) {
            throw new IllegalArgumentException("endUrl parameter may not be null");
        }

        ChooseResourceModel model = new ChooseResourceModel();
        model.setEndUrl(endUrl);

        OnmsResource resource = m_resourceDao.loadResourceById(resourceId);
        model.setResource(resource);

        Map<OnmsResourceType, List<OnmsResource>> resourceTypeMap =
            new LinkedHashMap<OnmsResourceType, List<OnmsResource>>();
        for (OnmsResource childResource : resource.getChildResources()) {
            if (!resourceTypeMap.containsKey(childResource.getResourceType())) {
                resourceTypeMap.put(childResource.getResourceType(), new LinkedList<OnmsResource>());
            }
            System.out.println("getId(): " + childResource.getId());
            System.out.println("getName(): " + childResource.getName());
            //checkLabelForQuotes(
            resourceTypeMap.get(childResource.getResourceType()).add(checkLabelForQuotes(childResource));
        }

        model.setResourceTypes(resourceTypeMap);

        return model;
    }

    private OnmsResource checkLabelForQuotes(OnmsResource childResource) {
        String lbl = Util.convertToJsSafeString(childResource.getLabel());

        OnmsResource resource = new OnmsResource(childResource.getName(), lbl, childResource.getResourceType(), childResource.getAttributes());
        resource.setParent(childResource.getParent());
        resource.setEntity(childResource.getEntity());
        resource.setLink(childResource.getLink());
        return resource;
    }

    public void afterPropertiesSet() {
        if (m_resourceDao == null) {
            throw new IllegalStateException("resourceDao property not set");
        }
    }

    public ResourceDao getResourceDao() {
        return m_resourceDao;
    }

    public void setResourceDao(ResourceDao resourceDao) {
        m_resourceDao = resourceDao;
    }
}
opennms-webapp/src/main/java/org/opennms/web/svclayer/support/DefaultChooseResourceService.java
/*
 * This file is part of the OpenNMS(R) Application.
 *
 * OpenNMS(R) is Copyright (C) 2006-2008 The OpenNMS Group, Inc. All rights reserved.
 * OpenNMS(R) is a derivative work, containing both original code, included code and modified
 * code that was published under the GNU General Public License. Copyrights for modified
 * and included code are below.
 *
 * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
 *
 * Modifications:
 *
 * 2008 Oct 22: Use new loadResourceById method. - [email protected]
 *
 * Created: November 8, 2006
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 * For more information contact:
 *      OpenNMS Licensing <[email protected]>
 *      http://www.opennms.org/
 *      http://www.opennms.com/
 */
package org.opennms.web.svclayer.support;

import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.opennms.netmgt.dao.ResourceDao;
import org.opennms.netmgt.model.OnmsResource;
import org.opennms.netmgt.model.OnmsResourceType;
import org.opennms.web.Util;
import org.opennms.web.svclayer.ChooseResourceService;
import org.springframework.beans.factory.InitializingBean;

/**
 *
 * @author <a href="mailto:[email protected]">DJ Gregor</a>
 */
public class DefaultChooseResourceService implements ChooseResourceService, InitializingBean {
    public ResourceDao m_resourceDao;

    public ChooseResourceModel findChildResources(String resourceId, String endUrl) {
        if (resourceId == null) {
            throw new IllegalArgumentException("resourceId parameter may not be null");
        }

        if (endUrl == null) {
            throw new IllegalArgumentException("endUrl parameter may not be null");
        }

        ChooseResourceModel model = new ChooseResourceModel();
        model.setEndUrl(endUrl);

        OnmsResource resource = m_resourceDao.loadResourceById(resourceId);
        model.setResource(resource);

        Map<OnmsResourceType, List<OnmsResource>> resourceTypeMap =
            new LinkedHashMap<OnmsResourceType, List<OnmsResource>>();
        for (OnmsResource childResource : resource.getChildResources()) {
            if (!resourceTypeMap.containsKey(childResource.getResourceType())) {
                resourceTypeMap.put(childResource.getResourceType(), new LinkedList<OnmsResource>());
            }
            resourceTypeMap.get(childResource.getResourceType()).add(checkLabelForQuotes(childResource));
        }

        model.setResourceTypes(resourceTypeMap);

        return model;
    }

    private OnmsResource checkLabelForQuotes(OnmsResource childResource) {
        String lbl = Util.convertToJsSafeString(childResource.getLabel());
        return new OnmsResource(childResource.getName(), lbl, childResource.getResourceType(), childResource.getAttributes());
    }

    public void afterPropertiesSet() {
        if (m_resourceDao == null) {
            throw new IllegalStateException("resourceDao property not set");
        }
    }

    public ResourceDao getResourceDao() {
        return m_resourceDao;
    }

    public void setResourceDao(ResourceDao resourceDao) {
        m_resourceDao = resourceDao;
    }
}
forgot to set the parent of the resource for the graph section.
opennms-webapp/src/main/java/org/opennms/web/svclayer/support/DefaultChooseResourceService.java
forgot to set the parent of the resource for the graph section.
Java
lgpl-2.1
d90ae6e79f8c236977f21cb60c3f16e688981a79
0
opax/exist,ambs/exist,wolfgangmm/exist,ambs/exist,eXist-db/exist,shabanovd/exist,adamretter/exist,windauer/exist,hungerburg/exist,MjAbuz/exist,joewiz/exist,dizzzz/exist,joewiz/exist,dizzzz/exist,jensopetersen/exist,joewiz/exist,ljo/exist,joewiz/exist,patczar/exist,lcahlander/exist,ljo/exist,jensopetersen/exist,jensopetersen/exist,jessealama/exist,wolfgangmm/exist,olvidalo/exist,windauer/exist,zwobit/exist,wshager/exist,hungerburg/exist,lcahlander/exist,windauer/exist,wolfgangmm/exist,wshager/exist,jensopetersen/exist,hungerburg/exist,MjAbuz/exist,opax/exist,adamretter/exist,wshager/exist,wolfgangmm/exist,RemiKoutcherawy/exist,ambs/exist,jessealama/exist,zwobit/exist,lcahlander/exist,kohsah/exist,RemiKoutcherawy/exist,adamretter/exist,jessealama/exist,ambs/exist,eXist-db/exist,dizzzz/exist,ljo/exist,MjAbuz/exist,RemiKoutcherawy/exist,hungerburg/exist,ljo/exist,lcahlander/exist,windauer/exist,kohsah/exist,kohsah/exist,wolfgangmm/exist,RemiKoutcherawy/exist,jessealama/exist,dizzzz/exist,jensopetersen/exist,zwobit/exist,kohsah/exist,kohsah/exist,shabanovd/exist,zwobit/exist,MjAbuz/exist,eXist-db/exist,RemiKoutcherawy/exist,jessealama/exist,zwobit/exist,adamretter/exist,eXist-db/exist,olvidalo/exist,wshager/exist,lcahlander/exist,wshager/exist,windauer/exist,patczar/exist,hungerburg/exist,kohsah/exist,olvidalo/exist,dizzzz/exist,shabanovd/exist,olvidalo/exist,windauer/exist,RemiKoutcherawy/exist,shabanovd/exist,eXist-db/exist,opax/exist,dizzzz/exist,adamretter/exist,olvidalo/exist,opax/exist,MjAbuz/exist,zwobit/exist,patczar/exist,patczar/exist,wshager/exist,jessealama/exist,ljo/exist,wolfgangmm/exist,eXist-db/exist,lcahlander/exist,patczar/exist,ljo/exist,patczar/exist,joewiz/exist,opax/exist,joewiz/exist,shabanovd/exist,jensopetersen/exist,ambs/exist,shabanovd/exist,adamretter/exist,ambs/exist,MjAbuz/exist
/* * eXist Open Source Native XML Database * Copyright (C) 2001-03, Wolfgang M. Meier ([email protected]) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Library General Public License for more details. * * You should have received a copy of the GNU Library General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * * $Id$ */ package org.exist.dom; import java.util.Iterator; import org.exist.memtree.DocumentBuilderReceiver; import org.exist.storage.DBBroker; import org.exist.storage.RangeIndexSpec; import org.exist.storage.StorageAddress; import org.exist.storage.serializers.Serializer; import org.exist.xquery.Cardinality; import org.exist.xquery.XPathException; import org.exist.xquery.value.AtomicValue; import org.exist.xquery.value.Item; import org.exist.xquery.value.NodeValue; import org.exist.xquery.value.Sequence; import org.exist.xquery.value.SequenceIterator; import org.exist.xquery.value.StringValue; import org.exist.xquery.value.Type; import org.exist.xquery.value.UntypedAtomicValue; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; /** * Placeholder class for DOM nodes. * * NodeProxy is an internal proxy class, acting as a placeholder for all types of persistent XML nodes * during query processing. NodeProxy just stores the node's unique id and the document it belongs to. * Query processing deals with these proxys most of the time. Using a NodeProxy is much cheaper * than loading the actual node from the database. The real DOM node is only loaded, * if further information is required for the evaluation of an XPath expression. To obtain * the real node for a proxy, simply call {@link #getNode()}. * * All sets of type NodeSet operate on NodeProxys. A node set is a special type of * sequence, so NodeProxy does also implement {@link org.exist.xquery.value.Item} and * can thus be an item in a sequence. Since, according to XPath 2, a single node is also * a sequence, NodeProxy does itself extend NodeSet. It thus represents a node set containing * just one, single node. * *@author Wolfgang Meier <[email protected]> */ public class NodeProxy implements NodeSet, NodeValue, Comparable { /** special value for gid: means document node */ public static final int DOCUMENT_NODE_GID = -1; /** special value for gid: means document element node */ public static final int DOCUMENT_ELEMENT_GID = 1; public static final int TO_BE_COMPUTED = -1; public static final int UNKNOWN = -1; public static final int UNKNOWN_GID = 0; /** * The owner document of this node. */ public DocumentImpl doc = null; /** * The unique internal node id in the document. * @link #DOCUMENT_NODE_GID means document node. */ public long gid = UNKNOWN_GID; /** * The internal storage address of the node in the * dom.dbx node store. This field is optional. */ private long internalAddress = UNKNOWN; /** * The type of this node (as defined by DOM), if known, @link #UNKNOW * otherwise. 
*/ public short nodeType = UNKNOWN; /** * The first {@link Match} object associated with this node. * Match objects are used to track fulltext hits throughout query processing. * * Matches are stored as a linked list. */ public Match match = null; private ContextItem context = null; public NodeProxy() { } /** * Construct a node proxy with unique id gid and owned by document doc. * *@param doc Description of the Parameter *@param gid Description of the Parameter */ public NodeProxy(DocumentImpl doc, long gid) { this.doc = doc; this.gid = gid; } /** * as above, but a hint is given about the node type of this proxy-object. * *@param doc Description of the Parameter *@param gid Description of the Parameter *@param nodeType Description of the Parameter */ public NodeProxy(DocumentImpl doc, long gid, short nodeType) { this.doc = doc; this.gid = gid; this.nodeType = nodeType; } public NodeProxy(DocumentImpl doc, long gid, short nodeType, long address) { this.doc = doc; this.gid = gid; this.nodeType = nodeType; this.internalAddress = address; } public NodeProxy(DocumentImpl doc, long gid, long address) { this.gid = gid; this.doc = doc; this.internalAddress = address; } public NodeProxy(NodeProxy p) { doc = p.doc; gid = p.gid; nodeType = p.nodeType; match = p.match; internalAddress = p.internalAddress; } /** create a proxy to a document node */ public NodeProxy(DocumentImpl doc) { this.doc = doc; this.gid = DOCUMENT_NODE_GID; this.nodeType = Node.DOCUMENT_NODE; } /* (non-Javadoc) * @see org.exist.xquery.value.NodeValue#getImplementation() */ public int getImplementationType() { return NodeValue.PERSISTENT_NODE; } /** Ordering first according to document ID; then if equal * according to node gid. */ public int compareTo(NodeProxy other) { final int diff = doc.docId - other.doc.docId; if ( diff != 0 ) return diff; return (int) (gid - other.gid); } public int compareTo(Object other) { if(!(other instanceof NodeProxy)) return 1; final NodeProxy p = (NodeProxy) other; return compareTo(p); } public boolean equals(Object other) { if (!(other instanceof NodeProxy)) throw new RuntimeException("cannot compare nodes from different implementations"); NodeProxy node = (NodeProxy) other; if (node.doc.getDocId() == doc.getDocId() && node.gid == gid) return true; return false; } public boolean equals(NodeValue other) throws XPathException { if (other.getImplementationType() != NodeValue.PERSISTENT_NODE) throw new XPathException("cannot compare persistent node with in-memory node"); NodeProxy node = (NodeProxy) other; if (node.doc.getDocId() == doc.getDocId() && node.gid == gid) return true; return false; } public boolean before(NodeValue other) throws XPathException { return before(other, true); } protected boolean before(NodeValue other, boolean includeAncestors) throws XPathException { if (other.getImplementationType() != NodeValue.PERSISTENT_NODE) throw new XPathException("cannot compare persistent node with in-memory node"); NodeProxy node = (NodeProxy) other; if (doc.docId != node.doc.docId) return false; // System.out.println(gid + " << " + node.gid); int la = doc.getTreeLevel(gid); int lb = doc.getTreeLevel(node.gid); long pa = gid, pb = node.gid; if (la > lb) { while (la > lb) { pa = XMLUtil.getParentId(doc, pa, la); --la; } if (pa == pb) // a is a descendant of b return false; else return pa < pb; } else if (lb > la) { while (lb > la) { pb = XMLUtil.getParentId(node.doc, pb, lb); --lb; } if (pb == pa) // a is an ancestor of b return includeAncestors ? 
true : false; else return pa < pb; } else return pa < pb; } public boolean after(NodeValue other) throws XPathException { return after(other, true); } protected boolean after(NodeValue other, boolean includeDescendants) throws XPathException { if (other.getImplementationType() != NodeValue.PERSISTENT_NODE) throw new XPathException("cannot compare persistent node with in-memory node"); NodeProxy node = (NodeProxy) other; if (doc.docId != node.doc.docId) return false; // System.out.println(gid + " >> " + node.gid); int la = doc.getTreeLevel(gid); int lb = doc.getTreeLevel(node.gid); long pa = gid, pb = node.gid; if (la > lb) { while (la > lb) { pa = XMLUtil.getParentId(doc, pa, la); --la; } // a is a descendant of b if (pa == pb) return includeDescendants ? true : false; else return pa > pb; } else if (lb > la) { while (lb > la) { pb = XMLUtil.getParentId(node.doc, pb, lb); --lb; } if (pb == pa) return false; else return pa > pb; } else return pa > pb; } public Document getOwnerDocument() { return doc; } public final DocumentImpl getDocument() { return doc; } public long getGID() { return gid; } public Node getNode() { if (isDocument()) return doc; else return doc.getNode(this); } public boolean isDocument() { return nodeType == Node.DOCUMENT_NODE; } public short getNodeType() { return nodeType; } public String getNodeValue() { if ( isDocument() ) { NodeImpl root = (NodeImpl) doc.getDocumentElement(); return doc.getBroker().getNodeValue( new NodeProxy(doc, root.gid, root.internalAddress), false); } else { return doc.getBroker().getNodeValue(this, false); } } public String getNodeValueSeparated() { return doc.getBroker().getNodeValue(this, true); } public String toString() { return doc.getNode(gid).toString(); } /** * Sets the nodeType. * @param nodeType The nodeType to set */ public void setNodeType(short nodeType) { this.nodeType = nodeType; } /** * Returns the storage address of this node in dom.dbx. * @return long */ public long getInternalAddress() { return internalAddress; } /** * Sets the storage address of this node in dom.dbx. 
* * @param internalAddress The internalAddress to set */ public void setInternalAddress(long internalAddress) { this.internalAddress = internalAddress; } public void setIndexType(int type) { internalAddress = StorageAddress.setIndexType(internalAddress, (short) type); } public int getIndexType() { return RangeIndexSpec.indexTypeToXPath( StorageAddress.indexTypeFromPointer(internalAddress) ); } public boolean hasTextIndex() { return RangeIndexSpec.hasFulltextIndex( StorageAddress.indexTypeFromPointer(internalAddress) ); } public boolean hasMixedContent() { return RangeIndexSpec.hasMixedContent( StorageAddress.indexTypeFromPointer(internalAddress) ); } public Match getMatches() { return match; } public void setMatches(Match match) { this.match = match; } public boolean hasMatch(Match m) { if (m == null || match == null) return false; Match next = match; do { if (next.equals(m)) return true; } while ((next = next.getNextMatch()) != null); return false; } public void addMatch(Match m) { if (match == null) { match = m; match.prevMatch = null; match.nextMatch = null; return; } Match next = match; int cmp; while (next != null) { cmp = next.compareTo(m); if (cmp == 0 && m.getNodeId() == next.getNodeId()) return; else if (cmp < 0) { if (next.prevMatch != null) next.prevMatch.nextMatch = m; else match = m; m.prevMatch = next.prevMatch; next.prevMatch = m; m.nextMatch = next; return; } else if (next.nextMatch == null) { next.nextMatch = m; m.prevMatch = next; m.nextMatch = null; return; } next = next.nextMatch; } } public void addMatches(NodeProxy p) { if(p == this) return; Match m = p.getMatches(); while (m != null) { addMatch(new Match(m)); m = m.nextMatch; } } public void printMatches(Match m) { System.out.print(gid); System.out.print(": "); Match next = m; while (next != null) { System.out.print(next.getMatchingTerm() + " [" + next.getNodeId() + "] "); System.out.print("-> " + (next.nextMatch == null ? "null" : next.nextMatch.getMatchingTerm())); System.out.print(" "); next = next.nextMatch; } System.out.println(); } /** * Add a node to the list of context nodes for this node. * * NodeProxy internally stores the context nodes of the XPath context, for which * this node has been selected during a previous processing step. * * Since eXist tries to process many expressions in one, single processing step, * the context information is required to resolve predicate expressions. For * example, for an expression like //SCENE[SPEECH/SPEAKER='HAMLET'], * we have to remember the SCENE nodes for which the equality expression * in the predicate was true. Thus, when evaluating the step SCENE[SPEECH], the * SCENE nodes become context items of the SPEECH nodes and this context * information is preserved through all following steps. * * To process the predicate expression, {@link org.exist.xquery.Predicate} will take the * context nodes returned by the filter expression and compare them to its context * node set. 
*/ public void addContextNode(NodeProxy node) { if (context == null) { context = new ContextItem(node); return; } ContextItem next = context; while (next != null) { if (next.getNode().gid == node.gid) break; if (next.getNextItem() == null) { next.setNextItem(new ContextItem(node)); break; } next = next.getNextItem(); } } public void printContext() { ContextItem next = context; System.out.print(gid + ": "); while (next != null) { System.out.print(next.getNode().gid); System.out.print(' '); next = next.getNextItem(); } System.out.println(); } public void copyContext(NodeProxy node) { context = node.getContext(); } public void clearContext() { context = null; } public ContextItem getContext() { return context; } // methods of interface Item /* (non-Javadoc) * @see org.exist.xquery.value.Item#getType() */ public int getType() { switch (nodeType) { case Node.ELEMENT_NODE : return Type.ELEMENT; case Node.ATTRIBUTE_NODE : return Type.ATTRIBUTE; case Node.TEXT_NODE : return Type.TEXT; case Node.PROCESSING_INSTRUCTION_NODE : return Type.PROCESSING_INSTRUCTION; case Node.COMMENT_NODE : return Type.COMMENT; case Node.DOCUMENT_NODE: return Type.DOCUMENT; default : return Type.NODE; // unknown type } } /* (non-Javadoc) * @see org.exist.xquery.value.Item#toSequence() */ public Sequence toSequence() { return this; } /* (non-Javadoc) * @see org.exist.xquery.value.Item#getStringValue() */ public String getStringValue() { return getNodeValue(); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#convertTo(int) */ public AtomicValue convertTo(int requiredType) throws XPathException { return new StringValue(getNodeValue()).convertTo(requiredType); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#atomize() */ public AtomicValue atomize() throws XPathException { return new UntypedAtomicValue(getNodeValue()); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#toSAX(org.exist.storage.DBBroker, org.xml.sax.ContentHandler) */ public void toSAX(DBBroker broker, ContentHandler handler) throws SAXException { Serializer serializer = broker.getSerializer(); serializer.reset(); serializer.setProperty(Serializer.GENERATE_DOC_EVENTS, "false"); serializer.setSAXHandlers(handler, null); serializer.toSAX(this); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#copyTo(org.exist.storage.DBBroker, org.exist.memtree.DocumentBuilderReceiver) */ public void copyTo(DBBroker broker, DocumentBuilderReceiver receiver) throws SAXException { if(nodeType == Node.ATTRIBUTE_NODE) { AttrImpl attr = (AttrImpl) getNode(); receiver.attribute(attr.getQName(), attr.getValue()); } else receiver.addReferenceNode(this); // Serializer serializer = broker.getSerializer(); // serializer.toReceiver(this, receiver); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#conversionPreference(java.lang.Class) */ public int conversionPreference(Class javaClass) { if (javaClass.isAssignableFrom(NodeProxy.class)) return 0; if (javaClass.isAssignableFrom(Node.class)) return 1; if (javaClass == String.class || javaClass == CharSequence.class) return 2; if (javaClass == Character.class || javaClass == char.class) return 2; if (javaClass == Double.class || javaClass == double.class) return 10; if (javaClass == Float.class || javaClass == float.class) return 11; if (javaClass == Long.class || javaClass == long.class) return 12; if (javaClass == Integer.class || javaClass == int.class) return 13; if (javaClass == Short.class || javaClass == short.class) return 14; if (javaClass == Byte.class || javaClass == byte.class) return 15; if (javaClass == 
Boolean.class || javaClass == boolean.class) return 16; if (javaClass == Object.class) return 20; return Integer.MAX_VALUE; } /* (non-Javadoc) * @see org.exist.xquery.value.Item#toJavaObject(java.lang.Class) */ public Object toJavaObject(Class target) throws XPathException { if (target.isAssignableFrom(NodeProxy.class)) return this; else if (target.isAssignableFrom(Node.class)) return getNode(); else if (target == Object.class) return getNode(); else { StringValue v = new StringValue(getStringValue()); return v.toJavaObject(target); } } /* * Methods of interface Sequence: */ /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#getItemType() */ public int getItemType() { return getType(); } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#getCardinality() */ public int getCardinality() { return Cardinality.EXACTLY_ONE; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#isCached() */ public boolean isCached() { return false; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#setIsCached(boolean) */ public void setIsCached(boolean cached) { } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#toNodeSet() */ public NodeSet toNodeSet() throws XPathException { return this; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#effectiveBooleanValue() */ public boolean effectiveBooleanValue() throws XPathException { return true; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#removeDuplicates() */ public void removeDuplicates() { // single node: no duplicates } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#setSelfAsContext() */ public void setSelfAsContext() { addContextNode(this); } /* -----------------------------------------------* * Methods of class NodeSet * -----------------------------------------------*/ /* (non-Javadoc) * @see org.exist.dom.NodeSet#iterator() */ public Iterator iterator() { return new SingleNodeIterator(this); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#iterate() */ public SequenceIterator iterate() { return new SingleNodeIterator(this); } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#unorderedIterator() */ public SequenceIterator unorderedIterator() { return new SingleNodeIterator(this); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#contains(org.exist.dom.DocumentImpl, long) */ public boolean contains(DocumentImpl doc, long nodeId) { return this.doc.getDocId() == doc.getDocId() && this.gid == nodeId; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#contains(org.exist.dom.NodeProxy) */ public boolean contains(NodeProxy proxy) { return doc.getDocId() == proxy.doc.getDocId() && gid == proxy.gid; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#addAll(org.exist.dom.NodeSet) */ public void addAll(NodeSet other) { } /* (non-Javadoc) * @see org.exist.dom.NodeSet#add(org.exist.dom.NodeProxy) */ public void add(NodeProxy proxy) { } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#add(org.exist.xquery.value.Item) */ public void add(Item item) throws XPathException { } /* (non-Javadoc) * @see org.exist.dom.NodeSet#add(org.exist.dom.NodeProxy, int) */ public void add(NodeProxy proxy, int sizeHint) { } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#addAll(org.exist.xquery.value.Sequence) */ public void addAll(Sequence other) throws XPathException { } /* (non-Javadoc) * @see org.w3c.dom.NodeList#getLength() */ public int getLength() { return 1; } /* (non-Javadoc) * @see org.w3c.dom.NodeList#item(int) */ public Node item(int pos) { return pos > 0 ? 
null : getNode(); } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#itemAt(int) */ public Item itemAt(int pos) { return pos > 0 ? null : this; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#get(int) */ public NodeProxy get(int pos) { return pos > 0 ? null : this; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#get(org.exist.dom.NodeProxy) */ public NodeProxy get(NodeProxy p) { return contains(p) ? this : null; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#get(org.exist.dom.DocumentImpl, long) */ public NodeProxy get(DocumentImpl document, long nodeId) { if(doc.getDocId() == document.getDocId() && nodeId == gid) return this; else return null; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#parentWithChild(org.exist.dom.NodeProxy, boolean, boolean, int) */ public NodeProxy parentWithChild(NodeProxy proxy, boolean directParent, boolean includeSelf, int level) { return parentWithChild(proxy.getDocument(), proxy.gid, directParent, includeSelf, level); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#parentWithChild(org.exist.dom.DocumentImpl, long, boolean, boolean) */ public NodeProxy parentWithChild(DocumentImpl doc, long gid, boolean directParent, boolean includeSelf) { return parentWithChild(doc, gid, directParent, includeSelf, TO_BE_COMPUTED ); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#parentWithChild(org.exist.dom.DocumentImpl, long, boolean, boolean, int) */ public NodeProxy parentWithChild( DocumentImpl otherDoc, long otherId, boolean directParent, boolean includeSelf, int level) { if(otherDoc.getDocId() != doc.getDocId()) return null; if(includeSelf && otherId == gid) return this; if (level < 0) level = doc.getTreeLevel(otherId); while (otherId > 0) { otherId = XMLUtil.getParentId(doc, otherId, level); if(otherId == gid) return this; else if (directParent) return null; else --level; } return null; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getContextNodes(boolean) */ public NodeSet getContextNodes(boolean rememberContext) { ExtArrayNodeSet result = new ExtArrayNodeSet(); ContextItem contextNode = getContext(); while (contextNode != null) { NodeProxy p = contextNode.getNode(); p.addMatches(this); if (!result.contains(p)) { if (rememberContext) p.addContextNode(p); result.add(p); } contextNode = contextNode.getNextItem(); } return result; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getRange(org.exist.dom.DocumentImpl, long, long) */ public NodeSet getRange(DocumentImpl document, long lower, long upper) { if(doc.getDocId() == document.getDocId() && gid >= lower && gid <= upper) return this; return NodeSet.EMPTY_SET; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getState() */ public int getState() { return 1; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#hasChanged(int) */ public boolean hasChanged(int previousState) { return false; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getSizeHint(org.exist.dom.DocumentImpl) */ public int getSizeHint(DocumentImpl document) { if(document.getDocId() == doc.getDocId()) return 1; else return 0; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getDocumentSet() */ public DocumentSet getDocumentSet() { DocumentSet docs = new DocumentSet(1); docs.add(doc); return docs; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#containsDoc(org.exist.dom.DocumentImpl) */ public boolean containsDoc(DocumentImpl document) { return doc.getDocId() == document.getDocId(); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getParents(boolean) */ public NodeSet getParents(boolean rememberContext) { long pid = XMLUtil.getParentId(doc, gid); if ( pid 
!= DOCUMENT_NODE_GID ) { NodeProxy parent = new NodeProxy(doc, pid, Node.ELEMENT_NODE); if (rememberContext) parent.addContextNode(this); else parent.copyContext(this); return parent; } return NodeSet.EMPTY_SET; } public NodeSet getAncestors(boolean rememberContext, boolean includeSelf) { NodeSet ancestors = new ExtArrayNodeSet(); if (includeSelf) { ancestors.add(this); } long pid = gid; // calculate parent's gid while((pid = XMLUtil.getParentId(getDocument(), pid)) > 0) { NodeProxy parent = new NodeProxy(getDocument(), pid, Node.ELEMENT_NODE); if (rememberContext) parent.addContextNode(this); else parent.copyContext(this); ancestors.add(parent); } return ancestors; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#intersection(org.exist.dom.NodeSet) */ public NodeSet intersection(NodeSet other) { if(other.contains(this)) return this; else return NodeSet.EMPTY_SET; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#deepIntersection(org.exist.dom.NodeSet) */ public NodeSet deepIntersection(NodeSet other) { NodeProxy p = other.parentWithChild(this, false, true, TO_BE_COMPUTED ); if(p == null) return NodeSet.EMPTY_SET; if(p.gid != gid) p.addMatches(this); return p; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#union(org.exist.dom.NodeSet) */ public NodeSet union(NodeSet other) { ExtArrayNodeSet result = new ExtArrayNodeSet(); result.addAll(other); result.add(this); return result; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#except(org.exist.dom.NodeSet) */ public NodeSet except(NodeSet other) { return other.contains(this) ? NodeSet.EMPTY_SET : this; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectParentChild(org.exist.dom.NodeSet, int) */ public NodeSet selectParentChild(NodeSet al, int mode) { return selectParentChild(al, mode, false); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectParentChild(org.exist.dom.NodeSet, int, boolean) */ public NodeSet selectParentChild(NodeSet al, int mode, boolean rememberContext) { NodeProxy p = al.parentWithChild(this, true, false, TO_BE_COMPUTED ); if(p != null) { if(mode == DESCENDANT) { if (rememberContext) addContextNode(p); else copyContext(p); return this; } else { if (rememberContext) p.addContextNode(this); else p.copyContext(this); return p; } } else return NodeSet.EMPTY_SET; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectAncestors(org.exist.dom.NodeSet, boolean, boolean) */ public NodeSet selectAncestors(NodeSet al, boolean includeSelf, boolean rememberContext) { return NodeSetHelper.selectAncestors(this, al, includeSelf, rememberContext); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectSiblings(org.exist.dom.NodeSet, int) */ public NodeSet selectSiblings(NodeSet siblings, int mode) { return NodeSetHelper.selectSiblings(this, siblings, mode); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectAncestorDescendant(org.exist.dom.NodeSet, int, boolean, boolean) */ public NodeSet selectAncestorDescendant(NodeSet al, int mode, boolean includeSelf, boolean rememberContext) { return NodeSetHelper.selectAncestorDescendant(this, al, mode, includeSelf, rememberContext); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectFollowing(org.exist.dom.NodeSet) */ public NodeSet selectFollowing(NodeSet following) throws XPathException { return NodeSetHelper.selectFollowing(this, following); } public NodeSet selectPreceding(NodeSet preceding) throws XPathException { return NodeSetHelper.selectPreceding(this, preceding); } public NodeSet directSelectAttribute(QName qname, boolean rememberContext) { if (nodeType != UNKNOWN && nodeType != 
Node.ELEMENT_NODE) return NodeSet.EMPTY_SET; NodeImpl node = (NodeImpl) getNode(); if (node.getNodeType() != Node.ELEMENT_NODE) return NodeSet.EMPTY_SET; AttrImpl attr = (AttrImpl) ((ElementImpl) node).getAttributeNodeNS(qname.getNamespaceURI(), qname.getLocalName()); if (attr == null) return NodeSet.EMPTY_SET; NodeProxy child = new NodeProxy(doc, attr.getGID(), Node.ATTRIBUTE_NODE, attr.getInternalAddress()); if (rememberContext) child.addContextNode(this); else child.copyContext(this); return child; } private final static class SingleNodeIterator implements Iterator, SequenceIterator { private boolean hasNext = true; private NodeProxy node; public SingleNodeIterator(NodeProxy node) { this.node = node; } public boolean hasNext() { return hasNext; } public Object next() { if (hasNext) { hasNext = false; return node; } else return null; } public void remove() { throw new RuntimeException("not supported"); } /* (non-Javadoc) * @see org.exist.xquery.value.SequenceIterator#nextItem() */ public Item nextItem() { if (hasNext) { hasNext = false; return node; } else return null; } } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#isPersistentSet() */ public boolean isPersistentSet() { return true; } }
src/org/exist/dom/NodeProxy.java
/* * eXist Open Source Native XML Database * Copyright (C) 2001-03, Wolfgang M. Meier ([email protected]) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Library General Public License for more details. * * You should have received a copy of the GNU Library General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * * $Id$ */ package org.exist.dom; import java.util.Iterator; import org.exist.memtree.DocumentBuilderReceiver; import org.exist.storage.DBBroker; import org.exist.storage.RangeIndexSpec; import org.exist.storage.StorageAddress; import org.exist.storage.serializers.Serializer; import org.exist.xquery.Cardinality; import org.exist.xquery.XPathException; import org.exist.xquery.value.AtomicValue; import org.exist.xquery.value.Item; import org.exist.xquery.value.NodeValue; import org.exist.xquery.value.Sequence; import org.exist.xquery.value.SequenceIterator; import org.exist.xquery.value.StringValue; import org.exist.xquery.value.Type; import org.exist.xquery.value.UntypedAtomicValue; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; /** * Placeholder class for DOM nodes. * * NodeProxy is an internal proxy class, acting as a placeholder for all types of persistent XML nodes * during query processing. NodeProxy just stores the node's unique id and the document it belongs to. * Query processing deals with these proxys most of the time. Using a NodeProxy is much cheaper * than loading the actual node from the database. The real DOM node is only loaded, * if further information is required for the evaluation of an XPath expression. To obtain * the real node for a proxy, simply call {@link #getNode()}. * * All sets of type NodeSet operate on NodeProxys. A node set is a special type of * sequence, so NodeProxy does also implement {@link org.exist.xquery.value.Item} and * can thus be an item in a sequence. Since, according to XPath 2, a single node is also * a sequence, NodeProxy does itself extend NodeSet. It thus represents a node set containing * just one, single node. * *@author Wolfgang Meier <[email protected]> */ public class NodeProxy implements NodeSet, NodeValue, Comparable { /** special value for gid: means document node */ public static final int DOCUMENT_NODE_GID = -1; /** special value for gid: means document element node */ public static final int DOCUMENT_ELEMENT_GID = 1; public static final int TO_BE_COMPUTED = -1; public static final int UNKNOWN = -1; public static final int UNKNOWN_GID = 0; /** * The owner document of this node. */ public DocumentImpl doc = null; /** * The unique internal node id in the document. * @link #DOCUMENT_NODE_GID means document node. */ public long gid = UNKNOWN_GID; /** * The internal storage address of the node in the * dom.dbx node store. This field is optional. */ private long internalAddress = UNKNOWN; /** * The type of this node (as defined by DOM), if known, @link #UNKNOW * otherwise. 
*/ public short nodeType = UNKNOWN; /** * The first {@link Match} object associated with this node. * Match objects are used to track fulltext hits throughout query processing. * * Matches are stored as a linked list. */ public Match match = null; private ContextItem context = null; public NodeProxy() { } /** * Construct a node proxy with unique id gid and owned by document doc. * *@param doc Description of the Parameter *@param gid Description of the Parameter */ public NodeProxy(DocumentImpl doc, long gid) { this.doc = doc; this.gid = gid; } /** * as above, but a hint is given about the node type of this proxy-object. * *@param doc Description of the Parameter *@param gid Description of the Parameter *@param nodeType Description of the Parameter */ public NodeProxy(DocumentImpl doc, long gid, short nodeType) { this.doc = doc; this.gid = gid; this.nodeType = nodeType; } public NodeProxy(DocumentImpl doc, long gid, short nodeType, long address) { this.doc = doc; this.gid = gid; this.nodeType = nodeType; this.internalAddress = address; } public NodeProxy(DocumentImpl doc, long gid, long address) { this.gid = gid; this.doc = doc; this.internalAddress = address; } public NodeProxy(NodeProxy p) { doc = p.doc; gid = p.gid; nodeType = p.nodeType; match = p.match; internalAddress = p.internalAddress; } /** create a proxy to a document node */ public NodeProxy(DocumentImpl doc) { this.doc = doc; this.gid = DOCUMENT_NODE_GID; this.nodeType = Node.DOCUMENT_NODE; } /* (non-Javadoc) * @see org.exist.xquery.value.NodeValue#getImplementation() */ public int getImplementationType() { return NodeValue.PERSISTENT_NODE; } /** Ordering first according to document ID; then if equal * according to node gid. */ public int compareTo(NodeProxy other) { final int diff = doc.docId - other.doc.docId; if ( diff != 0 ) return diff; return (int) (gid - other.gid); } public int compareTo(Object other) { if(!(other instanceof NodeProxy)) return 1; final NodeProxy p = (NodeProxy) other; return compareTo(p); } public boolean equals(Object other) { if (!(other instanceof NodeProxy)) throw new RuntimeException("cannot compare nodes from different implementations"); NodeProxy node = (NodeProxy) other; if (node.doc.getDocId() == doc.getDocId() && node.gid == gid) return true; return false; } public boolean equals(NodeValue other) throws XPathException { if (other.getImplementationType() != NodeValue.PERSISTENT_NODE) throw new XPathException("cannot compare persistent node with in-memory node"); NodeProxy node = (NodeProxy) other; if (node.doc.getDocId() == doc.getDocId() && node.gid == gid) return true; return false; } public boolean before(NodeValue other) throws XPathException { return before(other, true); } protected boolean before(NodeValue other, boolean includeAncestors) throws XPathException { if (other.getImplementationType() != NodeValue.PERSISTENT_NODE) throw new XPathException("cannot compare persistent node with in-memory node"); NodeProxy node = (NodeProxy) other; if (doc.docId != node.doc.docId) return false; // System.out.println(gid + " << " + node.gid); int la = doc.getTreeLevel(gid); int lb = doc.getTreeLevel(node.gid); long pa = gid, pb = node.gid; if (la > lb) { while (la > lb) { pa = XMLUtil.getParentId(doc, pa, la); --la; } if (pa == pb) // a is a descendant of b return false; else return pa < pb; } else if (lb > la) { while (lb > la) { pb = XMLUtil.getParentId(node.doc, pb, lb); --lb; } if (pb == pa) // a is an ancestor of b return includeAncestors ? 
true : false; else return pa < pb; } else return pa < pb; } public boolean after(NodeValue other) throws XPathException { return after(other, true); } protected boolean after(NodeValue other, boolean includeDescendants) throws XPathException { if (other.getImplementationType() != NodeValue.PERSISTENT_NODE) throw new XPathException("cannot compare persistent node with in-memory node"); NodeProxy node = (NodeProxy) other; if (doc.docId != node.doc.docId) return false; // System.out.println(gid + " >> " + node.gid); int la = doc.getTreeLevel(gid); int lb = doc.getTreeLevel(node.gid); long pa = gid, pb = node.gid; if (la > lb) { while (la > lb) { pa = XMLUtil.getParentId(doc, pa, la); --la; } // a is a descendant of b if (pa == pb) return includeDescendants ? true : false; else return pa > pb; } else if (lb > la) { while (lb > la) { pb = XMLUtil.getParentId(node.doc, pb, lb); --lb; } if (pb == pa) return false; else return pa > pb; } else return pa > pb; } public Document getOwnerDocument() { return doc; } public final DocumentImpl getDocument() { return doc; } public long getGID() { return gid; } public Node getNode() { if (isDocument()) return doc; else return doc.getNode(this); } public boolean isDocument() { return nodeType == Node.DOCUMENT_NODE; } public short getNodeType() { return nodeType; } public String getNodeValue() { if ( isDocument() ) { NodeImpl root = (NodeImpl) doc.getDocumentElement(); return doc.getBroker().getNodeValue( new NodeProxy(doc, root.gid, root.internalAddress), false); } else { return doc.getBroker().getNodeValue(this, false); } } public String getNodeValueSeparated() { return doc.getBroker().getNodeValue(this, true); } public String toString() { return doc.getNode(gid).toString(); } /** * Sets the nodeType. * @param nodeType The nodeType to set */ public void setNodeType(short nodeType) { this.nodeType = nodeType; } /** * Returns the storage address of this node in dom.dbx. * @return long */ public long getInternalAddress() { return internalAddress; } /** * Sets the storage address of this node in dom.dbx. 
* * @param internalAddress The internalAddress to set */ public void setInternalAddress(long internalAddress) { this.internalAddress = internalAddress; } public void setIndexType(int type) { internalAddress = StorageAddress.setIndexType(internalAddress, (short) type); } public int getIndexType() { return RangeIndexSpec.indexTypeToXPath( StorageAddress.indexTypeFromPointer(internalAddress) ); } public boolean hasTextIndex() { return RangeIndexSpec.hasFulltextIndex( StorageAddress.indexTypeFromPointer(internalAddress) ); } public boolean hasMixedContent() { return RangeIndexSpec.hasMixedContent( StorageAddress.indexTypeFromPointer(internalAddress) ); } public Match getMatches() { return match; } public void setMatches(Match match) { this.match = match; } public boolean hasMatch(Match m) { if (m == null || match == null) return false; Match next = match; do { if (next.equals(m)) return true; } while ((next = next.getNextMatch()) != null); return false; } public void addMatch(Match m) { if (match == null) { match = m; match.prevMatch = null; match.nextMatch = null; return; } Match next = match; int cmp; while (next != null) { cmp = next.compareTo(m); if (cmp == 0 && m.getNodeId() == next.getNodeId()) return; else if (cmp < 0) { if (next.prevMatch != null) next.prevMatch.nextMatch = m; else match = m; m.prevMatch = next.prevMatch; next.prevMatch = m; m.nextMatch = next; return; } else if (next.nextMatch == null) { next.nextMatch = m; m.prevMatch = next; m.nextMatch = null; return; } next = next.nextMatch; } } public void addMatches(NodeProxy p) { if(p == this) return; Match m = p.getMatches(); while (m != null) { addMatch(new Match(m)); m = m.nextMatch; } } public void printMatches(Match m) { System.out.print(gid); System.out.print(": "); Match next = m; while (next != null) { System.out.print(next.getMatchingTerm() + " [" + next.getNodeId() + "] "); System.out.print("-> " + (next.nextMatch == null ? "null" : next.nextMatch.getMatchingTerm())); System.out.print(" "); next = next.nextMatch; } System.out.println(); } /** * Add a node to the list of context nodes for this node. * * NodeProxy internally stores the context nodes of the XPath context, for which * this node has been selected during a previous processing step. * * Since eXist tries to process many expressions in one, single processing step, * the context information is required to resolve predicate expressions. For * example, for an expression like //SCENE[SPEECH/SPEAKER='HAMLET'], * we have to remember the SCENE nodes for which the equality expression * in the predicate was true. Thus, when evaluating the step SCENE[SPEECH], the * SCENE nodes become context items of the SPEECH nodes and this context * information is preserved through all following steps. * * To process the predicate expression, {@link org.exist.xquery.Predicate} will take the * context nodes returned by the filter expression and compare them to its context * node set. 
*/ public void addContextNode(NodeProxy node) { if (context == null) { context = new ContextItem(node); return; } ContextItem next = context; while (next != null) { if (next.getNode().gid == node.gid) break; if (next.getNextItem() == null) { next.setNextItem(new ContextItem(node)); break; } next = next.getNextItem(); } } public void printContext() { ContextItem next = context; System.out.print(gid + ": "); while (next != null) { System.out.print(next.getNode().gid); System.out.print(' '); next = next.getNextItem(); } System.out.println(); } public void copyContext(NodeProxy node) { context = node.getContext(); } public void clearContext() { context = null; } public ContextItem getContext() { return context; } // methods of interface Item /* (non-Javadoc) * @see org.exist.xquery.value.Item#getType() */ public int getType() { switch (nodeType) { case Node.ELEMENT_NODE : return Type.ELEMENT; case Node.ATTRIBUTE_NODE : return Type.ATTRIBUTE; case Node.TEXT_NODE : return Type.TEXT; case Node.PROCESSING_INSTRUCTION_NODE : return Type.PROCESSING_INSTRUCTION; case Node.COMMENT_NODE : return Type.COMMENT; case Node.DOCUMENT_NODE: return Type.DOCUMENT; default : return Type.NODE; // unknown type } } /* (non-Javadoc) * @see org.exist.xquery.value.Item#toSequence() */ public Sequence toSequence() { return this; } /* (non-Javadoc) * @see org.exist.xquery.value.Item#getStringValue() */ public String getStringValue() { return getNodeValue(); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#convertTo(int) */ public AtomicValue convertTo(int requiredType) throws XPathException { return new StringValue(getNodeValue()).convertTo(requiredType); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#atomize() */ public AtomicValue atomize() throws XPathException { return new UntypedAtomicValue(getNodeValue()); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#toSAX(org.exist.storage.DBBroker, org.xml.sax.ContentHandler) */ public void toSAX(DBBroker broker, ContentHandler handler) throws SAXException { Serializer serializer = broker.getSerializer(); serializer.reset(); serializer.setProperty(Serializer.GENERATE_DOC_EVENTS, "false"); serializer.setSAXHandlers(handler, null); serializer.toSAX(this); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#copyTo(org.exist.storage.DBBroker, org.exist.memtree.DocumentBuilderReceiver) */ public void copyTo(DBBroker broker, DocumentBuilderReceiver receiver) throws SAXException { if(nodeType == Node.ATTRIBUTE_NODE) { AttrImpl attr = (AttrImpl) getNode(); receiver.attribute(attr.getQName(), attr.getValue()); } else receiver.addReferenceNode(this); // Serializer serializer = broker.getSerializer(); // serializer.toReceiver(this, receiver); } /* (non-Javadoc) * @see org.exist.xquery.value.Item#conversionPreference(java.lang.Class) */ public int conversionPreference(Class javaClass) { if (javaClass.isAssignableFrom(NodeProxy.class)) return 0; if (javaClass.isAssignableFrom(Node.class)) return 1; if (javaClass == String.class || javaClass == CharSequence.class) return 2; if (javaClass == Character.class || javaClass == char.class) return 2; if (javaClass == Double.class || javaClass == double.class) return 10; if (javaClass == Float.class || javaClass == float.class) return 11; if (javaClass == Long.class || javaClass == long.class) return 12; if (javaClass == Integer.class || javaClass == int.class) return 13; if (javaClass == Short.class || javaClass == short.class) return 14; if (javaClass == Byte.class || javaClass == byte.class) return 15; if (javaClass == 
Boolean.class || javaClass == boolean.class) return 16; if (javaClass == Object.class) return 20; return Integer.MAX_VALUE; } /* (non-Javadoc) * @see org.exist.xquery.value.Item#toJavaObject(java.lang.Class) */ public Object toJavaObject(Class target) throws XPathException { if (target.isAssignableFrom(NodeProxy.class)) return this; else if (target.isAssignableFrom(Node.class)) return getNode(); else if (target == Object.class) return getNode(); else { StringValue v = new StringValue(getStringValue()); return v.toJavaObject(target); } } /* * Methods of interface Sequence: */ /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#getItemType() */ public int getItemType() { return Type.NODE; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#getCardinality() */ public int getCardinality() { return Cardinality.EXACTLY_ONE; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#isCached() */ public boolean isCached() { return false; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#setIsCached(boolean) */ public void setIsCached(boolean cached) { } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#toNodeSet() */ public NodeSet toNodeSet() throws XPathException { return this; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#effectiveBooleanValue() */ public boolean effectiveBooleanValue() throws XPathException { return true; } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#removeDuplicates() */ public void removeDuplicates() { // single node: no duplicates } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#setSelfAsContext() */ public void setSelfAsContext() { addContextNode(this); } /* -----------------------------------------------* * Methods of class NodeSet * -----------------------------------------------*/ /* (non-Javadoc) * @see org.exist.dom.NodeSet#iterator() */ public Iterator iterator() { return new SingleNodeIterator(this); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#iterate() */ public SequenceIterator iterate() { return new SingleNodeIterator(this); } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#unorderedIterator() */ public SequenceIterator unorderedIterator() { return new SingleNodeIterator(this); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#contains(org.exist.dom.DocumentImpl, long) */ public boolean contains(DocumentImpl doc, long nodeId) { return this.doc.getDocId() == doc.getDocId() && this.gid == nodeId; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#contains(org.exist.dom.NodeProxy) */ public boolean contains(NodeProxy proxy) { return doc.getDocId() == proxy.doc.getDocId() && gid == proxy.gid; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#addAll(org.exist.dom.NodeSet) */ public void addAll(NodeSet other) { } /* (non-Javadoc) * @see org.exist.dom.NodeSet#add(org.exist.dom.NodeProxy) */ public void add(NodeProxy proxy) { } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#add(org.exist.xquery.value.Item) */ public void add(Item item) throws XPathException { } /* (non-Javadoc) * @see org.exist.dom.NodeSet#add(org.exist.dom.NodeProxy, int) */ public void add(NodeProxy proxy, int sizeHint) { } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#addAll(org.exist.xquery.value.Sequence) */ public void addAll(Sequence other) throws XPathException { } /* (non-Javadoc) * @see org.w3c.dom.NodeList#getLength() */ public int getLength() { return 1; } /* (non-Javadoc) * @see org.w3c.dom.NodeList#item(int) */ public Node item(int pos) { return pos > 0 ? 
null : getNode(); } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#itemAt(int) */ public Item itemAt(int pos) { return pos > 0 ? null : this; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#get(int) */ public NodeProxy get(int pos) { return pos > 0 ? null : this; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#get(org.exist.dom.NodeProxy) */ public NodeProxy get(NodeProxy p) { return contains(p) ? this : null; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#get(org.exist.dom.DocumentImpl, long) */ public NodeProxy get(DocumentImpl document, long nodeId) { if(doc.getDocId() == document.getDocId() && nodeId == gid) return this; else return null; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#parentWithChild(org.exist.dom.NodeProxy, boolean, boolean, int) */ public NodeProxy parentWithChild(NodeProxy proxy, boolean directParent, boolean includeSelf, int level) { return parentWithChild(proxy.getDocument(), proxy.gid, directParent, includeSelf, level); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#parentWithChild(org.exist.dom.DocumentImpl, long, boolean, boolean) */ public NodeProxy parentWithChild(DocumentImpl doc, long gid, boolean directParent, boolean includeSelf) { return parentWithChild(doc, gid, directParent, includeSelf, TO_BE_COMPUTED ); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#parentWithChild(org.exist.dom.DocumentImpl, long, boolean, boolean, int) */ public NodeProxy parentWithChild( DocumentImpl otherDoc, long otherId, boolean directParent, boolean includeSelf, int level) { if(otherDoc.getDocId() != doc.getDocId()) return null; if(includeSelf && otherId == gid) return this; if (level < 0) level = doc.getTreeLevel(otherId); while (otherId > 0) { otherId = XMLUtil.getParentId(doc, otherId, level); if(otherId == gid) return this; else if (directParent) return null; else --level; } return null; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getContextNodes(boolean) */ public NodeSet getContextNodes(boolean rememberContext) { ExtArrayNodeSet result = new ExtArrayNodeSet(); ContextItem contextNode = getContext(); while (contextNode != null) { NodeProxy p = contextNode.getNode(); p.addMatches(this); if (!result.contains(p)) { if (rememberContext) p.addContextNode(p); result.add(p); } contextNode = contextNode.getNextItem(); } return result; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getRange(org.exist.dom.DocumentImpl, long, long) */ public NodeSet getRange(DocumentImpl document, long lower, long upper) { if(doc.getDocId() == document.getDocId() && gid >= lower && gid <= upper) return this; return NodeSet.EMPTY_SET; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getState() */ public int getState() { return 1; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#hasChanged(int) */ public boolean hasChanged(int previousState) { return false; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getSizeHint(org.exist.dom.DocumentImpl) */ public int getSizeHint(DocumentImpl document) { if(document.getDocId() == doc.getDocId()) return 1; else return 0; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getDocumentSet() */ public DocumentSet getDocumentSet() { DocumentSet docs = new DocumentSet(1); docs.add(doc); return docs; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#containsDoc(org.exist.dom.DocumentImpl) */ public boolean containsDoc(DocumentImpl document) { return doc.getDocId() == document.getDocId(); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#getParents(boolean) */ public NodeSet getParents(boolean rememberContext) { long pid = XMLUtil.getParentId(doc, gid); if ( pid 
!= DOCUMENT_NODE_GID ) { NodeProxy parent = new NodeProxy(doc, pid, Node.ELEMENT_NODE); if (rememberContext) parent.addContextNode(this); else parent.copyContext(this); return parent; } return NodeSet.EMPTY_SET; } public NodeSet getAncestors(boolean rememberContext, boolean includeSelf) { NodeSet ancestors = new ExtArrayNodeSet(); if (includeSelf) { ancestors.add(this); } long pid = gid; // calculate parent's gid while((pid = XMLUtil.getParentId(getDocument(), pid)) > 0) { NodeProxy parent = new NodeProxy(getDocument(), pid, Node.ELEMENT_NODE); if (rememberContext) parent.addContextNode(this); else parent.copyContext(this); ancestors.add(parent); } return ancestors; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#intersection(org.exist.dom.NodeSet) */ public NodeSet intersection(NodeSet other) { if(other.contains(this)) return this; else return NodeSet.EMPTY_SET; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#deepIntersection(org.exist.dom.NodeSet) */ public NodeSet deepIntersection(NodeSet other) { NodeProxy p = other.parentWithChild(this, false, true, TO_BE_COMPUTED ); if(p == null) return NodeSet.EMPTY_SET; if(p.gid != gid) p.addMatches(this); return p; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#union(org.exist.dom.NodeSet) */ public NodeSet union(NodeSet other) { ExtArrayNodeSet result = new ExtArrayNodeSet(); result.addAll(other); result.add(this); return result; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#except(org.exist.dom.NodeSet) */ public NodeSet except(NodeSet other) { return other.contains(this) ? NodeSet.EMPTY_SET : this; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectParentChild(org.exist.dom.NodeSet, int) */ public NodeSet selectParentChild(NodeSet al, int mode) { return selectParentChild(al, mode, false); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectParentChild(org.exist.dom.NodeSet, int, boolean) */ public NodeSet selectParentChild(NodeSet al, int mode, boolean rememberContext) { NodeProxy p = al.parentWithChild(this, true, false, TO_BE_COMPUTED ); if(p != null) { if(mode == DESCENDANT) { if (rememberContext) addContextNode(p); else copyContext(p); return this; } else { if (rememberContext) p.addContextNode(this); else p.copyContext(this); return p; } } else return NodeSet.EMPTY_SET; } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectAncestors(org.exist.dom.NodeSet, boolean, boolean) */ public NodeSet selectAncestors(NodeSet al, boolean includeSelf, boolean rememberContext) { return NodeSetHelper.selectAncestors(this, al, includeSelf, rememberContext); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectSiblings(org.exist.dom.NodeSet, int) */ public NodeSet selectSiblings(NodeSet siblings, int mode) { return NodeSetHelper.selectSiblings(this, siblings, mode); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectAncestorDescendant(org.exist.dom.NodeSet, int, boolean, boolean) */ public NodeSet selectAncestorDescendant(NodeSet al, int mode, boolean includeSelf, boolean rememberContext) { return NodeSetHelper.selectAncestorDescendant(this, al, mode, includeSelf, rememberContext); } /* (non-Javadoc) * @see org.exist.dom.NodeSet#selectFollowing(org.exist.dom.NodeSet) */ public NodeSet selectFollowing(NodeSet following) throws XPathException { return NodeSetHelper.selectFollowing(this, following); } public NodeSet selectPreceding(NodeSet preceding) throws XPathException { return NodeSetHelper.selectPreceding(this, preceding); } public NodeSet directSelectAttribute(QName qname, boolean rememberContext) { if (nodeType != UNKNOWN && nodeType != 
Node.ELEMENT_NODE) return NodeSet.EMPTY_SET; NodeImpl node = (NodeImpl) getNode(); if (node.getNodeType() != Node.ELEMENT_NODE) return NodeSet.EMPTY_SET; AttrImpl attr = (AttrImpl) ((ElementImpl) node).getAttributeNodeNS(qname.getNamespaceURI(), qname.getLocalName()); if (attr == null) return NodeSet.EMPTY_SET; NodeProxy child = new NodeProxy(doc, attr.getGID(), Node.ATTRIBUTE_NODE, attr.getInternalAddress()); if (rememberContext) child.addContextNode(this); else child.copyContext(this); return child; } private final static class SingleNodeIterator implements Iterator, SequenceIterator { private boolean hasNext = true; private NodeProxy node; public SingleNodeIterator(NodeProxy node) { this.node = node; } public boolean hasNext() { return hasNext; } public Object next() { if (hasNext) { hasNext = false; return node; } else return null; } public void remove() { throw new RuntimeException("not supported"); } /* (non-Javadoc) * @see org.exist.xquery.value.SequenceIterator#nextItem() */ public Item nextItem() { if (hasNext) { hasNext = false; return node; } else return null; } } /* (non-Javadoc) * @see org.exist.xquery.value.Sequence#isPersistentSet() */ public boolean isPersistentSet() { return true; } }
NodeProxy.getItemType() should call NodeProxy.getType() to determine the actual type of the node instead of just returning Type.NODE. svn path=/trunk/eXist-1.0/; revision=1738
src/org/exist/dom/NodeProxy.java
NodeProxy.getItemType() should call NodeProxy.getType() to determine the actual type of the node instead of just returning Type.NODE.
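Editor's note: the diff captured by this record reduces to a single method of NodeProxy; everything else in the class is unchanged between old_contents and new_contents. Because the full sources are stored flattened above, the change is reproduced here as a minimal before/after sketch for readability.

    // old_contents: the item type was hard-coded, losing the node's actual kind.
    public int getItemType() {
        return Type.NODE;
    }

    // new_contents: delegate to getType(), which maps the DOM nodeType
    // (ELEMENT_NODE, ATTRIBUTE_NODE, TEXT_NODE, ...) to the matching
    // org.exist.xquery.value.Type constant.
    public int getItemType() {
        return getType();
    }

As the subject states, a sequence consisting of a single NodeProxy now reports ELEMENT, ATTRIBUTE, TEXT, and so on instead of the generic Type.NODE.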
Java
apache-2.0
2ff55b375a856d52f705ee677100c331f706b22b
0
rodm/gradle-teamcity-plugin
/* * Copyright 2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.rodm.teamcity.internal; import com.github.dockerjava.api.DockerClient; import com.github.dockerjava.api.command.CreateContainerCmd; import com.github.dockerjava.api.command.CreateContainerResponse; import com.github.dockerjava.api.command.InspectContainerCmd; import com.github.dockerjava.api.command.InspectContainerResponse; import com.github.dockerjava.api.command.StartContainerCmd; import com.github.dockerjava.api.command.StopContainerCmd; import com.github.dockerjava.api.exception.NotFoundException; import com.github.dockerjava.api.exception.NotModifiedException; import com.github.dockerjava.api.model.ContainerNetwork; import com.github.dockerjava.api.model.HostConfig; import com.github.dockerjava.core.DefaultDockerClientConfig; import com.github.dockerjava.core.DockerClientImpl; import com.github.dockerjava.httpclient5.ApacheDockerHttpClient; import com.github.dockerjava.transport.DockerHttpClient; import org.gradle.api.GradleException; import java.util.Map; public class DockerOperations { private final DockerClient client; public DockerOperations() { DefaultDockerClientConfig clientConfig = DefaultDockerClientConfig.createDefaultConfigBuilder().build(); DockerHttpClient httpClient = new ApacheDockerHttpClient.Builder() .dockerHost(clientConfig.getDockerHost()) .build(); client = DockerClientImpl.getInstance(clientConfig, httpClient); } public boolean isImageAvailable(String image) { try { client.inspectImageCmd(image).exec(); return true; } catch (NotFoundException e) { return false; } } public String createContainer(ContainerConfiguration configuration) { HostConfig hostConfig = HostConfig.newHostConfig() .withAutoRemove(configuration.getAutoRemove()) .withBinds(configuration.getBinds()) .withPortBindings(configuration.getPortBindings()); CreateContainerCmd createContainer = client.createContainerCmd(configuration.getImage()) .withName(configuration.getName()) .withHostConfig(hostConfig) .withEnv(configuration.getEnvironment()) .withExposedPorts(configuration.getExposedPorts()); CreateContainerResponse response = createContainer.exec(); return response.getId(); } public boolean isContainerAvailable(String containerId) { try { client.inspectContainerCmd(containerId); return true; } catch (NotFoundException e) { return false; } } public boolean isContainerRunning(String containerId) { try { InspectContainerCmd inspectContainer = client.inspectContainerCmd(containerId); InspectContainerResponse inspectResponse = inspectContainer.exec(); return Boolean.TRUE.equals(inspectResponse.getState().getRunning()); } catch (NotFoundException e) { return false; } } public void startContainer(String containerId) { StartContainerCmd startContainer = client.startContainerCmd(containerId); try { startContainer.exec(); } catch (NotModifiedException e) { // ignore - already started } } public void stopContainer(String containerId) { StopContainerCmd stopContainer = 
client.stopContainerCmd(containerId); try { stopContainer.exec(); } catch (NotModifiedException e) { // ignore - already stopped } } public String getIpAddress(String containerId) { InspectContainerCmd inspectContainer = client.inspectContainerCmd(containerId); InspectContainerResponse inspectResponse = inspectContainer.exec(); Map<String, ContainerNetwork> networks = inspectResponse.getNetworkSettings().getNetworks(); return networks.values().stream() .findFirst() .map(ContainerNetwork::getIpAddress) .orElseThrow(() -> new GradleException("Failed to get IP address for container: " + containerId)); } }
src/main/java/com/github/rodm/teamcity/internal/DockerOperations.java
/*
 * Copyright 2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.rodm.teamcity.internal;

import com.github.dockerjava.api.DockerClient;
import com.github.dockerjava.api.command.CreateContainerCmd;
import com.github.dockerjava.api.command.CreateContainerResponse;
import com.github.dockerjava.api.command.InspectContainerCmd;
import com.github.dockerjava.api.command.InspectContainerResponse;
import com.github.dockerjava.api.command.StartContainerCmd;
import com.github.dockerjava.api.command.StopContainerCmd;
import com.github.dockerjava.api.exception.NotFoundException;
import com.github.dockerjava.api.exception.NotModifiedException;
import com.github.dockerjava.api.model.ContainerNetwork;
import com.github.dockerjava.api.model.HostConfig;
import com.github.dockerjava.core.DefaultDockerClientConfig;
import com.github.dockerjava.core.DockerClientImpl;
import com.github.dockerjava.httpclient5.ApacheDockerHttpClient;
import com.github.dockerjava.transport.DockerHttpClient;
import org.gradle.api.GradleException;

import java.util.Map;

public class DockerOperations {

    private final DockerClient client;

    public DockerOperations() {
        DefaultDockerClientConfig clientConfig = DefaultDockerClientConfig.createDefaultConfigBuilder().build();
        DockerHttpClient httpClient = new ApacheDockerHttpClient.Builder()
            .dockerHost(clientConfig.getDockerHost())
            .build();
        client = DockerClientImpl.getInstance(clientConfig, httpClient);
    }

    public boolean isImageAvailable(String image) {
        try {
            client.inspectImageCmd(image).exec();
            return true;
        } catch (NotFoundException e) {
            return false;
        }
    }

    public String createContainer(ContainerConfiguration configuration) {
        HostConfig hostConfig = HostConfig.newHostConfig()
            .withAutoRemove(configuration.getAutoRemove())
            .withBinds(configuration.getBinds())
            .withPortBindings(configuration.getPortBindings());
        CreateContainerCmd createContainer = client.createContainerCmd(configuration.getImage())
            .withName(configuration.getName())
            .withHostConfig(hostConfig)
            .withEnv(configuration.getEnvironment())
            .withExposedPorts(configuration.getExposedPorts());
        CreateContainerResponse response = createContainer.exec();
        return response.getId();
    }

    public boolean isContainerRunning(String containerId) {
        try {
            InspectContainerCmd inspectContainer = client.inspectContainerCmd(containerId);
            InspectContainerResponse inspectResponse = inspectContainer.exec();
            return Boolean.TRUE.equals(inspectResponse.getState().getRunning());
        } catch (NotFoundException e) {
            return false;
        }
    }

    public void startContainer(String containerId) {
        StartContainerCmd startContainer = client.startContainerCmd(containerId);
        try {
            startContainer.exec();
        } catch (NotModifiedException e) {
            // ignore - already started
        }
    }

    public void stopContainer(String containerId) {
        StopContainerCmd stopContainer = client.stopContainerCmd(containerId);
        try {
            stopContainer.exec();
        } catch (NotModifiedException e) {
            // ignore - already stopped
        }
    }

    public String getIpAddress(String containerId) {
        InspectContainerCmd inspectContainer = client.inspectContainerCmd(containerId);
        InspectContainerResponse inspectResponse = inspectContainer.exec();
        Map<String, ContainerNetwork> networks = inspectResponse.getNetworkSettings().getNetworks();
        return networks.values().stream()
            .findFirst()
            .map(ContainerNetwork::getIpAddress)
            .orElseThrow(() -> new GradleException("Failed to get IP address for container: " + containerId));
    }
}
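A minimal usage sketch of the DockerOperations API above, assuming the wrapper class lives in the same package and that the container id was obtained from an earlier createContainer(...) call; the class DockerOperationsUsage and its name are illustrative only, not part of the plugin.

// Illustrative only: exercises the operations defined above.
public class DockerOperationsUsage {

    public static void main(String[] args) {
        DockerOperations docker = new DockerOperations();
        String containerId = args[0]; // id returned by an earlier createContainer(...) call

        if (!docker.isContainerRunning(containerId)) {
            docker.startContainer(containerId); // NotModifiedException is swallowed if already started
        }

        // getIpAddress throws a GradleException when the container has no attached network
        System.out.println("Container IP: " + docker.getIpAddress(containerId));

        docker.stopContainer(containerId);
    }
}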
Add new query method to check if a container is available
src/main/java/com/github/rodm/teamcity/internal/DockerOperations.java
Add new query method to check if a container is available
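The commit message above refers to a query method that checks whether a container is available. A hedged sketch of what such a method on DockerOperations could look like, following the isImageAvailable pattern in the class; the name isContainerAvailable and its exact shape are assumptions, not taken verbatim from the commit.

// Illustrative sketch only: reports whether a container with the given id exists.
// inspectContainerCmd(...).exec() throws NotFoundException when the Docker daemon
// does not know the container, mirroring the isImageAvailable check above.
public boolean isContainerAvailable(String containerId) {
    try {
        client.inspectContainerCmd(containerId).exec();
        return true;
    } catch (NotFoundException e) {
        return false;
    }
}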
Java
apache-2.0
0afc036deb35df7e86ede3dcebc430c8f05ed368
0
JingchengDu/hadoop,nandakumar131/hadoop,steveloughran/hadoop,dierobotsdie/hadoop,mapr/hadoop-common,GeLiXin/hadoop,steveloughran/hadoop,plusplusjiajia/hadoop,GeLiXin/hadoop,nandakumar131/hadoop,GeLiXin/hadoop,mapr/hadoop-common,xiao-chen/hadoop,apache/hadoop,apache/hadoop,nandakumar131/hadoop,littlezhou/hadoop,plusplusjiajia/hadoop,wwjiang007/hadoop,apache/hadoop,JingchengDu/hadoop,lukmajercak/hadoop,apurtell/hadoop,steveloughran/hadoop,apurtell/hadoop,mapr/hadoop-common,ucare-uchicago/hadoop,ucare-uchicago/hadoop,GeLiXin/hadoop,steveloughran/hadoop,dierobotsdie/hadoop,dierobotsdie/hadoop,apache/hadoop,szegedim/hadoop,ucare-uchicago/hadoop,mapr/hadoop-common,plusplusjiajia/hadoop,JingchengDu/hadoop,nandakumar131/hadoop,wwjiang007/hadoop,apurtell/hadoop,GeLiXin/hadoop,nandakumar131/hadoop,littlezhou/hadoop,dierobotsdie/hadoop,plusplusjiajia/hadoop,JingchengDu/hadoop,wwjiang007/hadoop,dierobotsdie/hadoop,lukmajercak/hadoop,lukmajercak/hadoop,dierobotsdie/hadoop,littlezhou/hadoop,lukmajercak/hadoop,szegedim/hadoop,apache/hadoop,littlezhou/hadoop,JingchengDu/hadoop,steveloughran/hadoop,plusplusjiajia/hadoop,GeLiXin/hadoop,plusplusjiajia/hadoop,mapr/hadoop-common,nandakumar131/hadoop,nandakumar131/hadoop,wwjiang007/hadoop,dierobotsdie/hadoop,ucare-uchicago/hadoop,mapr/hadoop-common,JingchengDu/hadoop,wwjiang007/hadoop,lukmajercak/hadoop,ucare-uchicago/hadoop,mapr/hadoop-common,plusplusjiajia/hadoop,szegedim/hadoop,GeLiXin/hadoop,apache/hadoop,lukmajercak/hadoop,szegedim/hadoop,xiao-chen/hadoop,steveloughran/hadoop,littlezhou/hadoop,apurtell/hadoop,xiao-chen/hadoop,JingchengDu/hadoop,apurtell/hadoop,steveloughran/hadoop,wwjiang007/hadoop,szegedim/hadoop,apache/hadoop,ucare-uchicago/hadoop,xiao-chen/hadoop,apurtell/hadoop,szegedim/hadoop,xiao-chen/hadoop,xiao-chen/hadoop,lukmajercak/hadoop,littlezhou/hadoop,xiao-chen/hadoop,ucare-uchicago/hadoop,apurtell/hadoop,littlezhou/hadoop,wwjiang007/hadoop,szegedim/hadoop
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.jobhistory; import org.apache.hadoop.mapred.TaskAttemptID; import org.apache.hadoop.mapred.TaskID; import org.apache.hadoop.mapred.TaskStatus; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.TaskType; import org.junit.Assert; import org.junit.Test; import org.skyscreamer.jsonassert.JSONAssert; import org.skyscreamer.jsonassert.JSONCompareMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.util.HashMap; import java.util.TimeZone; public class TestHistoryViewerPrinter { private static final Logger LOG = LoggerFactory.getLogger(TestHistoryViewerPrinter.class); private final String LINE_SEPARATOR = System.lineSeparator(); @Test public void testHumanPrinter() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); HumanReadableHistoryViewerPrinter printer = new HumanReadableHistoryViewerPrinter(job, false, "http://", TimeZone.getTimeZone("GMT")); String outStr = run(printer); Assert.assertEquals("\n" + "Hadoop job: job_1317928501754_0001\n" + "=====================================\n" + "User: rkanter\n" + "JobName: my job\n" + "JobConf: /tmp/job.xml\n" + "Submitted At: 6-Oct-2011 19:15:01\n" + "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" + "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" + "Status: SUCCEEDED\n" + "Counters: \n" + "\n" + "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" + "---------------------------------------------------------------------------------------" + LINE_SEPARATOR + "|group1 |counter1 |5 |5 |5 " + LINE_SEPARATOR + "|group1 |counter2 |10 |10 |10 " + LINE_SEPARATOR + "|group2 |counter1 |15 |15 |15 " + "\n\n" + "=====================================" + LINE_SEPARATOR + "\n" + "Task Summary\n" + "============================\n" + "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" + "\n" + "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" + "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" + "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" + "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" + "============================\n" + LINE_SEPARATOR + "\n" + "Analysis" + LINE_SEPARATOR + "=========" + LINE_SEPARATOR + "\n" + "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" + "Average time taken by map tasks: 5sec\n" + "Worse performing map tasks: \n" + "TaskId\t\tTimetaken" + LINE_SEPARATOR + "task_1317928501754_0001_m_000007 7sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000006 6sec" + 
LINE_SEPARATOR + "task_1317928501754_0001_m_000005 5sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000004 4sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000003 3sec" + LINE_SEPARATOR + "The last map task task_1317928501754_0001_m_000007 finished at (relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)" + LINE_SEPARATOR + "\n" + "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" + "Average time taken by shuffle tasks: 8sec\n" + "Worse performing shuffle tasks: \n" + "TaskId\t\tTimetaken" + LINE_SEPARATOR + "task_1317928501754_0001_r_000008 8sec" + LINE_SEPARATOR + "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)" + LINE_SEPARATOR + "\n" + "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" + "Average time taken by reduce tasks: 0sec\n" + "Worse performing reduce tasks: \n" + "TaskId\t\tTimetaken"+ LINE_SEPARATOR + "task_1317928501754_0001_r_000008 0sec" + LINE_SEPARATOR + "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)" + LINE_SEPARATOR + "=========" + LINE_SEPARATOR + "\n" + "FAILED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t" + LINE_SEPARATOR + "\n" + "FAILED task attempts by nodes\n" + "Hostname\tFailedTasks\n" + "===============================" + LINE_SEPARATOR + "localhost\ttask_1317928501754_0001_m_000002, " + LINE_SEPARATOR, outStr); } @Test public void testHumanPrinterAll() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); HumanReadableHistoryViewerPrinter printer = new HumanReadableHistoryViewerPrinter(job, true, "http://", TimeZone.getTimeZone("GMT")); String outStr = run(printer); if (System.getProperty("java.version").startsWith("1.7")) { Assert.assertEquals("\n" + "Hadoop job: job_1317928501754_0001\n" + "=====================================\n" + "User: rkanter\n" + "JobName: my job\n" + "JobConf: /tmp/job.xml\n" + "Submitted At: 6-Oct-2011 19:15:01\n" + "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" + "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" + "Status: SUCCEEDED\n" + "Counters: \n" + "\n" + "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" + "---------------------------------------------------------------------------------------" + LINE_SEPARATOR + "|group1 |counter1 |5 |5 |5 " + LINE_SEPARATOR + "|group1 |counter2 |10 |10 |10 " + LINE_SEPARATOR + "|group2 |counter1 |15 |15 |15 \n" + "\n" + "=====================================" + LINE_SEPARATOR + "\n" + "Task Summary\n" + "============================\n" + "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" + "\n" + "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" + "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" + "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" + "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" + "============================\n" + LINE_SEPARATOR + "\n" + "Analysis" + LINE_SEPARATOR + "=========" + LINE_SEPARATOR + "\n" + "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" + "Average time taken by map tasks: 5sec\n" + "Worse performing map tasks: \n" + 
"TaskId\t\tTimetaken" + LINE_SEPARATOR + "task_1317928501754_0001_m_000007 7sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000006 6sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000005 5sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000004 4sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000003 3sec" + LINE_SEPARATOR + "The last map task task_1317928501754_0001_m_000007 finished at (relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)" + LINE_SEPARATOR + "\n" + "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" + "Average time taken by shuffle tasks: 8sec\n" + "Worse performing shuffle tasks: \n" + "TaskId\t\tTimetaken" + LINE_SEPARATOR + "task_1317928501754_0001_r_000008 8sec" + LINE_SEPARATOR + "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)" + LINE_SEPARATOR + "\n" + "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" + "Average time taken by reduce tasks: 0sec\n" + "Worse performing reduce tasks: \n" + "TaskId\t\tTimetaken" + LINE_SEPARATOR + "task_1317928501754_0001_r_000008 0sec" + LINE_SEPARATOR + "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)" + LINE_SEPARATOR + "=========" + LINE_SEPARATOR + "\n" + "FAILED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED JOB_SETUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_s_000001\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000006\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\t\t\n" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000005\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\t\t\n" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000004\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\t\t\n" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000003\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\t\t\n" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000007\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\t\t\n" + LINE_SEPARATOR + "\n" + "SUCCEEDED REDUCE task list for 
job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_r_000008\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED JOB_CLEANUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_c_000009\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\t" + LINE_SEPARATOR + "\n" + "JOB_SETUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================" + LINE_SEPARATOR + "attempt_1317928501754_0001_s_000001_1\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1" + LINE_SEPARATOR + "\n" + "MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================\n" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000002_1\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1\n" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000006_1\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1\n" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000005_1\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1\n" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000004_1\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1\n" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000003_1\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1\n" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000007_1\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1\n" + LINE_SEPARATOR + "\n" + "REDUCE task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tShuffleFinished\tSortFinished\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================" + LINE_SEPARATOR + "attempt_1317928501754_0001_r_000008_1\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t6-Oct-2011 19:15:18 (0sec)6-Oct-2011 19:15:18 (8sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1" + LINE_SEPARATOR + "\n" + "JOB_CLEANUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================" + LINE_SEPARATOR + "attempt_1317928501754_0001_c_000009_1\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1" + LINE_SEPARATOR + "\n" + "FAILED task attempts by nodes\n" + "Hostname\tFailedTasks\n" + "===============================" + LINE_SEPARATOR + "localhost\ttask_1317928501754_0001_m_000002, " + LINE_SEPARATOR, outStr); } else { Assert.assertEquals("\n" + "Hadoop job: job_1317928501754_0001\n" + "=====================================\n" + "User: rkanter\n" + "JobName: my job\n" + "JobConf: /tmp/job.xml\n" + "Submitted At: 
6-Oct-2011 19:15:01\n" + "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" + "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" + "Status: SUCCEEDED\n" + "Counters: \n" + "\n" + "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" + "---------------------------------------------------------------------------------------" + LINE_SEPARATOR + "|group1 |counter1 |5 |5 |5 " + LINE_SEPARATOR + "|group1 |counter2 |10 |10 |10 " + LINE_SEPARATOR + "|group2 |counter1 |15 |15 |15 \n" + "\n" + "=====================================" + LINE_SEPARATOR + "\n" + "Task Summary\n" + "============================\n" + "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" + "\n" + "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" + "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" + "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" + "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" + "============================\n" + LINE_SEPARATOR + "\n" + "Analysis" + LINE_SEPARATOR + "=========" + LINE_SEPARATOR + "\n" + "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" + "Average time taken by map tasks: 5sec\n" + "Worse performing map tasks: \n" + "TaskId\t\tTimetaken" + LINE_SEPARATOR + "task_1317928501754_0001_m_000007 7sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000006 6sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000005 5sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000004 4sec" + LINE_SEPARATOR + "task_1317928501754_0001_m_000003 3sec" + LINE_SEPARATOR + "The last map task task_1317928501754_0001_m_000007 finished at (relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)" + LINE_SEPARATOR + "\n" + "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" + "Average time taken by shuffle tasks: 8sec\n" + "Worse performing shuffle tasks: \n" + "TaskId\t\tTimetaken" + LINE_SEPARATOR + "task_1317928501754_0001_r_000008 8sec" + LINE_SEPARATOR + "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)" + LINE_SEPARATOR + "\n" + "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" + "Average time taken by reduce tasks: 0sec\n" + "Worse performing reduce tasks: \n" + "TaskId\t\tTimetaken" + LINE_SEPARATOR + "task_1317928501754_0001_r_000008 0sec" + LINE_SEPARATOR + "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)" + LINE_SEPARATOR + "=========" + LINE_SEPARATOR + "\n" + "FAILED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED JOB_SETUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_s_000001\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000007\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\t\t" + LINE_SEPARATOR + 
"\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000006\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\t\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000005\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\t\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000004\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\t\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_m_000003\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\t\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED REDUCE task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_r_000008\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t" + LINE_SEPARATOR + "\n" + "SUCCEEDED JOB_CLEANUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================" + LINE_SEPARATOR + "task_1317928501754_0001_c_000009\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\t" + LINE_SEPARATOR + "\n" + "JOB_SETUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================" + LINE_SEPARATOR + "attempt_1317928501754_0001_s_000001_1\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1" + LINE_SEPARATOR + "\n" + "MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000007_1\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000002_1\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000006_1\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000005_1\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000004_1\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1" + LINE_SEPARATOR + "attempt_1317928501754_0001_m_000003_1\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1" + LINE_SEPARATOR + "\n" + "REDUCE task list for 
job_1317928501754_0001\n" + "TaskId\t\tStartTime\tShuffleFinished\tSortFinished\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================" + LINE_SEPARATOR + "attempt_1317928501754_0001_r_000008_1\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t6-Oct-2011 19:15:18 (0sec)6-Oct-2011 19:15:18 (8sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1" + LINE_SEPARATOR + "\n" + "JOB_CLEANUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================" + LINE_SEPARATOR + "attempt_1317928501754_0001_c_000009_1\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1" + LINE_SEPARATOR + "\n" + "FAILED task attempts by nodes\n" + "Hostname\tFailedTasks\n" + "===============================" + LINE_SEPARATOR + "localhost\ttask_1317928501754_0001_m_000002, " + LINE_SEPARATOR, outStr); } } @Test public void testJSONPrinter() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); JSONHistoryViewerPrinter printer = new JSONHistoryViewerPrinter(job, false, "http://"); String outStr = run(printer); JSONAssert.assertEquals("{\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"mapValue\": 5,\n" + " \"reduceValue\": 5,\n" + " \"totalValue\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"mapValue\": 10,\n" + " \"reduceValue\": 10,\n" + " \"totalValue\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"mapValue\": 15,\n" + " \"reduceValue\": 15,\n" + " \"totalValue\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishedAt\": 1317928516754,\n" + " \"hadoopJob\": \"job_1317928501754_0001\",\n" + " \"jobConf\": \"/tmp/job.xml\",\n" + " \"jobName\": \"my job\",\n" + " \"launchedAt\": 1317928502754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"submittedAt\": 1317928501754,\n" + " \"taskSummary\": {\n" + " \"cleanup\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928520754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928511754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " },\n" + " \"map\": {\n" + " \"failed\": 1,\n" + " \"finishTime\": 1317928516754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928504754,\n" + " \"successful\": 5,\n" + " \"total\": 6\n" + " },\n" + " \"reduce\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928518754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928510754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " },\n" + " \"setup\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928504754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928503754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " }\n" + " },\n" + " \"tasks\": [\n" + " {\n" + " \"finishTime\": 1317928506754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928504754,\n" + " \"status\": \"FAILED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000002\",\n" + " \"type\": \"MAP\"\n" + " }\n" + " ],\n" + " \"user\": \"rkanter\"\n" + "}\n", outStr, JSONCompareMode.NON_EXTENSIBLE); } @Test public void testJSONPrinterAll() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); JSONHistoryViewerPrinter printer = new JSONHistoryViewerPrinter(job, true, "http://"); String outStr = run(printer); JSONAssert.assertEquals("{\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": 
\"counter1\",\n" + " \"mapValue\": 5,\n" + " \"reduceValue\": 5,\n" + " \"totalValue\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"mapValue\": 10,\n" + " \"reduceValue\": 10,\n" + " \"totalValue\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"mapValue\": 15,\n" + " \"reduceValue\": 15,\n" + " \"totalValue\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishedAt\": 1317928516754,\n" + " \"hadoopJob\": \"job_1317928501754_0001\",\n" + " \"jobConf\": \"/tmp/job.xml\",\n" + " \"jobName\": \"my job\",\n" + " \"launchedAt\": 1317928502754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"submittedAt\": 1317928501754,\n" + " \"taskSummary\": {\n" + " \"cleanup\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928520754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928511754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " },\n" + " \"map\": {\n" + " \"failed\": 1,\n" + " \"finishTime\": 1317928516754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928504754,\n" + " \"successful\": 5,\n" + " \"total\": 6\n" + " },\n" + " \"reduce\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928518754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928510754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " },\n" + " \"setup\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928504754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928503754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " }\n" + " },\n" + " \"tasks\": [\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000002_1\",\n" + " \"finishTime\": 1317928506754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928504754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928506754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928504754,\n" + " \"status\": \"FAILED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000002\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_s_000001_1\",\n" + " \"finishTime\": 1317928504754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928503754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928504754,\n" + " \"startTime\": 1317928503754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_s_000001\",\n" + " \"type\": \"JOB_SETUP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000006_1\",\n" + " \"finishTime\": 1317928514754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928508754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1\"\n" + " },\n" + 
" \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928514754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928508754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000006\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000005_1\",\n" + " \"finishTime\": 1317928512754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928507754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928512754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928507754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000005\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000004_1\",\n" + " \"finishTime\": 1317928510754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928506754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928510754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928506754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000004\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000003_1\",\n" + " \"finishTime\": 1317928508754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928505754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928508754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928505754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000003\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_c_000009_1\",\n" + " \"finishTime\": 1317928520754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928511754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": 
\"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928520754,\n" + " \"startTime\": 1317928511754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_c_000009\",\n" + " \"type\": \"JOB_CLEANUP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000007_1\",\n" + " \"finishTime\": 1317928516754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928509754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928516754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928509754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000007\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_r_000008_1\",\n" + " \"finishTime\": 1317928518754,\n" + " \"hostName\": \"localhost\",\n" + " \"shuffleFinished\": 1317928518754,\n" + " \"sortFinished\": 1317928518754,\n" + " \"startTime\": 1317928510754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928518754,\n" + " \"startTime\": 1317928510754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_r_000008\",\n" + " \"type\": \"REDUCE\"\n" + " }\n" + " ],\n" + " \"user\": \"rkanter\"\n" + "}\n", outStr, JSONCompareMode.NON_EXTENSIBLE); } @Test public void testHumanDupePrinter() throws Exception { JobHistoryParser.JobInfo job = createJobInfo2(); // Counters are only part of the overview so printAll can be false or true // this does not affect the test, task counters are not printed HumanReadableHistoryViewerPrinter printer = new HumanReadableHistoryViewerPrinter(job, false, "http://", TimeZone.getTimeZone("GMT")); String outStr = run(printer); // We are not interested in anything but the duplicate counter int count1 = outStr.indexOf( "|Map-Reduce Framework |Map input records |"); Assert.assertNotEquals("First counter occurrence not found", -1, count1); int count2 = outStr.indexOf( "|Map-Reduce Framework |Map input records |", count1 + 1); Assert.assertEquals("Duplicate counter found at: " + count1 + " and " + count2, -1, count2); } @Test public void testJSONDupePrinter() throws Exception { JobHistoryParser.JobInfo job = createJobInfo2(); // Counters are part of the overview and task info // Tasks only have bogus counters in the test if that is changed printAll // must then be kept as false for this test to pass JSONHistoryViewerPrinter printer = new JSONHistoryViewerPrinter(job, false, "http://"); String outStr = 
run(printer); // We are not interested in anything but the duplicate counter int count1 = outStr.indexOf( "\"counterName\":\"MAP_INPUT_RECORDS\""); Assert.assertNotEquals("First counter occurrence not found", -1, count1); int count2 = outStr.indexOf( "\"counterName\":\"MAP_INPUT_RECORDS\"", count1 + 1); Assert.assertEquals("Duplicate counter found at: " + count1 + " and " + count2, -1, count2); } private String run(HistoryViewerPrinter printer) throws Exception { ByteArrayOutputStream boas = new ByteArrayOutputStream(); PrintStream out = new PrintStream(boas, true); printer.print(out); out.close(); String outStr = boas.toString("UTF-8"); LOG.info("out = " + outStr); return outStr; } private static JobHistoryParser.JobInfo createJobInfo() { JobHistoryParser.JobInfo job = new JobHistoryParser.JobInfo(); job.submitTime = 1317928501754L; job.finishTime = job.submitTime + 15000; job.jobid = JobID.forName("job_1317928501754_0001"); job.username = "rkanter"; job.jobname = "my job"; job.jobQueueName = "my queue"; job.jobConfPath = "/tmp/job.xml"; job.launchTime = job.submitTime + 1000; job.totalMaps = 5; job.totalReduces = 1; job.failedMaps = 1; job.failedReduces = 0; job.succeededMaps = 5; job.succeededReduces = 1; job.jobStatus = JobStatus.State.SUCCEEDED.name(); job.totalCounters = createCounters(); job.mapCounters = createCounters(); job.reduceCounters = createCounters(); job.tasksMap = new HashMap<>(); addTaskInfo(job, TaskType.JOB_SETUP, 1, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 2, TaskStatus.State.FAILED); addTaskInfo(job, TaskType.MAP, 3, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 4, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 5, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 6, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 7, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.REDUCE, 8, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.JOB_CLEANUP, 9, TaskStatus.State.SUCCEEDED); return job; } private static JobHistoryParser.JobInfo createJobInfo2() { JobHistoryParser.JobInfo job = new JobHistoryParser.JobInfo(); job.submitTime = 1317928501754L; job.finishTime = job.submitTime + 15000; job.jobid = JobID.forName("job_1317928501754_0001"); job.username = "test"; job.jobname = "Dupe counter output"; job.jobQueueName = "root.test"; job.jobConfPath = "/tmp/job.xml"; job.launchTime = job.submitTime + 1000; job.totalMaps = 1; job.totalReduces = 0; job.failedMaps = 0; job.failedReduces = 0; job.succeededMaps = 1; job.succeededReduces = 0; job.jobStatus = JobStatus.State.SUCCEEDED.name(); job.totalCounters = createDeprecatedCounters(); job.mapCounters = createDeprecatedCounters(); job.reduceCounters = createDeprecatedCounters(); job.tasksMap = new HashMap<>(); addTaskInfo(job, TaskType.JOB_SETUP, 1, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 2, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.JOB_CLEANUP, 3, TaskStatus.State.SUCCEEDED); return job; } private static Counters createCounters() { Counters counters = new Counters(); counters.findCounter("group1", "counter1").setValue(5); counters.findCounter("group1", "counter2").setValue(10); counters.findCounter("group2", "counter1").setValue(15); return counters; } private static Counters createDeprecatedCounters() { Counters counters = new Counters(); // Deprecated counter: make sure it is only printed once counters.findCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS").setValue(1); counters.findCounter("File System 
Counters", "FILE: Number of bytes read").setValue(1); return counters; } private static void addTaskInfo(JobHistoryParser.JobInfo job, TaskType type, int id, TaskStatus.State status) { JobHistoryParser.TaskInfo task = new JobHistoryParser.TaskInfo(); task.taskId = new TaskID(job.getJobId(), type, id); task.startTime = job.getLaunchTime() + id * 1000; task.finishTime = task.startTime + id * 1000; task.taskType = type; task.counters = createCounters(); task.status = status.name(); task.attemptsMap = new HashMap<>(); addTaskAttemptInfo(task, 1); job.tasksMap.put(task.getTaskId(), task); } private static void addTaskAttemptInfo( JobHistoryParser.TaskInfo task, int id) { JobHistoryParser.TaskAttemptInfo attempt = new JobHistoryParser.TaskAttemptInfo(); attempt.attemptId = new TaskAttemptID( TaskID.downgrade(task.getTaskId()), id); attempt.startTime = task.getStartTime(); attempt.finishTime = task.getFinishTime(); attempt.shuffleFinishTime = task.getFinishTime(); attempt.sortFinishTime = task.getFinishTime(); attempt.mapFinishTime = task.getFinishTime(); attempt.status = task.getTaskStatus(); attempt.taskType = task.getTaskType(); attempt.trackerName = "localhost"; attempt.httpPort = 1234; attempt.hostname = "localhost"; task.attemptsMap.put(attempt.getAttemptId(), attempt); } }
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestHistoryViewerPrinter.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.jobhistory; import org.apache.hadoop.mapred.TaskAttemptID; import org.apache.hadoop.mapred.TaskID; import org.apache.hadoop.mapred.TaskStatus; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.TaskType; import org.junit.Assert; import org.junit.Test; import org.skyscreamer.jsonassert.JSONAssert; import org.skyscreamer.jsonassert.JSONCompareMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.util.HashMap; import java.util.TimeZone; public class TestHistoryViewerPrinter { private static final Logger LOG = LoggerFactory.getLogger(TestHistoryViewerPrinter.class); @Test public void testHumanPrinter() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); HumanReadableHistoryViewerPrinter printer = new HumanReadableHistoryViewerPrinter(job, false, "http://", TimeZone.getTimeZone("GMT")); String outStr = run(printer); Assert.assertEquals("\n" + "Hadoop job: job_1317928501754_0001\n" + "=====================================\n" + "User: rkanter\n" + "JobName: my job\n" + "JobConf: /tmp/job.xml\n" + "Submitted At: 6-Oct-2011 19:15:01\n" + "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" + "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" + "Status: SUCCEEDED\n" + "Counters: \n" + "\n" + "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" + "---------------------------------------------------------------------------------------\n" + "|group1 |counter1 |5 |5 |5 \n" + "|group1 |counter2 |10 |10 |10 \n" + "|group2 |counter1 |15 |15 |15 \n" + "\n" + "=====================================\n" + "\n" + "Task Summary\n" + "============================\n" + "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" + "\n" + "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" + "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" + "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" + "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" + "============================\n" + "\n" + "\n" + "Analysis\n" + "=========\n" + "\n" + "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" + "Average time taken by map tasks: 5sec\n" + "Worse performing map tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_m_000007 7sec\n" + "task_1317928501754_0001_m_000006 6sec\n" + "task_1317928501754_0001_m_000005 5sec\n" + "task_1317928501754_0001_m_000004 4sec\n" + "task_1317928501754_0001_m_000003 3sec\n" + "The last map task task_1317928501754_0001_m_000007 finished at 
(relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)\n" + "\n" + "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" + "Average time taken by shuffle tasks: 8sec\n" + "Worse performing shuffle tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_r_000008 8sec\n" + "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" + "\n" + "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" + "Average time taken by reduce tasks: 0sec\n" + "Worse performing reduce tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_r_000008 0sec\n" + "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" + "=========\n" + "\n" + "FAILED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t\n" + "\n" + "FAILED task attempts by nodes\n" + "Hostname\tFailedTasks\n" + "===============================\n" + "localhost\ttask_1317928501754_0001_m_000002, \n", outStr); } @Test public void testHumanPrinterAll() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); HumanReadableHistoryViewerPrinter printer = new HumanReadableHistoryViewerPrinter(job, true, "http://", TimeZone.getTimeZone("GMT")); String outStr = run(printer); if (System.getProperty("java.version").startsWith("1.7")) { Assert.assertEquals("\n" + "Hadoop job: job_1317928501754_0001\n" + "=====================================\n" + "User: rkanter\n" + "JobName: my job\n" + "JobConf: /tmp/job.xml\n" + "Submitted At: 6-Oct-2011 19:15:01\n" + "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" + "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" + "Status: SUCCEEDED\n" + "Counters: \n" + "\n" + "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" + "---------------------------------------------------------------------------------------\n" + "|group1 |counter1 |5 |5 |5 \n" + "|group1 |counter2 |10 |10 |10 \n" + "|group2 |counter1 |15 |15 |15 \n" + "\n" + "=====================================\n" + "\n" + "Task Summary\n" + "============================\n" + "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" + "\n" + "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" + "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" + "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" + "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" + "============================\n" + "\n" + "\n" + "Analysis\n" + "=========\n" + "\n" + "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" + "Average time taken by map tasks: 5sec\n" + "Worse performing map tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_m_000007 7sec\n" + "task_1317928501754_0001_m_000006 6sec\n" + "task_1317928501754_0001_m_000005 5sec\n" + "task_1317928501754_0001_m_000004 4sec\n" + "task_1317928501754_0001_m_000003 3sec\n" + "The last map task task_1317928501754_0001_m_000007 finished at (relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)\n" + "\n" + "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" + "Average time taken by shuffle tasks: 8sec\n" + "Worse performing shuffle 
tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_r_000008 8sec\n" + "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" + "\n" + "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" + "Average time taken by reduce tasks: 0sec\n" + "Worse performing reduce tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_r_000008 0sec\n" + "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" + "=========\n" + "\n" + "FAILED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t\n" + "\n" + "SUCCEEDED JOB_SETUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================\n" + "task_1317928501754_0001_s_000001\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000006\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\t\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000005\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\t\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000004\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\t\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000003\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\t\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000007\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\t\t\n" + "\n" + "SUCCEEDED REDUCE task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================\n" + "task_1317928501754_0001_r_000008\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t\n" + "\n" + "SUCCEEDED JOB_CLEANUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================\n" + "task_1317928501754_0001_c_000009\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\t\n" + "\n" + "JOB_SETUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================\n" + "attempt_1317928501754_0001_s_000001_1\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1\n" + "\n" + "MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + 
"====================================================\n" + "attempt_1317928501754_0001_m_000002_1\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1\n" + "attempt_1317928501754_0001_m_000006_1\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1\n" + "attempt_1317928501754_0001_m_000005_1\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1\n" + "attempt_1317928501754_0001_m_000004_1\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1\n" + "attempt_1317928501754_0001_m_000003_1\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1\n" + "attempt_1317928501754_0001_m_000007_1\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1\n" + "\n" + "REDUCE task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tShuffleFinished\tSortFinished\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================\n" + "attempt_1317928501754_0001_r_000008_1\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t6-Oct-2011 19:15:18 (0sec)6-Oct-2011 19:15:18 (8sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1\n" + "\n" + "JOB_CLEANUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================\n" + "attempt_1317928501754_0001_c_000009_1\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1\n" + "\n" + "FAILED task attempts by nodes\n" + "Hostname\tFailedTasks\n" + "===============================\n" + "localhost\ttask_1317928501754_0001_m_000002, \n", outStr); } else { Assert.assertEquals("\n" + "Hadoop job: job_1317928501754_0001\n" + "=====================================\n" + "User: rkanter\n" + "JobName: my job\n" + "JobConf: /tmp/job.xml\n" + "Submitted At: 6-Oct-2011 19:15:01\n" + "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" + "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" + "Status: SUCCEEDED\n" + "Counters: \n" + "\n" + "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" + "---------------------------------------------------------------------------------------\n" + "|group1 |counter1 |5 |5 |5 \n" + "|group1 |counter2 |10 |10 |10 \n" + "|group2 |counter1 |15 |15 |15 \n" + "\n" + "=====================================\n" + "\n" + "Task Summary\n" + "============================\n" + "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" + "\n" + "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" + "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" + "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" + "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" + "============================\n" + "\n" + "\n" + "Analysis\n" + "=========\n" + "\n" + "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" + "Average time taken by map tasks: 5sec\n" + "Worse performing map tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_m_000007 7sec\n" + 
"task_1317928501754_0001_m_000006 6sec\n" + "task_1317928501754_0001_m_000005 5sec\n" + "task_1317928501754_0001_m_000004 4sec\n" + "task_1317928501754_0001_m_000003 3sec\n" + "The last map task task_1317928501754_0001_m_000007 finished at (relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)\n" + "\n" + "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" + "Average time taken by shuffle tasks: 8sec\n" + "Worse performing shuffle tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_r_000008 8sec\n" + "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" + "\n" + "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" + "Average time taken by reduce tasks: 0sec\n" + "Worse performing reduce tasks: \n" + "TaskId\t\tTimetaken\n" + "task_1317928501754_0001_r_000008 0sec\n" + "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" + "=========\n" + "\n" + "FAILED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t\n" + "\n" + "SUCCEEDED JOB_SETUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================\n" + "task_1317928501754_0001_s_000001\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000007\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\t\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000006\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\t\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000005\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\t\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000004\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\t\t\n" + "\n" + "SUCCEEDED MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" + "====================================================\n" + "task_1317928501754_0001_m_000003\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\t\t\n" + "\n" + "SUCCEEDED REDUCE task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================\n" + "task_1317928501754_0001_r_000008\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t\n" + "\n" + "SUCCEEDED JOB_CLEANUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tError\n" + "====================================================\n" + "task_1317928501754_0001_c_000009\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\t\n" + "\n" + "JOB_SETUP task list for 
job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================\n" + "attempt_1317928501754_0001_s_000001_1\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1\n" + "\n" + "MAP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================\n" + "attempt_1317928501754_0001_m_000007_1\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1\n" + "attempt_1317928501754_0001_m_000002_1\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1\n" + "attempt_1317928501754_0001_m_000006_1\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1\n" + "attempt_1317928501754_0001_m_000005_1\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1\n" + "attempt_1317928501754_0001_m_000004_1\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1\n" + "attempt_1317928501754_0001_m_000003_1\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1\n" + "\n" + "REDUCE task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tShuffleFinished\tSortFinished\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================\n" + "attempt_1317928501754_0001_r_000008_1\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t6-Oct-2011 19:15:18 (0sec)6-Oct-2011 19:15:18 (8sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1\n" + "\n" + "JOB_CLEANUP task list for job_1317928501754_0001\n" + "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" + "====================================================\n" + "attempt_1317928501754_0001_c_000009_1\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1\n" + "\n" + "FAILED task attempts by nodes\n" + "Hostname\tFailedTasks\n" + "===============================\n" + "localhost\ttask_1317928501754_0001_m_000002, \n", outStr); } } @Test public void testJSONPrinter() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); JSONHistoryViewerPrinter printer = new JSONHistoryViewerPrinter(job, false, "http://"); String outStr = run(printer); JSONAssert.assertEquals("{\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"mapValue\": 5,\n" + " \"reduceValue\": 5,\n" + " \"totalValue\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"mapValue\": 10,\n" + " \"reduceValue\": 10,\n" + " \"totalValue\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"mapValue\": 15,\n" + " \"reduceValue\": 15,\n" + " \"totalValue\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishedAt\": 1317928516754,\n" + " \"hadoopJob\": \"job_1317928501754_0001\",\n" + " \"jobConf\": \"/tmp/job.xml\",\n" + " \"jobName\": \"my job\",\n" + " \"launchedAt\": 1317928502754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"submittedAt\": 1317928501754,\n" + " 
\"taskSummary\": {\n" + " \"cleanup\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928520754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928511754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " },\n" + " \"map\": {\n" + " \"failed\": 1,\n" + " \"finishTime\": 1317928516754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928504754,\n" + " \"successful\": 5,\n" + " \"total\": 6\n" + " },\n" + " \"reduce\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928518754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928510754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " },\n" + " \"setup\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928504754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928503754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " }\n" + " },\n" + " \"tasks\": [\n" + " {\n" + " \"finishTime\": 1317928506754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928504754,\n" + " \"status\": \"FAILED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000002\",\n" + " \"type\": \"MAP\"\n" + " }\n" + " ],\n" + " \"user\": \"rkanter\"\n" + "}\n", outStr, JSONCompareMode.NON_EXTENSIBLE); } @Test public void testJSONPrinterAll() throws Exception { JobHistoryParser.JobInfo job = createJobInfo(); JSONHistoryViewerPrinter printer = new JSONHistoryViewerPrinter(job, true, "http://"); String outStr = run(printer); JSONAssert.assertEquals("{\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"mapValue\": 5,\n" + " \"reduceValue\": 5,\n" + " \"totalValue\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"mapValue\": 10,\n" + " \"reduceValue\": 10,\n" + " \"totalValue\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"mapValue\": 15,\n" + " \"reduceValue\": 15,\n" + " \"totalValue\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishedAt\": 1317928516754,\n" + " \"hadoopJob\": \"job_1317928501754_0001\",\n" + " \"jobConf\": \"/tmp/job.xml\",\n" + " \"jobName\": \"my job\",\n" + " \"launchedAt\": 1317928502754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"submittedAt\": 1317928501754,\n" + " \"taskSummary\": {\n" + " \"cleanup\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928520754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928511754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " },\n" + " \"map\": {\n" + " \"failed\": 1,\n" + " \"finishTime\": 1317928516754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928504754,\n" + " \"successful\": 5,\n" + " \"total\": 6\n" + " },\n" + " \"reduce\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928518754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928510754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " },\n" + " \"setup\": {\n" + " \"failed\": 0,\n" + " \"finishTime\": 1317928504754,\n" + " \"killed\": 0,\n" + " \"startTime\": 1317928503754,\n" + " \"successful\": 1,\n" + " \"total\": 1\n" + " }\n" + " },\n" + " \"tasks\": [\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000002_1\",\n" + " \"finishTime\": 1317928506754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928504754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": 
[\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928506754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928504754,\n" + " \"status\": \"FAILED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000002\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_s_000001_1\",\n" + " \"finishTime\": 1317928504754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928503754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928504754,\n" + " \"startTime\": 1317928503754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_s_000001\",\n" + " \"type\": \"JOB_SETUP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000006_1\",\n" + " \"finishTime\": 1317928514754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928508754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928514754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928508754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000006\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000005_1\",\n" + " \"finishTime\": 1317928512754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928507754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928512754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928507754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000005\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000004_1\",\n" + " \"finishTime\": 1317928510754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928506754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " 
\"finishTime\": 1317928510754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928506754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000004\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000003_1\",\n" + " \"finishTime\": 1317928508754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928505754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928508754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928505754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000003\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_c_000009_1\",\n" + " \"finishTime\": 1317928520754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928511754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928520754,\n" + " \"startTime\": 1317928511754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_c_000009\",\n" + " \"type\": \"JOB_CLEANUP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_m_000007_1\",\n" + " \"finishTime\": 1317928516754,\n" + " \"hostName\": \"localhost\",\n" + " \"startTime\": 1317928509754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 1317928516754,\n" + " \"inputSplits\": \"\",\n" + " \"startTime\": 1317928509754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_m_000007\",\n" + " \"type\": \"MAP\"\n" + " },\n" + " {\n" + " \"attempts\": {\n" + " \"attemptId\": \"attempt_1317928501754_0001_r_000008_1\",\n" + " \"finishTime\": 1317928518754,\n" + " \"hostName\": \"localhost\",\n" + " \"shuffleFinished\": 1317928518754,\n" + " \"sortFinished\": 1317928518754,\n" + " \"startTime\": 1317928510754,\n" + " \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1\"\n" + " },\n" + " \"counters\": {\n" + " \"group1\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 5\n" + " },\n" + " {\n" + " \"counterName\": \"counter2\",\n" + " \"value\": 10\n" + " }\n" + " ],\n" + " \"group2\": [\n" + " {\n" + " \"counterName\": \"counter1\",\n" + " \"value\": 15\n" + " }\n" + " ]\n" + " },\n" + " \"finishTime\": 
1317928518754,\n" + " \"startTime\": 1317928510754,\n" + " \"status\": \"SUCCEEDED\",\n" + " \"taskId\": \"task_1317928501754_0001_r_000008\",\n" + " \"type\": \"REDUCE\"\n" + " }\n" + " ],\n" + " \"user\": \"rkanter\"\n" + "}\n", outStr, JSONCompareMode.NON_EXTENSIBLE); } @Test public void testHumanDupePrinter() throws Exception { JobHistoryParser.JobInfo job = createJobInfo2(); // Counters are only part of the overview so printAll can be false or true // this does not affect the test, task counters are not printed HumanReadableHistoryViewerPrinter printer = new HumanReadableHistoryViewerPrinter(job, false, "http://", TimeZone.getTimeZone("GMT")); String outStr = run(printer); // We are not interested in anything but the duplicate counter int count1 = outStr.indexOf( "|Map-Reduce Framework |Map input records |"); Assert.assertNotEquals("First counter occurrence not found", -1, count1); int count2 = outStr.indexOf( "|Map-Reduce Framework |Map input records |", count1 + 1); Assert.assertEquals("Duplicate counter found at: " + count1 + " and " + count2, -1, count2); } @Test public void testJSONDupePrinter() throws Exception { JobHistoryParser.JobInfo job = createJobInfo2(); // Counters are part of the overview and task info // Tasks only have bogus counters in the test if that is changed printAll // must then be kept as false for this test to pass JSONHistoryViewerPrinter printer = new JSONHistoryViewerPrinter(job, false, "http://"); String outStr = run(printer); // We are not interested in anything but the duplicate counter int count1 = outStr.indexOf( "\"counterName\":\"MAP_INPUT_RECORDS\""); Assert.assertNotEquals("First counter occurrence not found", -1, count1); int count2 = outStr.indexOf( "\"counterName\":\"MAP_INPUT_RECORDS\"", count1 + 1); Assert.assertEquals("Duplicate counter found at: " + count1 + " and " + count2, -1, count2); } private String run(HistoryViewerPrinter printer) throws Exception { ByteArrayOutputStream boas = new ByteArrayOutputStream(); PrintStream out = new PrintStream(boas, true); printer.print(out); out.close(); String outStr = boas.toString("UTF-8"); LOG.info("out = " + outStr); return outStr; } private static JobHistoryParser.JobInfo createJobInfo() { JobHistoryParser.JobInfo job = new JobHistoryParser.JobInfo(); job.submitTime = 1317928501754L; job.finishTime = job.submitTime + 15000; job.jobid = JobID.forName("job_1317928501754_0001"); job.username = "rkanter"; job.jobname = "my job"; job.jobQueueName = "my queue"; job.jobConfPath = "/tmp/job.xml"; job.launchTime = job.submitTime + 1000; job.totalMaps = 5; job.totalReduces = 1; job.failedMaps = 1; job.failedReduces = 0; job.succeededMaps = 5; job.succeededReduces = 1; job.jobStatus = JobStatus.State.SUCCEEDED.name(); job.totalCounters = createCounters(); job.mapCounters = createCounters(); job.reduceCounters = createCounters(); job.tasksMap = new HashMap<>(); addTaskInfo(job, TaskType.JOB_SETUP, 1, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 2, TaskStatus.State.FAILED); addTaskInfo(job, TaskType.MAP, 3, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 4, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 5, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 6, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 7, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.REDUCE, 8, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.JOB_CLEANUP, 9, TaskStatus.State.SUCCEEDED); return job; } private static JobHistoryParser.JobInfo createJobInfo2() { 
JobHistoryParser.JobInfo job = new JobHistoryParser.JobInfo(); job.submitTime = 1317928501754L; job.finishTime = job.submitTime + 15000; job.jobid = JobID.forName("job_1317928501754_0001"); job.username = "test"; job.jobname = "Dupe counter output"; job.jobQueueName = "root.test"; job.jobConfPath = "/tmp/job.xml"; job.launchTime = job.submitTime + 1000; job.totalMaps = 1; job.totalReduces = 0; job.failedMaps = 0; job.failedReduces = 0; job.succeededMaps = 1; job.succeededReduces = 0; job.jobStatus = JobStatus.State.SUCCEEDED.name(); job.totalCounters = createDeprecatedCounters(); job.mapCounters = createDeprecatedCounters(); job.reduceCounters = createDeprecatedCounters(); job.tasksMap = new HashMap<>(); addTaskInfo(job, TaskType.JOB_SETUP, 1, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.MAP, 2, TaskStatus.State.SUCCEEDED); addTaskInfo(job, TaskType.JOB_CLEANUP, 3, TaskStatus.State.SUCCEEDED); return job; } private static Counters createCounters() { Counters counters = new Counters(); counters.findCounter("group1", "counter1").setValue(5); counters.findCounter("group1", "counter2").setValue(10); counters.findCounter("group2", "counter1").setValue(15); return counters; } private static Counters createDeprecatedCounters() { Counters counters = new Counters(); // Deprecated counter: make sure it is only printed once counters.findCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS").setValue(1); counters.findCounter("File System Counters", "FILE: Number of bytes read").setValue(1); return counters; } private static void addTaskInfo(JobHistoryParser.JobInfo job, TaskType type, int id, TaskStatus.State status) { JobHistoryParser.TaskInfo task = new JobHistoryParser.TaskInfo(); task.taskId = new TaskID(job.getJobId(), type, id); task.startTime = job.getLaunchTime() + id * 1000; task.finishTime = task.startTime + id * 1000; task.taskType = type; task.counters = createCounters(); task.status = status.name(); task.attemptsMap = new HashMap<>(); addTaskAttemptInfo(task, 1); job.tasksMap.put(task.getTaskId(), task); } private static void addTaskAttemptInfo( JobHistoryParser.TaskInfo task, int id) { JobHistoryParser.TaskAttemptInfo attempt = new JobHistoryParser.TaskAttemptInfo(); attempt.attemptId = new TaskAttemptID( TaskID.downgrade(task.getTaskId()), id); attempt.startTime = task.getStartTime(); attempt.finishTime = task.getFinishTime(); attempt.shuffleFinishTime = task.getFinishTime(); attempt.sortFinishTime = task.getFinishTime(); attempt.mapFinishTime = task.getFinishTime(); attempt.status = task.getTaskStatus(); attempt.taskType = task.getTaskType(); attempt.trackerName = "localhost"; attempt.httpPort = 1234; attempt.hostname = "localhost"; task.attemptsMap.put(attempt.getAttemptId(), attempt); } }
MAPREDUCE-7103. Fix TestHistoryViewerPrinter on Windows due to a mismatched line separator. Contributed by Giovanni Matteo Fumarola.
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestHistoryViewerPrinter.java
MAPREDUCE-7103. Fix TestHistoryViewerPrinter on Windows due to a mismatched line separator. Contributed by Giovanni Matteo Fumarola.
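The subject above points at a classic failure mode for golden-string tests such as TestHistoryViewerPrinter: the expected output hard-codes "\n" while the printer may emit the platform line separator, so the comparison only fails on Windows. As a hedged sketch of one common remedy (not necessarily the actual MAPREDUCE-7103 change; the helper class and method names are invented), both sides can be normalized before asserting:

import org.junit.Assert;

/** Hypothetical helper: compares multi-line golden strings regardless of the platform line separator. */
final class LineSeparatorAgnosticAssert {

    private LineSeparatorAgnosticAssert() {
    }

    /** Collapses CRLF and lone CR to LF so expectations written with '\n' also pass on Windows. */
    static String normalizeLineSeparators(String s) {
        return s.replace("\r\n", "\n").replace('\r', '\n');
    }

    static void assertEqualsIgnoringLineSeparators(String expected, String actual) {
        Assert.assertEquals(normalizeLineSeparators(expected), normalizeLineSeparators(actual));
    }
}

Building the expected strings with System.lineSeparator() instead of a literal "\n" achieves the same platform independence.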
Java
apache-2.0
6b64c185675b34496102624fe3b7d0d2ea671c9b
0
PennState/scim,PennState/SCIMple-Identity
package edu.psu.swe.scim.server.filter; import java.io.IOException; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.annotation.WebFilter; import javax.servlet.http.HttpServletResponse; @WebFilter(filterName = "ScimCorsFilter", urlPatterns = {"/*"}) public class ApiOriginFilter implements javax.servlet.Filter { @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { HttpServletResponse res = (HttpServletResponse) response; res.addHeader("Access-Control-Allow-Origin", "*"); res.addHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT"); res.addHeader("Access-Control-Allow-Headers", "Content-Type, Authorization"); chain.doFilter(request, response); } @Override public void init(FilterConfig filterConfig) throws ServletException { } @Override public void destroy() { } }
scim-server/scim-server-common/src/main/java/edu/psu/swe/scim/server/filter/ApiOriginFilter.java
package edu.psu.swe.scim.server.filter; import java.io.IOException; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.annotation.WebFilter; import javax.servlet.http.HttpServletResponse; @WebFilter(filterName = "HTML5CorsFilter", urlPatterns = {"/*"}) public class ApiOriginFilter implements javax.servlet.Filter { @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { HttpServletResponse res = (HttpServletResponse) response; res.addHeader("Access-Control-Allow-Origin", "*"); res.addHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT"); res.addHeader("Access-Control-Allow-Headers", "Content-Type, Authorization"); chain.doFilter(request, response); } @Override public void init(FilterConfig filterConfig) throws ServletException { } @Override public void destroy() { } }
Changing name of CORS filter to avoid conflict
scim-server/scim-server-common/src/main/java/edu/psu/swe/scim/server/filter/ApiOriginFilter.java
Changing name of CORS filter to avoid conflict
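The only change in the record above is the @WebFilter filterName, from "HTML5CorsFilter" to "ScimCorsFilter", because two filters registered under the same name collide inside the servlet container. A minimal sketch of why the name must be unique, using invented class names and Servlet 3.0+ programmatic registration (the container keys registrations by filterName and reports a clash by returning null):

import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.FilterRegistration;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.annotation.WebListener;

@WebListener
public class UniqueFilterNameListener implements ServletContextListener {

    /** Trivial pass-through filter, present only so the example is self-contained. */
    public static class NoOpFilter implements Filter {
        @Override public void init(FilterConfig filterConfig) { }
        @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
                throws IOException, ServletException {
            chain.doFilter(request, response);
        }
        @Override public void destroy() { }
    }

    @Override
    public void contextInitialized(ServletContextEvent sce) {
        ServletContext ctx = sce.getServletContext();
        // addFilter returns null when a complete registration already exists under this
        // filterName, which is exactly the conflict the rename in this commit avoids.
        FilterRegistration.Dynamic registration = ctx.addFilter("ScimCorsFilter", NoOpFilter.class);
        if (registration == null) {
            ctx.log("A filter named 'ScimCorsFilter' is already registered; choose a unique filterName.");
        } else {
            registration.addMappingForUrlPatterns(null, false, "/*");
        }
    }

    @Override
    public void contextDestroyed(ServletContextEvent sce) { }
}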
Java
apache-2.0
1b0c2cfe7b4d043ab712be615160d9131ac7dab6
0
nyer/mybatis-3,Zzyong-5170/mybatis-3,jeasonyoung/mybatis-3,VioletLife/mybatis-3,SpringMybatis/mybatis-3,Xcorpio/mybatis-3,jackgao2016/mybatis-3_site,xiexingguang/mybatis-3,Alwayswithme/mybatis-3,hahaduo/mybatis-3,gaojinhua/mybatis-3,jackgao2016/mybatis-3_site,harawata/mybatis-3,LuBaijiang/mybatis-3,lknny/mybatis-3,vniu/mybatis-3,NanYoMy/mybatis-3,fangjz/mybatis-3,nyer/mybatis-3,zduantao/mybatis-3,chuyuqiao/mybatis-3,lousama/mybatis-3,langlan/mybatis-3,mway08/mybatis-3,VioletLife/mybatis-3,harawata/mybatis-3,jmurciego/mybatis-3,yaotj/mybatis-3,fromm0/mybatis-3,JuwinS1993/mybatis-3,E1110CNotFound/mybatis-3,open-source-explore/mybatis-3,danyXu/mybatis-3,qiuyesuifeng/mybatis-3,raupachz/mybatis-3,kkxx/mybatis-3,SeaSky0606/mybatis-3,salchemist/mybatis-3,mybatis/mybatis-3,jankill/mybatis-3,z744489075/mybatis-3,jingyuzhu/mybatis-3,Prymon/mybatis-3,iaiti/mybatis-3,mlc0202/mybatis-3,lxq1008/mybatis-3,wangype/mybatis-3,ningg/mybatis-3,wuwen5/mybatis-3,forestqqqq/mybatis-3,zhangwei5095/mybatis3-annotaion,fengsmith/mybatis-3,fromm0/mybatis-3,gigold/mybatis-3,yummy222/mybatis-3,keyeMyria/mybatis-3,raphaelmonteiro15/mybatis-3,liuqk/mybatis-3,hazendaz/mybatis-3,fengsmith/mybatis-3
/* * Copyright 2009-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ibatis.executor; import java.sql.BatchUpdateException; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.ibatis.executor.keygen.Jdbc3KeyGenerator; import org.apache.ibatis.executor.keygen.KeyGenerator; import org.apache.ibatis.executor.keygen.NoKeyGenerator; import org.apache.ibatis.executor.statement.StatementHandler; import org.apache.ibatis.mapping.BoundSql; import org.apache.ibatis.mapping.MappedStatement; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.ResultHandler; import org.apache.ibatis.session.RowBounds; import org.apache.ibatis.transaction.Transaction; /** * @author Jeff Butler */ public class BatchExecutor extends BaseExecutor { public static final int BATCH_UPDATE_RETURN_VALUE = Integer.MIN_VALUE + 1002; private final List<Statement> statementList = new ArrayList<Statement>(); private final List<BatchResult> batchResultList = new ArrayList<BatchResult>(); private String currentSql; private MappedStatement currentStatement; public BatchExecutor(Configuration configuration, Transaction transaction) { super(configuration, transaction); } @Override public int doUpdate(MappedStatement ms, Object parameterObject) throws SQLException { final Configuration configuration = ms.getConfiguration(); final StatementHandler handler = configuration.newStatementHandler(this, ms, parameterObject, RowBounds.DEFAULT, null, null); final BoundSql boundSql = handler.getBoundSql(); final String sql = boundSql.getSql(); final Statement stmt; if (sql.equals(currentSql) && ms.equals(currentStatement)) { int last = statementList.size() - 1; stmt = statementList.get(last); handler.parameterize(stmt);//fix Issues 322 BatchResult batchResult = batchResultList.get(last); batchResult.addParameterObject(parameterObject); } else { Connection connection = getConnection(ms.getStatementLog()); stmt = handler.prepare(connection); handler.parameterize(stmt); //fix Issues 322 currentSql = sql; currentStatement = ms; statementList.add(stmt); batchResultList.add(new BatchResult(ms, sql, parameterObject)); } handler.batch(stmt); return BATCH_UPDATE_RETURN_VALUE; } @Override public <E> List<E> doQuery(MappedStatement ms, Object parameterObject, RowBounds rowBounds, ResultHandler resultHandler, BoundSql boundSql) throws SQLException { Statement stmt = null; try { flushStatements(); Configuration configuration = ms.getConfiguration(); StatementHandler handler = configuration.newStatementHandler(wrapper, ms, parameterObject, rowBounds, resultHandler, boundSql); Connection connection = getConnection(ms.getStatementLog()); stmt = handler.prepare(connection); handler.parameterize(stmt); return handler.<E>query(stmt, resultHandler); } finally { closeStatement(stmt); } } @Override public List<BatchResult> doFlushStatements(boolean isRollback) throws 
SQLException { try { List<BatchResult> results = new ArrayList<BatchResult>(); if (isRollback) { return Collections.emptyList(); } for (int i = 0, n = statementList.size(); i < n; i++) { Statement stmt = statementList.get(i); BatchResult batchResult = batchResultList.get(i); try { batchResult.setUpdateCounts(stmt.executeBatch()); MappedStatement ms = batchResult.getMappedStatement(); List<Object> parameterObjects = batchResult.getParameterObjects(); KeyGenerator keyGenerator = ms.getKeyGenerator(); if (Jdbc3KeyGenerator.class.equals(keyGenerator.getClass())) { Jdbc3KeyGenerator jdbc3KeyGenerator = (Jdbc3KeyGenerator) keyGenerator; jdbc3KeyGenerator.processBatch(ms, stmt, parameterObjects); } else if (!NoKeyGenerator.class.equals(keyGenerator.getClass())) { //issue #141 for (Object parameter : parameterObjects) { keyGenerator.processAfter(this, ms, stmt, parameter); } } } catch (BatchUpdateException e) { StringBuilder message = new StringBuilder(); message.append(batchResult.getMappedStatement().getId()) .append(" (batch index #") .append(i + 1) .append(")") .append(" failed."); if (i > 0) { message.append(" ") .append(i) .append(" prior sub executor(s) completed successfully, but will be rolled back."); } throw new BatchExecutorException(message.toString(), e, results, batchResult); } results.add(batchResult); } return results; } finally { for (Statement stmt : statementList) { closeStatement(stmt); } currentSql = null; statementList.clear(); batchResultList.clear(); } } }
src/main/java/org/apache/ibatis/executor/BatchExecutor.java
/* * Copyright 2009-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ibatis.executor; import java.sql.BatchUpdateException; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.ibatis.executor.keygen.Jdbc3KeyGenerator; import org.apache.ibatis.executor.keygen.KeyGenerator; import org.apache.ibatis.executor.keygen.NoKeyGenerator; import org.apache.ibatis.executor.statement.StatementHandler; import org.apache.ibatis.mapping.BoundSql; import org.apache.ibatis.mapping.MappedStatement; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.ResultHandler; import org.apache.ibatis.session.RowBounds; import org.apache.ibatis.transaction.Transaction; /** * @author Jeff Butler */ public class BatchExecutor extends BaseExecutor { public static final int BATCH_UPDATE_RETURN_VALUE = Integer.MIN_VALUE + 1002; private final List<Statement> statementList = new ArrayList<Statement>(); private final List<BatchResult> batchResultList = new ArrayList<BatchResult>(); private String currentSql; private MappedStatement currentStatement; public BatchExecutor(Configuration configuration, Transaction transaction) { super(configuration, transaction); } @Override public int doUpdate(MappedStatement ms, Object parameterObject) throws SQLException { final Configuration configuration = ms.getConfiguration(); final StatementHandler handler = configuration.newStatementHandler(this, ms, parameterObject, RowBounds.DEFAULT, null, null); final BoundSql boundSql = handler.getBoundSql(); final String sql = boundSql.getSql(); final Statement stmt; if (sql.equals(currentSql) && ms.equals(currentStatement)) { int last = statementList.size() - 1; stmt = statementList.get(last); BatchResult batchResult = batchResultList.get(last); batchResult.addParameterObject(parameterObject); } else { Connection connection = getConnection(ms.getStatementLog()); stmt = handler.prepare(connection); currentSql = sql; currentStatement = ms; statementList.add(stmt); batchResultList.add(new BatchResult(ms, sql, parameterObject)); } handler.parameterize(stmt); handler.batch(stmt); return BATCH_UPDATE_RETURN_VALUE; } @Override public <E> List<E> doQuery(MappedStatement ms, Object parameterObject, RowBounds rowBounds, ResultHandler resultHandler, BoundSql boundSql) throws SQLException { Statement stmt = null; try { flushStatements(); Configuration configuration = ms.getConfiguration(); StatementHandler handler = configuration.newStatementHandler(wrapper, ms, parameterObject, rowBounds, resultHandler, boundSql); Connection connection = getConnection(ms.getStatementLog()); stmt = handler.prepare(connection); handler.parameterize(stmt); return handler.<E>query(stmt, resultHandler); } finally { closeStatement(stmt); } } @Override public List<BatchResult> doFlushStatements(boolean isRollback) throws SQLException { try { List<BatchResult> results = new 
ArrayList<BatchResult>(); if (isRollback) { return Collections.emptyList(); } for (int i = 0, n = statementList.size(); i < n; i++) { Statement stmt = statementList.get(i); BatchResult batchResult = batchResultList.get(i); try { batchResult.setUpdateCounts(stmt.executeBatch()); MappedStatement ms = batchResult.getMappedStatement(); List<Object> parameterObjects = batchResult.getParameterObjects(); KeyGenerator keyGenerator = ms.getKeyGenerator(); if (Jdbc3KeyGenerator.class.equals(keyGenerator.getClass())) { Jdbc3KeyGenerator jdbc3KeyGenerator = (Jdbc3KeyGenerator) keyGenerator; jdbc3KeyGenerator.processBatch(ms, stmt, parameterObjects); } else if (!NoKeyGenerator.class.equals(keyGenerator.getClass())) { //issue #141 for (Object parameter : parameterObjects) { keyGenerator.processAfter(this, ms, stmt, parameter); } } } catch (BatchUpdateException e) { StringBuilder message = new StringBuilder(); message.append(batchResult.getMappedStatement().getId()) .append(" (batch index #") .append(i + 1) .append(")") .append(" failed."); if (i > 0) { message.append(" ") .append(i) .append(" prior sub executor(s) completed successfully, but will be rolled back."); } throw new BatchExecutorException(message.toString(), e, results, batchResult); } results.add(batchResult); } return results; } finally { for (Statement stmt : statementList) { closeStatement(stmt); } currentSql = null; statementList.clear(); batchResultList.clear(); } } }
//fix Issues #322
src/main/java/org/apache/ibatis/executor/BatchExecutor.java
//fix Issues #322
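The "fix Issues 322" comments in the new BatchExecutor above keep parameter binding tied to the statement that is about to be batched. As a hedged reminder of the plain-JDBC contract this executor builds on, and not a description of the MyBatis fix itself (the table and SQL below are invented), parameters are bound before every addBatch() on a reused PreparedStatement and executeBatch() later runs the whole batch:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

/** Plain-JDBC sketch of the pattern BatchExecutor automates; the table and SQL are invented. */
public final class JdbcBatchSketch {

    private JdbcBatchSketch() {
    }

    public static int[] insertAll(Connection connection, List<String> names) throws SQLException {
        String sql = "INSERT INTO example_table (name) VALUES (?)";
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            for (String name : names) {
                ps.setString(1, name); // bind parameters before every addBatch() on the reused statement
                ps.addBatch();         // queues this parameter set without executing it yet
            }
            return ps.executeBatch(); // runs the whole batch, analogous to doFlushStatements() above
        }
    }
}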
Java
apache-2.0
44c98fea91165ba80ce1d9ef8305a8a2d9c5354a
0
T-Systems-MMS/perfsig-jenkins,T-Systems-MMS/perfsig-jenkins
/* * Copyright (c) 2014-2018 T-Systems Multimedia Solutions GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.tsystems.mms.apm.performancesignature.dynatracesaas.rest; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonParseException; import de.tsystems.mms.apm.performancesignature.dynatracesaas.rest.auth.ApiKeyAuth; import okhttp3.OkHttpClient; import okhttp3.RequestBody; import okhttp3.ResponseBody; import okhttp3.logging.HttpLoggingInterceptor; import retrofit2.Call; import retrofit2.Converter; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.converter.gson.GsonConverterFactory; import retrofit2.converter.scalars.ScalarsConverterFactory; import javax.annotation.Nonnull; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLSocketFactory; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.reflect.Type; import java.net.Proxy; import java.security.GeneralSecurityException; import java.security.cert.X509Certificate; import java.util.concurrent.TimeUnit; public class ApiClient { private static final String REST_DF = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX"; private static final String API_SUFFIX = "api/v1/"; private final OkHttpClient.Builder okBuilder; private boolean debugging = false; private boolean verifyingSsl; private Retrofit.Builder adapterBuilder; private HttpLoggingInterceptor loggingInterceptor; public ApiClient() { verifyingSsl = true; okBuilder = new OkHttpClient.Builder(); okBuilder.connectTimeout(30, TimeUnit.SECONDS); okBuilder.readTimeout(30, TimeUnit.SECONDS); String baseUrl = "https://localhost/" + API_SUFFIX; Gson gson = new GsonBuilder() .setDateFormat(ApiClient.REST_DF) .create(); adapterBuilder = new Retrofit .Builder() .baseUrl(baseUrl) .addConverterFactory(ScalarsConverterFactory.create()) .addConverterFactory(GsonCustomConverterFactory.create(gson)); } /** * Helper constructor for single api key * * @param apiKey API key */ public ApiClient(String apiKey) { this(); this.setApiKey(apiKey); } /** * Set base path * * @param basePath Base path of the URL (e.g https://localhost/api/v2 * @return An instance of OkHttpClient */ public ApiClient setBasePath(String basePath) { if (!basePath.endsWith("/")) { basePath += "/"; } adapterBuilder.baseUrl(basePath + API_SUFFIX); return this; } /** * Configure whether to verify certificate and hostname when making https requests. * Default to true. * NOTE: Do NOT set to false in production code, otherwise you would face multiple types of cryptographic attacks. * * @param verifyingSsl True to verify TLS/SSL connection * @return ApiClient */ public ApiClient setVerifyingSsl(boolean verifyingSsl) { this.verifyingSsl = verifyingSsl; applySslSettings(); return this; } /** * Apply SSL related settings to httpClient according to the current values of * verifyingSsl and sslCaCert. 
*/ private void applySslSettings() { try { if (!verifyingSsl) { // Create a trust manager that does not validate certificate chains final TrustManager[] trustAllCerts = new TrustManager[]{ new X509TrustManager() { @Override public void checkClientTrusted(X509Certificate[] chain, String authType) { } @Override public void checkServerTrusted(X509Certificate[] chain, String authType) { } @Override public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[]{}; } } }; // Install the all-trusting trust manager final SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); sslContext.init(null, trustAllCerts, new java.security.SecureRandom()); // Create an ssl socket factory with our all-trusting manager final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory(); okBuilder.sslSocketFactory(sslSocketFactory, (X509TrustManager) trustAllCerts[0]); okBuilder.hostnameVerifier((hostname, session) -> true); } } catch (GeneralSecurityException e) { throw new RuntimeException(e); } } public ApiClient setProxy(Proxy proxy) { okBuilder.proxy(proxy); return this; } /** * Enable/disable debugging for this API client. * * @param debugging To enable (true) or disable (false) debugging * @return ApiClient */ public ApiClient setDebugging(boolean debugging) { if (debugging != this.debugging) { if (debugging) { loggingInterceptor = new HttpLoggingInterceptor(); loggingInterceptor.setLevel(HttpLoggingInterceptor.Level.BODY); okBuilder.addInterceptor(loggingInterceptor); } else { okBuilder.interceptors().remove(loggingInterceptor); loggingInterceptor = null; } } this.debugging = debugging; return this; } public <S> S createService(Class<S> serviceClass) { return adapterBuilder .client(okBuilder.build()) .build() .create(serviceClass); } /** * Helper method to configure the first api key found * * @param apiKey API key * @return ApiClient */ public ApiClient setApiKey(String apiKey) { ApiKeyAuth keyAuth = new ApiKeyAuth("header", "Authorization"); keyAuth.setApiKey(apiKey); okBuilder.addInterceptor(keyAuth); return this; } public Retrofit.Builder getAdapterBuilder() { return adapterBuilder; } public ApiClient setAdapterBuilder(Retrofit.Builder adapterBuilder) { this.adapterBuilder = adapterBuilder; return this; } public OkHttpClient.Builder getOkBuilder() { return okBuilder; } /** * Execute HTTP call and deserialize the HTTP response body into the given return type. * * @param call Call * @return ApiResponse object containing response status, headers and * data, which is a Java object deserialized from response body and would be null * when returnType is null. * @throws ApiException If fail to execute the call */ public <T> ApiResponse<T> execute(final Call<T> call) throws ApiException { try { Response<T> response = call.execute(); T data = handleResponse(response); return new ApiResponse<>(response.code(), response.headers().toMultimap(), data); } catch (IOException e) { throw new ApiException(e); } } /** * Handle the given response, return the deserialized object when the response is successful. 
* * @param <T> Type * @param response Response * @return Type * @throws ApiException If the response has a unsuccessful status code or * fail to deserialize the response body */ private <T> T handleResponse(final Response<T> response) throws ApiException { if (response.isSuccessful()) { return response.body(); } else { String respBody = null; if (response.errorBody() != null) { try { respBody = response.errorBody().string(); } catch (IOException e) { throw new ApiException(response.message(), e, response.code(), response.headers().toMultimap()); } } throw new ApiException(response.message(), response.code(), response.headers().toMultimap(), respBody); } } } /** * This wrapper is to take care of this case: * when the deserialization fails due to JsonParseException and the * expected type is String, then just return the body string. */ class GsonResponseBodyConverterToString<T> implements Converter<ResponseBody, T> { private final Gson gson; private final Type type; GsonResponseBodyConverterToString(Gson gson, Type type) { this.gson = gson; this.type = type; } @Override public T convert(@Nonnull ResponseBody value) throws IOException { String returned = value.string(); try { return gson.fromJson(returned, type); } catch (JsonParseException e) { return (T) returned; } } } class GsonCustomConverterFactory extends Converter.Factory { private final Gson gson; private final GsonConverterFactory gsonConverterFactory; private GsonCustomConverterFactory(Gson gson) { if (gson == null) { throw new NullPointerException("gson == null"); } this.gson = gson; this.gsonConverterFactory = GsonConverterFactory.create(gson); } public static GsonCustomConverterFactory create(Gson gson) { return new GsonCustomConverterFactory(gson); } @Override public Converter<ResponseBody, ?> responseBodyConverter(Type type, Annotation[] annotations, Retrofit retrofit) { if (type.equals(String.class)) { return new GsonResponseBodyConverterToString<>(gson, type); } return gsonConverterFactory.responseBodyConverter(type, annotations, retrofit); } @Override public Converter<?, RequestBody> requestBodyConverter(Type type, Annotation[] parameterAnnotations, Annotation[] methodAnnotations, Retrofit retrofit) { return gsonConverterFactory.requestBodyConverter(type, parameterAnnotations, methodAnnotations, retrofit); } }
dynatrace/src/main/java/de/tsystems/mms/apm/performancesignature/dynatracesaas/rest/ApiClient.java
/* * Copyright (c) 2014-2018 T-Systems Multimedia Solutions GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.tsystems.mms.apm.performancesignature.dynatracesaas.rest; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonParseException; import de.tsystems.mms.apm.performancesignature.dynatracesaas.rest.auth.ApiKeyAuth; import okhttp3.OkHttpClient; import okhttp3.RequestBody; import okhttp3.ResponseBody; import okhttp3.logging.HttpLoggingInterceptor; import retrofit2.Call; import retrofit2.Converter; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.converter.gson.GsonConverterFactory; import retrofit2.converter.scalars.ScalarsConverterFactory; import javax.annotation.Nonnull; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLSocketFactory; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.reflect.Type; import java.net.Proxy; import java.security.GeneralSecurityException; import java.security.cert.X509Certificate; public class ApiClient { private static final String REST_DF = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX"; private static final String API_SUFFIX = "api/v1/"; private boolean debugging = false; private boolean verifyingSsl; private final OkHttpClient.Builder okBuilder; private Retrofit.Builder adapterBuilder; private HttpLoggingInterceptor loggingInterceptor; public ApiClient() { verifyingSsl = true; okBuilder = new OkHttpClient.Builder(); String baseUrl = "https://localhost/" + API_SUFFIX; Gson gson = new GsonBuilder() .setDateFormat(ApiClient.REST_DF) .create(); adapterBuilder = new Retrofit .Builder() .baseUrl(baseUrl) .addConverterFactory(ScalarsConverterFactory.create()) .addConverterFactory(GsonCustomConverterFactory.create(gson)); } /** * Helper constructor for single api key * * @param apiKey API key */ public ApiClient(String apiKey) { this(); this.setApiKey(apiKey); } /** * Set base path * * @param basePath Base path of the URL (e.g https://localhost/api/v2 * @return An instance of OkHttpClient */ public ApiClient setBasePath(String basePath) { if (!basePath.endsWith("/")) { basePath += "/"; } adapterBuilder.baseUrl(basePath + API_SUFFIX); return this; } /** * Configure whether to verify certificate and hostname when making https requests. * Default to true. * NOTE: Do NOT set to false in production code, otherwise you would face multiple types of cryptographic attacks. * * @param verifyingSsl True to verify TLS/SSL connection * @return ApiClient */ public ApiClient setVerifyingSsl(boolean verifyingSsl) { this.verifyingSsl = verifyingSsl; applySslSettings(); return this; } /** * Apply SSL related settings to httpClient according to the current values of * verifyingSsl and sslCaCert. 
*/ private void applySslSettings() { try { if (!verifyingSsl) { // Create a trust manager that does not validate certificate chains final TrustManager[] trustAllCerts = new TrustManager[]{ new X509TrustManager() { @Override public void checkClientTrusted(X509Certificate[] chain, String authType) { } @Override public void checkServerTrusted(X509Certificate[] chain, String authType) { } @Override public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[]{}; } } }; // Install the all-trusting trust manager final SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); sslContext.init(null, trustAllCerts, new java.security.SecureRandom()); // Create an ssl socket factory with our all-trusting manager final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory(); okBuilder.sslSocketFactory(sslSocketFactory, (X509TrustManager) trustAllCerts[0]); okBuilder.hostnameVerifier((hostname, session) -> true); } } catch (GeneralSecurityException e) { throw new RuntimeException(e); } } public ApiClient setProxy(Proxy proxy) { okBuilder.proxy(proxy); return this; } /** * Enable/disable debugging for this API client. * * @param debugging To enable (true) or disable (false) debugging * @return ApiClient */ public ApiClient setDebugging(boolean debugging) { if (debugging != this.debugging) { if (debugging) { loggingInterceptor = new HttpLoggingInterceptor(); loggingInterceptor.setLevel(HttpLoggingInterceptor.Level.BODY); okBuilder.addInterceptor(loggingInterceptor); } else { okBuilder.interceptors().remove(loggingInterceptor); loggingInterceptor = null; } } this.debugging = debugging; return this; } public <S> S createService(Class<S> serviceClass) { return adapterBuilder .client(okBuilder.build()) .build() .create(serviceClass); } /** * Helper method to configure the first api key found * * @param apiKey API key * @return ApiClient */ public ApiClient setApiKey(String apiKey) { ApiKeyAuth keyAuth = new ApiKeyAuth("header", "Authorization"); keyAuth.setApiKey(apiKey); okBuilder.addInterceptor(keyAuth); return this; } public Retrofit.Builder getAdapterBuilder() { return adapterBuilder; } public ApiClient setAdapterBuilder(Retrofit.Builder adapterBuilder) { this.adapterBuilder = adapterBuilder; return this; } public OkHttpClient.Builder getOkBuilder() { return okBuilder; } /** * Execute HTTP call and deserialize the HTTP response body into the given return type. * * @param call Call * @return ApiResponse object containing response status, headers and * data, which is a Java object deserialized from response body and would be null * when returnType is null. * @throws ApiException If fail to execute the call */ public <T> ApiResponse<T> execute(final Call<T> call) throws ApiException { try { Response<T> response = call.execute(); T data = handleResponse(response); return new ApiResponse<>(response.code(), response.headers().toMultimap(), data); } catch (IOException e) { throw new ApiException(e); } } /** * Handle the given response, return the deserialized object when the response is successful. 
* * @param <T> Type * @param response Response * @return Type * @throws ApiException If the response has a unsuccessful status code or * fail to deserialize the response body */ private <T> T handleResponse(final Response<T> response) throws ApiException { if (response.isSuccessful()) { return response.body(); } else { String respBody = null; if (response.errorBody() != null) { try { respBody = response.errorBody().string(); } catch (IOException e) { throw new ApiException(response.message(), e, response.code(), response.headers().toMultimap()); } } throw new ApiException(response.message(), response.code(), response.headers().toMultimap(), respBody); } } } /** * This wrapper is to take care of this case: * when the deserialization fails due to JsonParseException and the * expected type is String, then just return the body string. */ class GsonResponseBodyConverterToString<T> implements Converter<ResponseBody, T> { private final Gson gson; private final Type type; GsonResponseBodyConverterToString(Gson gson, Type type) { this.gson = gson; this.type = type; } @Override public T convert(@Nonnull ResponseBody value) throws IOException { String returned = value.string(); try { return gson.fromJson(returned, type); } catch (JsonParseException e) { return (T) returned; } } } class GsonCustomConverterFactory extends Converter.Factory { private final Gson gson; private final GsonConverterFactory gsonConverterFactory; private GsonCustomConverterFactory(Gson gson) { if (gson == null) { throw new NullPointerException("gson == null"); } this.gson = gson; this.gsonConverterFactory = GsonConverterFactory.create(gson); } public static GsonCustomConverterFactory create(Gson gson) { return new GsonCustomConverterFactory(gson); } @Override public Converter<ResponseBody, ?> responseBodyConverter(Type type, Annotation[] annotations, Retrofit retrofit) { if (type.equals(String.class)) { return new GsonResponseBodyConverterToString<>(gson, type); } return gsonConverterFactory.responseBodyConverter(type, annotations, retrofit); } @Override public Converter<?, RequestBody> requestBodyConverter(Type type, Annotation[] parameterAnnotations, Annotation[] methodAnnotations, Retrofit retrofit) { return gsonConverterFactory.requestBodyConverter(type, parameterAnnotations, methodAnnotations, retrofit); } }
increase connect & read timeout
dynatrace/src/main/java/de/tsystems/mms/apm/performancesignature/dynatracesaas/rest/ApiClient.java
increase connect & read timeout
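The diff in this record adds explicit connect and read timeouts to the OkHttpClient.Builder created in ApiClient's constructor. A standalone sketch of the same two OkHttp calls (the 30-second values simply mirror the diff; tune them for your environment):

import java.util.concurrent.TimeUnit;
import okhttp3.OkHttpClient;

/** Standalone sketch of the timeout configuration this record adds to ApiClient. */
public final class TimeoutConfiguredClient {

    private TimeoutConfiguredClient() {
    }

    public static OkHttpClient build() {
        return new OkHttpClient.Builder()
                .connectTimeout(30, TimeUnit.SECONDS) // time allowed to establish the TCP/TLS connection
                .readTimeout(30, TimeUnit.SECONDS)    // maximum inactivity between bytes while reading a response
                .build();
    }
}

Without explicit values OkHttp falls back to its shorter defaults (10 seconds in the 3.x line), so slow Dynatrace SaaS responses would otherwise surface as SocketTimeoutException.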
Java
apache-2.0
7275ae7dd8853cde299bfedf6e263520d6854e31
0
spinnaker/halyard,spinnaker/halyard,spinnaker/halyard
/* * Copyright 2017 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.halyard.deploy.services.v1; import com.amazonaws.util.IOUtils; import com.netflix.spinnaker.halyard.config.config.v1.RelaxedObjectMapper; import com.netflix.spinnaker.halyard.config.model.v1.node.DeploymentConfiguration; import com.netflix.spinnaker.halyard.config.problem.v1.ConfigProblemBuilder; import com.netflix.spinnaker.halyard.config.services.v1.DeploymentService; import com.netflix.spinnaker.halyard.config.services.v1.VersionsService; import com.netflix.spinnaker.halyard.core.error.v1.HalException; import com.netflix.spinnaker.halyard.core.registry.v1.BillOfMaterials; import com.netflix.spinnaker.halyard.core.registry.v1.Versions; import com.netflix.spinnaker.halyard.core.registry.v1.Versions.Version; import com.netflix.spinnaker.halyard.core.registry.v1.WriteableProfileRegistry; import com.netflix.spinnaker.halyard.deploy.spinnaker.v1.SpinnakerArtifact; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.yaml.snakeyaml.Yaml; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.nio.file.Paths; import java.util.Map; import java.util.stream.Collectors; import static com.netflix.spinnaker.halyard.core.problem.v1.Problem.Severity.FATAL; @Component public class ArtifactService { @Autowired(required = false) WriteableProfileRegistry writeableProfileRegistry; @Autowired Yaml yamlParser; @Autowired RelaxedObjectMapper relaxedObjectMapper; @Autowired DeploymentService deploymentService; @Autowired VersionsService versionsService; BillOfMaterials getBillOfMaterials(String deploymentName) { DeploymentConfiguration deploymentConfiguration = deploymentService.getDeploymentConfiguration(deploymentName); String version = deploymentConfiguration.getVersion(); return versionsService.getBillOfMaterials(version); } public String getArtifactVersion(String deploymentName, SpinnakerArtifact artifact) { return getBillOfMaterials(deploymentName).getArtifactVersion(artifact.getName()); } private void deleteVersion(Versions versionsCollection, String version) { versionsCollection.setVersions(versionsCollection.getVersions() .stream() .filter(other -> !other.getVersion().equals(version)) .collect(Collectors.toList())); } public void deprecateVersion(Version version) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify your halconfig bucket contents.").build()); } Versions versionsCollection = versionsService.getVersions(); deleteVersion(versionsCollection, version.getVersion()); writeableProfileRegistry.writeVersions(yamlParser.dump(relaxedObjectMapper.convertValue(versionsCollection, Map.class))); } public void publishVersion(Version version) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the 
\"spinnaker.config.input.writerEnabled\" property to \"true\" to modify your halconfig bucket contents.").build()); } Versions versionsCollection = versionsService.getVersions(); deleteVersion(versionsCollection, version.getVersion()); versionsCollection.getVersions().add(version); writeableProfileRegistry.writeVersions(yamlParser.dump(relaxedObjectMapper.convertValue(versionsCollection, Map.class))); } public void publishLatestSpinnaker(String latestSpinnaker) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify BOM contents.").build()); } Versions versionsCollection = versionsService.getVersions(); boolean hasLatest = versionsCollection.getVersions().stream().anyMatch(v -> v.getVersion().equals(latestSpinnaker)); if (!hasLatest) { throw new HalException(FATAL, "Version " + latestSpinnaker + " does not exist in the list of published versions"); } versionsCollection.setLatest(latestSpinnaker); versionsCollection.setLatestSpinnaker(latestSpinnaker); writeableProfileRegistry.writeVersions(yamlParser.dump(relaxedObjectMapper.convertValue(versionsCollection, Map.class))); } public void publishLatestHalyard(String latestHalyard) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify BOM contents.").build()); } Versions versionsCollection = versionsService.getVersions(); versionsCollection.setLatestHalyard(latestHalyard); writeableProfileRegistry.writeVersions(yamlParser.dump(relaxedObjectMapper.convertValue(versionsCollection, Map.class))); } public void writeBom(String bomPath) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify BOM contents.").build()); } BillOfMaterials bom; String bomContents; String version; try { bomContents = IOUtils.toString(new FileInputStream(bomPath)); bom = relaxedObjectMapper.convertValue( yamlParser.load(bomContents), BillOfMaterials.class); version = bom.getVersion(); } catch (IOException e) { throw new HalException(new ConfigProblemBuilder(FATAL, "Unable to load Bill of Materials: " + e.getMessage()).build() ); } if (version == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "No version was supplied in this BOM.").build()); } writeableProfileRegistry.writeBom(bom.getVersion(), bomContents); } public void writeArtifactConfig(String bomPath, String artifactName, String profilePath) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify base-profiles.").build()); } BillOfMaterials bom; File profileFile = Paths.get(profilePath).toFile(); String profileContents; try { bom = relaxedObjectMapper.convertValue( yamlParser.load(IOUtils.toString(new FileInputStream(bomPath))), BillOfMaterials.class); } catch (IOException e) { throw new HalException(new ConfigProblemBuilder(FATAL, "Unable to load Bill of Materials: " + e.getMessage()).build() ); } try { profileContents = IOUtils.toString(new FileInputStream(profileFile)); } catch (IOException e) { throw new HalException(new ConfigProblemBuilder(FATAL, "Unable to load profile : " + e.getMessage()).build() ); } writeableProfileRegistry.writeArtifactConfig(bom, artifactName, 
profileFile.getName(), profileContents); } }
halyard-deploy/src/main/java/com/netflix/spinnaker/halyard/deploy/services/v1/ArtifactService.java
/* * Copyright 2017 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.halyard.deploy.services.v1; import com.amazonaws.util.IOUtils; import com.netflix.spinnaker.halyard.config.config.v1.RelaxedObjectMapper; import com.netflix.spinnaker.halyard.config.config.v1.StrictObjectMapper; import com.netflix.spinnaker.halyard.config.model.v1.node.DeploymentConfiguration; import com.netflix.spinnaker.halyard.config.problem.v1.ConfigProblemBuilder; import com.netflix.spinnaker.halyard.config.services.v1.DeploymentService; import com.netflix.spinnaker.halyard.config.services.v1.VersionsService; import com.netflix.spinnaker.halyard.core.error.v1.HalException; import com.netflix.spinnaker.halyard.core.registry.v1.BillOfMaterials; import com.netflix.spinnaker.halyard.core.registry.v1.Versions; import com.netflix.spinnaker.halyard.core.registry.v1.Versions.Version; import com.netflix.spinnaker.halyard.core.registry.v1.WriteableProfileRegistry; import com.netflix.spinnaker.halyard.deploy.spinnaker.v1.SpinnakerArtifact; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.yaml.snakeyaml.Yaml; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.nio.file.Paths; import java.util.Map; import java.util.stream.Collectors; import static com.netflix.spinnaker.halyard.core.problem.v1.Problem.Severity.FATAL; @Component public class ArtifactService { @Autowired(required = false) WriteableProfileRegistry writeableProfileRegistry; @Autowired Yaml yamlParser; @Autowired RelaxedObjectMapper relaxedObjectMapper; @Autowired DeploymentService deploymentService; @Autowired VersionsService versionsService; BillOfMaterials getBillOfMaterials(String deploymentName) { DeploymentConfiguration deploymentConfiguration = deploymentService.getDeploymentConfiguration(deploymentName); String version = deploymentConfiguration.getVersion(); return versionsService.getBillOfMaterials(version); } public String getArtifactVersion(String deploymentName, SpinnakerArtifact artifact) { return getBillOfMaterials(deploymentName).getArtifactVersion(artifact.getName()); } private void deleteVersion(Versions versionsCollection, String version) { versionsCollection.setVersions(versionsCollection.getVersions() .stream() .filter(other -> !other.getVersion().equals(version)) .collect(Collectors.toList())); } public void deprecateVersion(Version version) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify your halconfig bucket contents.").build()); } Versions versionsCollection = versionsService.getVersions(); deleteVersion(versionsCollection, version.getVersion()); writeableProfileRegistry.writeVersions(yamlParser.dump(relaxedObjectMapper.convertValue(versionsCollection, Map.class))); } public void publishVersion(Version version) { if (writeableProfileRegistry == null) { throw new 
HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify your halconfig bucket contents.").build()); } Versions versionsCollection = versionsService.getVersions(); deleteVersion(versionsCollection, version.getVersion()); versionsCollection.getVersions().add(version); writeableProfileRegistry.writeVersions(yamlParser.dump(relaxedObjectMapper.convertValue(versionsCollection, Map.class))); } public void publishLatestSpinnaker(String latestSpinnaker) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify BOM contents.").build()); } Versions versionsCollection = versionsService.getVersions(); boolean hasLatest = versionsCollection.getVersions().stream().anyMatch(v -> v.getVersion().equals(latestSpinnaker)); if (!hasLatest) { throw new HalException(FATAL, "Version " + latestSpinnaker + " does not exist in the list of published versions"); } versionsCollection.setLatest(latestSpinnaker); versionsCollection.setLatestSpinnaker(latestSpinnaker); writeableProfileRegistry.writeVersions(yamlParser.dump(strictObjectMapper.convertValue(versionsCollection, Map.class))); } public void publishLatestHalyard(String latestHalyard) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify BOM contents.").build()); } Versions versionsCollection = versionsService.getVersions(); versionsCollection.setLatestHalyard(latestHalyard); writeableProfileRegistry.writeVersions(yamlParser.dump(relaxedObjectMapper.convertValue(versionsCollection, Map.class))); } public void writeBom(String bomPath) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify BOM contents.").build()); } BillOfMaterials bom; String bomContents; String version; try { bomContents = IOUtils.toString(new FileInputStream(bomPath)); bom = relaxedObjectMapper.convertValue( yamlParser.load(bomContents), BillOfMaterials.class); version = bom.getVersion(); } catch (IOException e) { throw new HalException(new ConfigProblemBuilder(FATAL, "Unable to load Bill of Materials: " + e.getMessage()).build() ); } if (version == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "No version was supplied in this BOM.").build()); } writeableProfileRegistry.writeBom(bom.getVersion(), bomContents); } public void writeArtifactConfig(String bomPath, String artifactName, String profilePath) { if (writeableProfileRegistry == null) { throw new HalException(new ConfigProblemBuilder(FATAL, "You need to set the \"spinnaker.config.input.writerEnabled\" property to \"true\" to modify base-profiles.").build()); } BillOfMaterials bom; File profileFile = Paths.get(profilePath).toFile(); String profileContents; try { bom = relaxedObjectMapper.convertValue( yamlParser.load(IOUtils.toString(new FileInputStream(bomPath))), BillOfMaterials.class); } catch (IOException e) { throw new HalException(new ConfigProblemBuilder(FATAL, "Unable to load Bill of Materials: " + e.getMessage()).build() ); } try { profileContents = IOUtils.toString(new FileInputStream(profileFile)); } catch (IOException e) { throw new HalException(new ConfigProblemBuilder(FATAL, "Unable to load profile : " + e.getMessage()).build() ); } 
writeableProfileRegistry.writeArtifactConfig(bom, artifactName, profileFile.getName(), profileContents); } }
fix(build): Too few rebases spoil the build (#474)
halyard-deploy/src/main/java/com/netflix/spinnaker/halyard/deploy/services/v1/ArtifactService.java
fix(build): Too few rebases spoil the build (#474)
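The rebase issue fixed in this commit is the stray strictObjectMapper reference left in publishLatestSpinnaker of the old file; the corrected ArtifactService uses the single injected RelaxedObjectMapper for every writeVersions call. A minimal standalone sketch of that serialization pattern, with a plain Jackson ObjectMapper and SnakeYAML Yaml as assumed stand-ins for Halyard's injected beans:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.yaml.snakeyaml.Yaml;
import java.util.Map;

class VersionsDumpSketch {
    // Stand-ins for the @Autowired RelaxedObjectMapper and Yaml beans in ArtifactService.
    private final ObjectMapper relaxedObjectMapper = new ObjectMapper();
    private final Yaml yamlParser = new Yaml();

    String dumpVersions(Object versionsCollection) {
        // Same call shape as writeVersions(yamlParser.dump(relaxedObjectMapper.convertValue(..., Map.class))):
        // convert the bean to a Map with the one mapper, then dump it as YAML.
        Map<?, ?> asMap = relaxedObjectMapper.convertValue(versionsCollection, Map.class);
        return yamlParser.dump(asMap);
    }
}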
Java
apache-2.0
0511d6c23fcd566edec123a049c6371dea6f9474
0
martint/airlift,zhenyuy-fb/airlift,cberner/airlift,erichwang/airlift,johngmyers/airlift,johngmyers/platform-rack,martint/airlift,johngmyers/airlift,cberner/airlift,proofpoint/platform,johngmyers/platform,daququ/airlift,dain/airlift,airlift/airlift,mono-plane/airlift,johngmyers/platform-rack,johngmyers/airlift,mono-plane/airlift,dain/airlift,erichwang/airlift,haozhun/airlift,johngmyers/platform-rack,erichwang/airlift,zhenyuy-fb/airlift,johngmyers/platform-rack,johngmyers/platform,zhenyuy-fb/airlift,gwittel/platform,martint/airlift,dain/airlift,johngmyers/platform-rack,zhenyuy-fb/airlift,gwittel/platform,cberner/airlift,electrum/airlift,daququ/airlift,proofpoint/platform,cberner/airlift,airlift/airlift-rack,haozhun/airlift,johngmyers/platform,mono-plane/airlift,proofpoint/platform,daququ/airlift,electrum/airlift,gwittel/platform,haozhun/airlift,daququ/airlift,electrum/airlift,airlift/airlift,johngmyers/airlift,johngmyers/platform-rack,airlift/airlift,haozhun/airlift,airlift/airlift,electrum/airlift,dain/airlift,airlift/airlift-rack,mono-plane/airlift
package com.proofpoint.event.client; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.codehaus.jackson.JsonGenerator; import java.io.IOException; import java.lang.reflect.Method; import java.util.ArrayDeque; import java.util.Comparator; import java.util.Deque; import java.util.List; import static com.google.common.base.Objects.firstNonNull; import static com.proofpoint.event.client.EventDataType.validateFieldValueType; class EventFieldMetadata { public static final Comparator<EventFieldMetadata> NAME_COMPARATOR = new Comparator<EventFieldMetadata>() { public int compare(EventFieldMetadata a, EventFieldMetadata b) { return a.name.compareTo(b.name); } }; private final String name; private final String v1Name; private final Method method; private final EventDataType eventDataType; private final EventTypeMetadata<?> nestedType; private final boolean iterable; EventFieldMetadata(String name, String v1Name, Method method, EventDataType eventDataType, EventTypeMetadata<?> nestedType, boolean iterable) { Preconditions.checkArgument((eventDataType != null) || (nestedType != null), "both eventDataType and nestedType are null"); Preconditions.checkArgument((eventDataType == null) || (nestedType == null), "both eventDataType and nestedType are set"); this.name = name; this.v1Name = v1Name; this.method = method; this.eventDataType = eventDataType; this.nestedType = nestedType; this.iterable = iterable; } @SuppressWarnings("ThrowableResultOfMethodCallIgnored") // IDEA-74322 private Object getValue(Object event) throws InvalidEventException { try { return method.invoke(event); } catch (Exception e) { throw new InvalidEventException(firstNonNull(e.getCause(), e), "Unable to get value of event field %s: Exception occurred while invoking [%s]", name, method.toGenericString()); } } public void writeField(JsonGenerator jsonGenerator, Object event) throws IOException { writeField(jsonGenerator, event, new ArrayDeque<Object>()); } private void writeField(JsonGenerator jsonGenerator, Object event, Deque<Object> objectStack) throws IOException { Object value = getValue(event); if (value != null) { jsonGenerator.writeFieldName(name); if (iterable) { validateFieldValueType(value, Iterable.class); jsonGenerator.writeStartArray(); for (Object item : (Iterable<?>) value) { writeFieldValue(jsonGenerator, item, objectStack); } jsonGenerator.writeEndArray(); } else { writeFieldValue(jsonGenerator, value, objectStack); } } } private void writeFieldValue(JsonGenerator jsonGenerator, Object value, Deque<Object> objectStack) throws IOException { if (eventDataType != null) { eventDataType.writeFieldValue(jsonGenerator, value); } else { validateFieldValueType(value, nestedType.getEventClass()); for (Object o : objectStack) { if (value == o) { List<Object> path = Lists.reverse(Lists.newArrayList(objectStack)); throw new InvalidEventException("Cycle detected in event data: %s", path); } } objectStack.push(value); jsonGenerator.writeStartObject(); for (EventFieldMetadata field : nestedType.getFields()) { field.writeField(jsonGenerator, value, objectStack); } jsonGenerator.writeEndObject(); objectStack.pop(); } } public void writeFieldV1(JsonGenerator jsonGenerator, Object event) throws IOException { Preconditions.checkState(!iterable, "iterable fields not supported for JSON V1"); Preconditions.checkState(nestedType == null, "nested types not supported for JSON V1"); Object value = getValue(event); if (value != null) { jsonGenerator.writeStringField("name", v1Name); 
jsonGenerator.writeFieldName("value"); eventDataType.writeFieldValue(jsonGenerator, value); } } }
event/src/main/java/com/proofpoint/event/client/EventFieldMetadata.java
package com.proofpoint.event.client; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.codehaus.jackson.JsonGenerator; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayDeque; import java.util.Comparator; import java.util.Deque; import java.util.List; import static com.proofpoint.event.client.EventDataType.validateFieldValueType; class EventFieldMetadata { public static final Comparator<EventFieldMetadata> NAME_COMPARATOR = new Comparator<EventFieldMetadata>() { public int compare(EventFieldMetadata a, EventFieldMetadata b) { return a.name.compareTo(b.name); } }; private final String name; private final String v1Name; private final Method method; private final EventDataType eventDataType; private final EventTypeMetadata<?> nestedType; private final boolean iterable; EventFieldMetadata(String name, String v1Name, Method method, EventDataType eventDataType, EventTypeMetadata<?> nestedType, boolean iterable) { Preconditions.checkArgument((eventDataType != null) || (nestedType != null), "both eventDataType and nestedType are null"); Preconditions.checkArgument((eventDataType == null) || (nestedType == null), "both eventDataType and nestedType are set"); this.name = name; this.v1Name = v1Name; this.method = method; this.eventDataType = eventDataType; this.nestedType = nestedType; this.iterable = iterable; } private Object getValue(Object event) throws InvalidEventException { try { return method.invoke(event); } catch (IllegalAccessException e) { throw new InvalidEventException(e, "Unexpected exception reading event field %s", name); } catch (InvocationTargetException e) { Throwable cause = e.getCause(); if (cause == null) { cause = e; } throw new InvalidEventException(cause, "Unable to get value of event field %s: Exception occurred while invoking [%s]", name, method.toGenericString()); } } public void writeField(JsonGenerator jsonGenerator, Object event) throws IOException { writeField(jsonGenerator, event, new ArrayDeque<Object>()); } private void writeField(JsonGenerator jsonGenerator, Object event, Deque<Object> objectStack) throws IOException { Object value = getValue(event); if (value != null) { jsonGenerator.writeFieldName(name); if (iterable) { validateFieldValueType(value, Iterable.class); jsonGenerator.writeStartArray(); for (Object item : (Iterable<?>) value) { writeFieldValue(jsonGenerator, item, objectStack); } jsonGenerator.writeEndArray(); } else { writeFieldValue(jsonGenerator, value, objectStack); } } } private void writeFieldValue(JsonGenerator jsonGenerator, Object value, Deque<Object> objectStack) throws IOException { if (eventDataType != null) { eventDataType.writeFieldValue(jsonGenerator, value); } else { validateFieldValueType(value, nestedType.getEventClass()); for (Object o : objectStack) { if (value == o) { List<Object> path = Lists.reverse(Lists.newArrayList(objectStack)); throw new InvalidEventException("Cycle detected in event data: %s", path); } } objectStack.push(value); jsonGenerator.writeStartObject(); for (EventFieldMetadata field : nestedType.getFields()) { field.writeField(jsonGenerator, value, objectStack); } jsonGenerator.writeEndObject(); objectStack.pop(); } } public void writeFieldV1(JsonGenerator jsonGenerator, Object event) throws IOException { Preconditions.checkState(!iterable, "iterable fields not supported for JSON V1"); Preconditions.checkState(nestedType == null, "nested types not supported for JSON V1"); Object value 
= getValue(event); if (value != null) { jsonGenerator.writeStringField("name", v1Name); jsonGenerator.writeFieldName("value"); eventDataType.writeFieldValue(jsonGenerator, value); } } }
Simplify exception handling for invoke
event/src/main/java/com/proofpoint/event/client/EventFieldMetadata.java
Simplify exception handling for invoke
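The simplification above replaces separate IllegalAccessException and InvocationTargetException branches with one catch that unwraps the cause via firstNonNull(e.getCause(), e). A minimal standalone sketch of the same pattern; RuntimeException and the plain ternary are assumed stand-ins for InvalidEventException and Guava's firstNonNull:

import java.lang.reflect.Method;

class InvokeSketch {
    static Object getValue(Method method, Object target) {
        try {
            return method.invoke(target);
        } catch (Exception e) {
            // InvocationTargetException wraps the real failure in getCause(); other reflective
            // failures (e.g. IllegalAccessException) carry no cause, so fall back to e itself.
            Throwable cause = e.getCause() != null ? e.getCause() : e;
            throw new RuntimeException("Unable to get value via " + method.toGenericString(), cause);
        }
    }
}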
Java
apache-2.0
de375002f4359a83c2fc395ef3ab13e95e12c16a
0
think-gem/FoxBPM,think-gem/FoxBPM,fashionsun/FoxBPM,FoxBPM/FoxBPM,think-gem/FoxBPM,FoxBPM/FoxBPM,fashionsun/FoxBPM,fashionsun/FoxBPM,fashionsun/FoxBPM,FoxBPM/FoxBPM
/** * Copyright 1996-2014 FoxBPM Co.,Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @author kenshin * @author ych */ package org.foxbpm.engine.impl.bpmn.parser.model; import org.eclipse.bpmn2.Activity; import org.eclipse.bpmn2.BaseElement; import org.eclipse.bpmn2.LoopCharacteristics; import org.foxbpm.engine.impl.bpmn.behavior.ActivityBehavior; import org.foxbpm.engine.impl.bpmn.behavior.BaseElementBehavior; import org.foxbpm.engine.impl.bpmn.parser.BpmnBehaviorEMFConverter; import org.foxbpm.engine.impl.util.BpmnModelUtil; public class ActivityParser extends FlowNodeParser { @Override public BaseElementBehavior parser(BaseElement baseElement) { Activity activity = (Activity) baseElement; ActivityBehavior activityBehavior = (ActivityBehavior) baseElementBehavior; LoopCharacteristics loopCharacteristics = activity.getLoopCharacteristics(); if(loopCharacteristics!=null){ org.foxbpm.engine.impl.bpmn.behavior.LoopCharacteristics loopCharacteristicsbehavior = (org.foxbpm.engine.impl.bpmn.behavior.LoopCharacteristics) BpmnBehaviorEMFConverter .getBaseElementBehavior(loopCharacteristics, null); activityBehavior.setLoopCharacteristics(loopCharacteristicsbehavior); } activityBehavior.setSkipStrategy(BpmnModelUtil.getSkipStrategy(activity)); return super.parser(baseElement); } @Override public void init() { baseElementBehavior = new ActivityBehavior(); } }
modules/foxbpm-engine/src/main/java/org/foxbpm/engine/impl/bpmn/parser/model/ActivityParser.java
/** * Copyright 1996-2014 FoxBPM Co.,Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @author kenshin * @author ych */ package org.foxbpm.engine.impl.bpmn.parser.model; import org.eclipse.bpmn2.Activity; import org.eclipse.bpmn2.BaseElement; import org.eclipse.bpmn2.LoopCharacteristics; import org.foxbpm.engine.impl.bpmn.behavior.ActivityBehavior; import org.foxbpm.engine.impl.bpmn.behavior.BaseElementBehavior; import org.foxbpm.engine.impl.bpmn.parser.BpmnBehaviorEMFConverter; import org.foxbpm.engine.impl.util.BpmnModelUtil; public class ActivityParser extends FlowNodeParser { @Override public BaseElementBehavior parser(BaseElement baseElement) { Activity activity = (Activity) baseElement; ActivityBehavior activityBehavior = (ActivityBehavior) baseElementBehavior; LoopCharacteristics loopCharacteristics = activity.getLoopCharacteristics(); org.foxbpm.engine.impl.bpmn.behavior.LoopCharacteristics loopCharacteristicsbehavior = (org.foxbpm.engine.impl.bpmn.behavior.LoopCharacteristics) BpmnBehaviorEMFConverter .getBaseElementBehavior(loopCharacteristics, null); activityBehavior.setLoopCharacteristics(loopCharacteristicsbehavior); activityBehavior.setSkipStrategy(BpmnModelUtil.getSkipStrategy(activity)); return super.parser(baseElement); } @Override public void init() { baseElementBehavior = new ActivityBehavior(); } }
Fixed a bug in the activity model conversion
modules/foxbpm-engine/src/main/java/org/foxbpm/engine/impl/bpmn/parser/model/ActivityParser.java
Fixed a bug in the activity model conversion
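The fix translated above adds a null guard so that activities without loop characteristics no longer pass a null model element into BpmnBehaviorEMFConverter. A minimal sketch of the guarded pattern, with simplified stand-in types assumed in place of the BPMN2 and FoxBPM classes:

class LoopGuardSketch {
    // Assumed stand-ins for org.eclipse.bpmn2.LoopCharacteristics and the FoxBPM behavior type.
    static class ModelLoopCharacteristics {}
    static class BehaviorLoopCharacteristics {}

    static class ActivityBehavior {
        private BehaviorLoopCharacteristics loopCharacteristics;
        void setLoopCharacteristics(BehaviorLoopCharacteristics lc) { this.loopCharacteristics = lc; }
    }

    static void applyLoopCharacteristics(ModelLoopCharacteristics model, ActivityBehavior behavior) {
        if (model != null) {                                  // guard added by the fix
            behavior.setLoopCharacteristics(convert(model));  // convert only when the activity defines loops
        }
    }

    // Placeholder for BpmnBehaviorEMFConverter.getBaseElementBehavior(loopCharacteristics, null).
    private static BehaviorLoopCharacteristics convert(ModelLoopCharacteristics model) {
        return new BehaviorLoopCharacteristics();
    }
}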
Java
apache-2.0
b76895bc6c539e815e88084440d72b1b481121e0
0
PetrGasparik/midpoint,arnost-starosta/midpoint,arnost-starosta/midpoint,PetrGasparik/midpoint,arnost-starosta/midpoint,Pardus-Engerek/engerek,Pardus-Engerek/engerek,Pardus-Engerek/engerek,arnost-starosta/midpoint,arnost-starosta/midpoint,PetrGasparik/midpoint,PetrGasparik/midpoint,Pardus-Engerek/engerek
/* * Copyright (c) 2010-2016 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.model.impl.sync; import static com.evolveum.midpoint.schema.internals.InternalsConfig.consistencyChecks; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.xml.namespace.QName; import com.evolveum.midpoint.model.impl.expr.ModelExpressionThreadLocalHolder; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.Validate; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; import com.evolveum.midpoint.common.SynchronizationUtils; import com.evolveum.midpoint.model.api.ModelExecuteOptions; import com.evolveum.midpoint.model.api.PolicyViolationException; import com.evolveum.midpoint.model.common.SystemObjectCache; import com.evolveum.midpoint.model.common.expression.ExpressionFactory; import com.evolveum.midpoint.model.common.expression.ExpressionUtil; import com.evolveum.midpoint.model.common.expression.ExpressionVariables; import com.evolveum.midpoint.model.impl.lens.Clockwork; import com.evolveum.midpoint.model.impl.lens.ContextFactory; import com.evolveum.midpoint.model.impl.lens.LensContext; import com.evolveum.midpoint.model.impl.lens.LensFocusContext; import com.evolveum.midpoint.model.impl.lens.LensProjectionContext; import com.evolveum.midpoint.model.impl.util.Utils; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.PrismProperty; import com.evolveum.midpoint.prism.PrismPropertyValue; import com.evolveum.midpoint.prism.delta.ChangeType; import com.evolveum.midpoint.prism.delta.ItemDelta; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.delta.PropertyDelta; import com.evolveum.midpoint.prism.polystring.PolyString; import com.evolveum.midpoint.provisioning.api.ResourceObjectShadowChangeDescription; import com.evolveum.midpoint.repo.api.RepositoryService; import com.evolveum.midpoint.schema.GetOperationOptions; import com.evolveum.midpoint.schema.ResourceShadowDiscriminator; import com.evolveum.midpoint.schema.SelectorOptions; import com.evolveum.midpoint.schema.constants.ObjectTypes; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.internals.InternalsConfig; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.result.OperationResultStatus; import com.evolveum.midpoint.schema.statistics.StatisticsUtil; import com.evolveum.midpoint.schema.statistics.SynchronizationInformation; import com.evolveum.midpoint.schema.util.ShadowUtil; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.util.QNameUtil; import com.evolveum.midpoint.util.exception.CommunicationException; import com.evolveum.midpoint.util.exception.ConfigurationException; import 
com.evolveum.midpoint.util.exception.ExpressionEvaluationException; import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.exception.SecurityViolationException; import com.evolveum.midpoint.util.exception.SystemException; import com.evolveum.midpoint.util.logging.LoggingUtils; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.BeforeAfterType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ExpressionType; import com.evolveum.midpoint.xml.ns._public.common.common_3.FocusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectReferenceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectSynchronizationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectTemplateType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationActionType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationReactionType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationSituationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemConfigurationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; /** * Synchronization service receives change notifications from provisioning. It * decides which synchronization policy to use and evaluates it (correlation, * confirmation, situations, reaction, ...) * * @author lazyman * @author Radovan Semancik * * Note: don't autowire this bean by implementing class, as it is * proxied by Spring AOP. Use the interface instead. */ @Service(value = "synchronizationService") public class SynchronizationServiceImpl implements SynchronizationService { private static final Trace LOGGER = TraceManager.getTrace(SynchronizationServiceImpl.class); @Autowired(required = true) private ActionManager<Action> actionManager; @Autowired private CorrelationConfirmationEvaluator correlationConfirmationEvaluator; @Autowired(required = true) @Qualifier("cacheRepositoryService") private RepositoryService repositoryService; @Autowired(required = true) private ContextFactory contextFactory; @Autowired(required = true) private Clockwork clockwork; @Autowired(required = true) private ExpressionFactory expressionFactory; @Autowired(required = true) private SystemObjectCache systemObjectCache; @Override public void notifyChange(ResourceObjectShadowChangeDescription change, Task task, OperationResult parentResult) { validate(change); Validate.notNull(parentResult, "Parent operation result must not be null."); boolean logDebug = isLogDebug(change); if (logDebug) { LOGGER.debug("SYNCHRONIZATION: received change notification {}", change); } else { LOGGER.trace("SYNCHRONIZATION: received change notification {}", change); } OperationResult subResult = parentResult.createSubresult(NOTIFY_CHANGE); PrismObject<? extends ShadowType> currentShadow = change.getCurrentShadow(); PrismObject<? 
extends ShadowType> applicableShadow = currentShadow; if (applicableShadow == null) { // We need this e.g. in case of delete applicableShadow = change.getOldShadow(); } SynchronizationEventInformation eventInfo = new SynchronizationEventInformation(applicableShadow, change.getSourceChannel(), task); try { ResourceType resourceType = change.getResource().asObjectable(); PrismObject<SystemConfigurationType> configuration = systemObjectCache.getSystemConfiguration(subResult); ObjectSynchronizationType synchronizationPolicy = determineSynchronizationPolicy(resourceType, applicableShadow, configuration, task, subResult); if (LOGGER.isTraceEnabled()) { String policyDesc = null; if (synchronizationPolicy != null) { if (synchronizationPolicy.getName() == null) { policyDesc = "(kind=" + synchronizationPolicy.getKind() + ", intent=" + synchronizationPolicy.getIntent() + ", objectclass=" + synchronizationPolicy.getObjectClass() + ")"; } else { policyDesc = synchronizationPolicy.getName(); } } LOGGER.trace("SYNCHRONIZATION determined policy: {}", policyDesc); } if (synchronizationPolicy == null) { String message = "SYNCHRONIZATION no matching policy for " + applicableShadow + " (" + applicableShadow.asObjectable().getObjectClass() + ") " + " on " + resourceType + ", ignoring change from channel " + change.getSourceChannel(); LOGGER.debug(message); subResult.recordStatus(OperationResultStatus.NOT_APPLICABLE, message); eventInfo.setNoSynchronizationPolicy(); eventInfo.record(task); return; } if (!isSynchronizationEnabled(synchronizationPolicy)) { String message = "SYNCHRONIZATION is not enabled for " + resourceType + " ignoring change from channel " + change.getSourceChannel(); LOGGER.debug(message); subResult.recordStatus(OperationResultStatus.NOT_APPLICABLE, message); eventInfo.setSynchronizationNotEnabled(); eventInfo.record(task); return; } // check if the kind/intent in the syncPolicy satisfy constraints // defined in task if (!satisfyTaskConstraints(synchronizationPolicy, task)) { LOGGER.trace( "SYNCHRONIZATION skipping {} because it does not match kind/intent defined in task", new Object[] { applicableShadow }); subResult.recordStatus(OperationResultStatus.NOT_APPLICABLE, "Skipped because it does not match objectClass/kind/intent"); eventInfo.setDoesNotMatchTaskSpecification(); eventInfo.record(task); return; } if (isProtected((PrismObject<ShadowType>) currentShadow)) { if (StringUtils.isNotBlank(synchronizationPolicy.getIntent())) { List<PropertyDelta<?>> modifications = SynchronizationUtils .createSynchronizationTimestampsDelta(currentShadow); PropertyDelta<String> intentDelta = PropertyDelta.createModificationReplaceProperty( ShadowType.F_INTENT, currentShadow.getDefinition(), synchronizationPolicy.getIntent()); modifications.add(intentDelta); try { repositoryService.modifyObject(ShadowType.class, currentShadow.getOid(), modifications, subResult); task.recordObjectActionExecuted(currentShadow, ChangeType.MODIFY, null); } catch (Throwable t) { task.recordObjectActionExecuted(currentShadow, ChangeType.MODIFY, t); } finally { task.markObjectActionExecutedBoundary(); } } subResult.recordSuccess(); eventInfo.record(task); LOGGER.debug("SYNCHRONIZATION: DONE (dry run) for protected shadow {}", currentShadow); return; } Class<? 
extends FocusType> focusType = determineFocusClass(synchronizationPolicy, resourceType); if (LOGGER.isTraceEnabled()) { LOGGER.trace("Synchronization is enabled, focus class: {}, found applicable policy: {}", focusType, Utils.getPolicyDesc(synchronizationPolicy)); } SynchronizationSituation situation = determineSituation(focusType, change, synchronizationPolicy, configuration.asObjectable(), task, subResult); if (logDebug) { LOGGER.debug("SYNCHRONIZATION: SITUATION: '{}', currentOwner={}, correlatedOwner={}", situation.getSituation().value(), situation.getCurrentOwner(), situation.getCorrelatedOwner()); } else { LOGGER.trace("SYNCHRONIZATION: SITUATION: '{}', currentOwner={}, correlatedOwner={}", situation.getSituation().value(), situation.getCurrentOwner(), situation.getCorrelatedOwner()); } eventInfo.setOriginalSituation(situation.getSituation()); eventInfo.setNewSituation(situation.getSituation()); // overwritten // later // (TODO fix // this!) if (change.isUnrelatedChange() || Utils.isDryRun(task)) { PrismObject object = null; if (change.getCurrentShadow() != null) { object = change.getCurrentShadow(); } else if (change.getOldShadow() != null) { object = change.getOldShadow(); } Collection modifications = SynchronizationUtils .createSynchronizationSituationAndDescriptionDelta(object, situation.getSituation(), task.getChannel(), false); if (StringUtils.isNotBlank(synchronizationPolicy.getIntent())) { modifications.add(PropertyDelta.createModificationReplaceProperty(ShadowType.F_INTENT, object.getDefinition(), synchronizationPolicy.getIntent())); } try { repositoryService.modifyObject(ShadowType.class, object.getOid(), modifications, subResult); task.recordObjectActionExecuted(object, ChangeType.MODIFY, null); } catch (Throwable t) { task.recordObjectActionExecuted(object, ChangeType.MODIFY, t); } finally { task.markObjectActionExecutedBoundary(); } subResult.recordSuccess(); eventInfo.record(task); LOGGER.debug("SYNCHRONIZATION: DONE (dry run) for {}", object); return; } // must be here, because when the reaction has no action, the // situation will be not set. PrismObject<ShadowType> newCurrentShadow = saveSyncMetadata( (PrismObject<ShadowType>) currentShadow, situation, change, synchronizationPolicy, task, parentResult); if (newCurrentShadow != null) { change.setCurrentShadow(newCurrentShadow); } SynchronizationSituationType newSituation = reactToChange(focusType, change, synchronizationPolicy, situation, resourceType, logDebug, configuration, task, subResult); eventInfo.setNewSituation(newSituation); eventInfo.record(task); subResult.computeStatus(); } catch (SystemException ex) { // avoid unnecessary re-wrap eventInfo.setException(ex); eventInfo.record(task); subResult.recordFatalError(ex); throw ex; } catch (Exception ex) { eventInfo.setException(ex); eventInfo.record(task); subResult.recordFatalError(ex); throw new SystemException(ex); } finally { task.markObjectActionExecutedBoundary(); // if (LOGGER.isTraceEnabled()) { // LOGGER.trace(subResult.dump()); // } } LOGGER.debug("SYNCHRONIZATION: DONE for {}", currentShadow); } private boolean satisfyTaskConstraints(ObjectSynchronizationType synchronizationPolicy, Task task) { PrismProperty<ShadowKindType> kind = task.getExtensionProperty(SchemaConstants.MODEL_EXTENSION_KIND); if (kind != null && !kind.isEmpty()) { ShadowKindType kindValue = kind.getRealValue(); ShadowKindType policyKind = synchronizationPolicy.getKind(); if (policyKind == null) { policyKind = ShadowKindType.ACCOUNT; // TODO is this ok? 
[med] } if (!policyKind.equals(kindValue)) { return false; } } PrismProperty<String> intent = task.getExtensionProperty(SchemaConstants.MODEL_EXTENSION_INTENT); if (intent != null && !intent.isEmpty()) { String intentValue = intent.getRealValue(); if (StringUtils.isEmpty(synchronizationPolicy.getIntent())) { return false; } if (!synchronizationPolicy.getIntent().equals(intentValue)) { return false; } } return true; } private boolean isProtected(PrismObject<ShadowType> shadow) { if (shadow == null) { return false; } ShadowType currentShadowType = shadow.asObjectable(); if (currentShadowType.isProtectedObject() == null) { return false; } return currentShadowType.isProtectedObject(); } private <F extends FocusType> Class<F> determineFocusClass( ObjectSynchronizationType synchronizationPolicy, ResourceType resource) throws ConfigurationException { if (synchronizationPolicy == null) { throw new IllegalStateException("synchronizationPolicy is null"); } QName focusTypeQName = synchronizationPolicy.getFocusType(); if (focusTypeQName == null) { return (Class<F>) UserType.class; } ObjectTypes objectType = ObjectTypes.getObjectTypeFromTypeQName(focusTypeQName); if (objectType == null) { throw new ConfigurationException( "Unknown focus type " + focusTypeQName + " in synchronization policy in " + resource); } return (Class<F>) objectType.getClassDefinition(); } @Override public ObjectSynchronizationType determineSynchronizationPolicy(ResourceType resourceType, PrismObject<? extends ShadowType> currentShadow, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException { SynchronizationType synchronization = resourceType.getSynchronization(); if (synchronization == null) { return null; } for (ObjectSynchronizationType objectSynchronization : synchronization.getObjectSynchronization()) { if (isPolicyApplicable(currentShadow, objectSynchronization, resourceType.asPrismObject(), configuration, task, result)) { return objectSynchronization; } } return null; } private boolean isPolicyApplicable(PrismObject<? extends ShadowType> currentShadow, ObjectSynchronizationType synchronizationPolicy, PrismObject<ResourceType> resource, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException { if (!SynchronizationUtils.isPolicyApplicable(currentShadow, synchronizationPolicy, resource)) { return false; } Boolean conditionResult = evaluateSynchronizationPolicyCondition(synchronizationPolicy, currentShadow, resource, configuration, task, result); if (conditionResult != null) { return conditionResult.booleanValue(); } return true; } private Boolean evaluateSynchronizationPolicyCondition(ObjectSynchronizationType synchronizationPolicy, PrismObject<? 
extends ShadowType> currentShadow, PrismObject<ResourceType> resource, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult result) throws SchemaException, ExpressionEvaluationException, ObjectNotFoundException { if (synchronizationPolicy.getCondition() == null) { return null; } ExpressionType conditionExpressionType = synchronizationPolicy.getCondition(); String desc = "condition in object synchronization " + synchronizationPolicy.getName(); ExpressionVariables variables = Utils.getDefaultExpressionVariables(null, currentShadow, null, resource, configuration, null); try { ModelExpressionThreadLocalHolder.pushCurrentTask(task); ModelExpressionThreadLocalHolder.pushCurrentResult(result); PrismPropertyValue<Boolean> evaluateCondition = ExpressionUtil.evaluateCondition(variables, conditionExpressionType, expressionFactory, desc, task, result); return evaluateCondition.getValue(); } finally { ModelExpressionThreadLocalHolder.popCurrentResult(); ModelExpressionThreadLocalHolder.popCurrentTask(); } } private boolean isLogDebug(ResourceObjectShadowChangeDescription change) { // Reconciliation changes are routine. Do not let it polute the // logfiles. return !SchemaConstants.CHANGE_CHANNEL_RECON_URI.equals(change.getSourceChannel()); } private void validate(ResourceObjectShadowChangeDescription change) { Validate.notNull(change, "Resource object shadow change description must not be null."); Validate.isTrue(change.getCurrentShadow() != null || change.getObjectDelta() != null, "Object delta and current shadow are null. At least one must be provided."); Validate.notNull(change.getResource(), "Resource in change must not be null."); if (consistencyChecks) { if (change.getCurrentShadow() != null) { change.getCurrentShadow().checkConsistence(); ShadowUtil.checkConsistence(change.getCurrentShadow(), "current shadow in change description"); } if (change.getObjectDelta() != null) { change.getObjectDelta().checkConsistence(); } } } // @Override // public void notifyFailure(ResourceOperationFailureDescription // failureDescription, // Task task, OperationResult parentResult) { // Validate.notNull(failureDescription, "Resource object shadow failure // description must not be null."); // Validate.notNull(failureDescription.getCurrentShadow(), "Current shadow // in resource object shadow failure description must not be null."); // Validate.notNull(failureDescription.getObjectDelta(), "Delta in resource // object shadow failure description must not be null."); // Validate.notNull(failureDescription.getResource(), "Resource in failure // must not be null."); // Validate.notNull(failureDescription.getResult(), "Result in failure // description must not be null."); // Validate.notNull(parentResult, "Parent operation result must not be // null."); // // LOGGER.debug("SYNCHRONIZATION: received failure notifiation {}", // failureDescription); // // LOGGER.error("Provisioning error: {}", // failureDescription.getResult().getMessage()); // // // TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO // TODO TODO TODO TODO // } private boolean isSynchronizationEnabled(ObjectSynchronizationType synchronization) { if (synchronization == null || synchronization.isEnabled() == null) { return false; } return synchronization.isEnabled(); } /** * XXX: in situation when one account belongs to two different idm users * (repository returns only first user, method * {@link com.evolveum.midpoint.model.api.ModelService#findShadowOwner(String, Task, OperationResult)} * (String, 
com.evolveum.midpoint.schema.result.OperationResult)} ). It * should be changed because otherwise we can't find * {@link SynchronizationSituationType#DISPUTED} situation */ private <F extends FocusType> SynchronizationSituation determineSituation(Class<F> focusType, ResourceObjectShadowChangeDescription change, ObjectSynchronizationType synchronizationPolicy, SystemConfigurationType configurationType, Task task, OperationResult result) { OperationResult subResult = result.createSubresult(CHECK_SITUATION); LOGGER.trace("Determining situation for resource object shadow."); SynchronizationSituation situation = null; try { String shadowOid = getOidFromChange(change); Validate.notEmpty(shadowOid, "Couldn't get resource object shadow oid from change."); PrismObject<F> owner = repositoryService.searchShadowOwner(shadowOid, SelectorOptions.createCollection(GetOperationOptions.createAllowNotFound()), subResult); if (owner != null) { F ownerType = owner.asObjectable(); LOGGER.trace("Shadow OID {} does have owner: {}", shadowOid, ownerType.getName()); SynchronizationSituationType state = null; switch (getModificationType(change)) { case ADD: case MODIFY: // if user is found it means account/group is linked to // resource state = SynchronizationSituationType.LINKED; break; case DELETE: state = SynchronizationSituationType.DELETED; } situation = new SynchronizationSituation<>(ownerType, null, state); } else { LOGGER.trace("Resource object shadow doesn't have owner."); situation = determineSituationWithCorrelation(focusType, change, synchronizationPolicy, owner, configurationType, task, result); } } catch (Exception ex) { LOGGER.error("Error occurred during resource object shadow owner lookup."); throw new SystemException( "Error occurred during resource object shadow owner lookup, reason: " + ex.getMessage(), ex); } finally { subResult.computeStatus(); } return situation; } private String getOidFromChange(ResourceObjectShadowChangeDescription change) { if (change.getCurrentShadow() != null && StringUtils.isNotEmpty(change.getCurrentShadow().getOid())) { return change.getCurrentShadow().getOid(); } if (change.getOldShadow() != null && StringUtils.isNotEmpty(change.getOldShadow().getOid())) { return change.getOldShadow().getOid(); } if (change.getObjectDelta() == null || StringUtils.isEmpty(change.getObjectDelta().getOid())) { throw new IllegalArgumentException( "Oid was not defined in change (not in current, old shadow, delta)."); } return change.getObjectDelta().getOid(); } /** * Tries to match specified focus and shadow. Return true if it matches, * false otherwise. */ @Override public <F extends FocusType> boolean matchUserCorrelationRule(PrismObject<ShadowType> shadow, PrismObject<F> focus, ResourceType resourceType, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult result) throws ConfigurationException, SchemaException, ObjectNotFoundException, ExpressionEvaluationException { ObjectSynchronizationType synchronizationPolicy = determineSynchronizationPolicy(resourceType, shadow, configuration, task, result); Class<F> focusClass; // TODO is this correct? The problem is that synchronizationPolicy can // be null... if (synchronizationPolicy != null) { focusClass = determineFocusClass(synchronizationPolicy, resourceType); } else { focusClass = (Class) focus.asObjectable().getClass(); } return correlationConfirmationEvaluator.matchUserCorrelationRule(focusClass, shadow, focus, synchronizationPolicy, resourceType, configuration == null ? 
null : configuration.asObjectable(), task, result); } /** * account is not linked to user. you have to use correlation and * confirmation rule to be sure user for this account doesn't exists * resourceShadow only contains the data that were in the repository before * the change. But the correlation/confirmation should work on the updated * data. Therefore let's apply the changes before running * correlation/confirmation */ private <F extends FocusType> SynchronizationSituation determineSituationWithCorrelation( Class<F> focusType, ResourceObjectShadowChangeDescription change, ObjectSynchronizationType synchronizationPolicy, PrismObject<F> owner, SystemConfigurationType configurationType, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException { if (ChangeType.DELETE.equals(getModificationType(change))) { // account was deleted and we know it didn't have owner return new SynchronizationSituation<>(owner == null ? null : owner.asObjectable(), null, SynchronizationSituationType.DELETED); } PrismObject<? extends ShadowType> resourceShadow = change.getCurrentShadow(); ObjectDelta syncDelta = change.getObjectDelta(); if (resourceShadow == null && syncDelta != null && ChangeType.ADD.equals(syncDelta.getChangeType())) { LOGGER.trace("Trying to compute current shadow from change delta add."); PrismObject<ShadowType> shadow = syncDelta.computeChangedObject(syncDelta.getObjectToAdd()); resourceShadow = shadow; change.setCurrentShadow(shadow); } Validate.notNull(resourceShadow, "Current shadow must not be null."); ResourceType resource = change.getResource().asObjectable(); validateResourceInShadow(resourceShadow.asObjectable(), resource); SynchronizationSituationType state = null; LOGGER.trace( "SYNCHRONIZATION: CORRELATION: Looking for list of {} objects based on correlation rule.", focusType.getSimpleName()); List<PrismObject<F>> users = correlationConfirmationEvaluator.findFocusesByCorrelationRule(focusType, resourceShadow.asObjectable(), synchronizationPolicy.getCorrelation(), resource, configurationType, task, result); if (users == null) { users = new ArrayList<>(); } if (users.size() > 1) { if (synchronizationPolicy.getConfirmation() == null) { LOGGER.trace("SYNCHRONIZATION: CONFIRMATION: no confirmation defined."); } else { LOGGER.debug( "SYNCHRONIZATION: CONFIRMATION: Checking objects from correlation with confirmation rule."); users = correlationConfirmationEvaluator.findUserByConfirmationRule(focusType, users, resourceShadow.asObjectable(), resource, configurationType, synchronizationPolicy.getConfirmation(), task, result); } } F user = null; switch (users.size()) { case 0: state = SynchronizationSituationType.UNMATCHED; break; case 1: switch (getModificationType(change)) { case ADD: case MODIFY: state = SynchronizationSituationType.UNLINKED; break; case DELETE: state = SynchronizationSituationType.DELETED; break; } user = users.get(0).asObjectable(); break; default: state = SynchronizationSituationType.DISPUTED; } return new SynchronizationSituation(null, user, state); } private void validateResourceInShadow(ShadowType shadow, ResourceType resource) { if (shadow.getResource() != null || shadow.getResourceRef() != null) { return; } ObjectReferenceType reference = new ObjectReferenceType(); reference.setOid(resource.getOid()); reference.setType(ObjectTypes.RESOURCE.getTypeQName()); shadow.setResourceRef(reference); } /** * @param change * @return method checks change type in object delta if available, otherwise * returns {@link 
ChangeType#ADD} */ private ChangeType getModificationType(ResourceObjectShadowChangeDescription change) { if (change.getObjectDelta() != null) { return change.getObjectDelta().getChangeType(); } return ChangeType.ADD; } private <F extends FocusType> SynchronizationSituationType reactToChange(Class<F> focusClass, ResourceObjectShadowChangeDescription change, ObjectSynchronizationType synchronizationPolicy, SynchronizationSituation<F> situation, ResourceType resource, boolean logDebug, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult parentResult) throws ConfigurationException, ObjectNotFoundException, SchemaException, PolicyViolationException, ExpressionEvaluationException, ObjectAlreadyExistsException, CommunicationException, SecurityViolationException { SynchronizationSituationType newSituation = situation.getSituation(); SynchronizationReactionType reactionDefinition = findReactionDefinition(synchronizationPolicy, situation, change.getSourceChannel(), resource); if (reactionDefinition == null) { LOGGER.trace("No reaction is defined for situation {} in {}", situation.getSituation(), resource); return newSituation; } // seems to be unused so commented it out [med] // PrismObject<? extends ObjectType> shadow = null; // if (change.getCurrentShadow() != null) { // shadow = change.getCurrentShadow(); // } else if (change.getOldShadow() != null) { // shadow = change.getOldShadow(); // } Boolean doReconciliation = determineReconciliation(synchronizationPolicy, reactionDefinition); if (doReconciliation == null) { // We have to do reconciliation if we have got a full shadow and no // delta. // There is no other good way how to reflect the changes from the // shadow. if (change.getObjectDelta() == null) { doReconciliation = true; } } Boolean limitPropagation = determinePropagationLimitation(synchronizationPolicy, reactionDefinition, change.getSourceChannel()); ModelExecuteOptions options = new ModelExecuteOptions(); options.setReconcile(doReconciliation); options.setLimitPropagation(limitPropagation); final boolean willSynchronize = isSynchronize(reactionDefinition); LensContext<F> lensContext = null; if (willSynchronize) { lensContext = createLensContext(focusClass, change, reactionDefinition, synchronizationPolicy, situation, options, configuration, parentResult); } if (LOGGER.isTraceEnabled() && lensContext != null) { LOGGER.trace("---[ SYNCHRONIZATION context before action execution ]-------------------------\n" + "{}\n------------------------------------------", lensContext.debugDump()); } if (willSynchronize) { // there's no point in calling executeAction without context - so // the actions are executed only if synchronize == true executeActions(reactionDefinition, lensContext, situation, BeforeAfterType.BEFORE, resource, logDebug, task, parentResult); Iterator<LensProjectionContext> iterator = lensContext.getProjectionContextsIterator(); LensProjectionContext originalProjectionContext = iterator.hasNext() ? iterator.next() : null; try { clockwork.run(lensContext, task, parentResult); } catch (ConfigurationException | ObjectNotFoundException | SchemaException | PolicyViolationException | ExpressionEvaluationException | ObjectAlreadyExistsException | CommunicationException | SecurityViolationException e) { LOGGER.error("SYNCHRONIZATION: Error in synchronization on {} for situation {}: {}: {}. Change was {}", new Object[] {resource, situation.getSituation(), e.getClass().getSimpleName(), e.getMessage(), change, e}); // what to do here? 
We cannot throw the error back. All that the notifyChange method // could do is to convert it to SystemException. But that indicates an internal error and it will // break whatever code called the notifyChange in the first place. We do not want that. // If the clockwork could not do anything with the exception then perhaps nothing can be done at all. // So just log the error (the error should be remembered in the result and task already) // and then just go on. } // note: actions "AFTER" seem to be useless here (basically they // modify lens context - which is relevant only if followed by // clockwork run) executeActions(reactionDefinition, lensContext, situation, BeforeAfterType.AFTER, resource, logDebug, task, parentResult); if (originalProjectionContext != null) { newSituation = originalProjectionContext.getSynchronizationSituationResolved(); } } else { LOGGER.trace("Skipping clockwork run on {} for situation {}, synchronize is set to false.", new Object[] { resource, situation.getSituation() }); } return newSituation; } private Boolean determineReconciliation(ObjectSynchronizationType synchronizationPolicy, SynchronizationReactionType reactionDefinition) { if (reactionDefinition.isReconcile() != null) { return reactionDefinition.isReconcile(); } if (synchronizationPolicy.isReconcile() != null) { return synchronizationPolicy.isReconcile(); } return null; } private Boolean determinePropagationLimitation(ObjectSynchronizationType synchronizationPolicy, SynchronizationReactionType reactionDefinition, String channel) { if (StringUtils.isNotBlank(channel)) { QName channelQName = QNameUtil.uriToQName(channel); // Discovery channel is used when compensating some inconsistent // state. Therefore we do not want to propagate changes to other // resources. We only want to resolve the problem and continue in // previous provisioning/synchronization during which this // compensation was triggered. 
if (SchemaConstants.CHANGE_CHANNEL_DISCOVERY.equals(channelQName) && SynchronizationSituationType.DELETED != reactionDefinition.getSituation()) { return true; } } if (reactionDefinition.isLimitPropagation() != null) { return reactionDefinition.isLimitPropagation(); } if (synchronizationPolicy.isLimitPropagation() != null) { return synchronizationPolicy.isLimitPropagation(); } return null; } @NotNull private <F extends FocusType> LensContext<F> createLensContext(Class<F> focusClass, ResourceObjectShadowChangeDescription change, SynchronizationReactionType reactionDefinition, ObjectSynchronizationType synchronizationPolicy, SynchronizationSituation<F> situation, ModelExecuteOptions options, PrismObject<SystemConfigurationType> configuration, OperationResult parentResult) throws ObjectNotFoundException, SchemaException { LensContext<F> context = contextFactory.createSyncContext(focusClass, change); context.setLazyAuditRequest(true); context.setSystemConfiguration(configuration); context.setOptions(options); ResourceType resource = change.getResource().asObjectable(); if (ModelExecuteOptions.isLimitPropagation(options)) { context.setTriggeredResource(resource); } context.rememberResource(resource); PrismObject<ShadowType> shadow = getShadowFromChange(change); if (InternalsConfig.consistencyChecks) shadow.checkConsistence(); // Projection context ShadowKindType kind = getKind(shadow, synchronizationPolicy); String intent = getIntent(shadow, synchronizationPolicy); boolean thombstone = isThombstone(change); ResourceShadowDiscriminator descr = new ResourceShadowDiscriminator(resource.getOid(), kind, intent, thombstone); LensProjectionContext projectionContext = context.createProjectionContext(descr); projectionContext.setResource(resource); projectionContext.setOid(getOidFromChange(change)); projectionContext.setSynchronizationSituationDetected(situation.getSituation()); // insert object delta if available in change ObjectDelta<? 
extends ShadowType> delta = change.getObjectDelta(); if (delta != null) { projectionContext.setSyncDelta((ObjectDelta<ShadowType>) delta); } else { projectionContext.setSyncAbsoluteTrigger(true); } // we insert account if available in change PrismObject<ShadowType> currentAccount = shadow; if (currentAccount != null) { projectionContext.setLoadedObject(currentAccount); if (!thombstone) { projectionContext.setFullShadow(true); } projectionContext.setFresh(true); } if (delta != null && delta.isDelete()) { projectionContext.setExists(false); } else { projectionContext.setExists(true); } projectionContext.setDoReconciliation(ModelExecuteOptions.isReconcile(options)); // Focus context if (situation.getCurrentOwner() != null) { F focusType = situation.getCurrentOwner(); LensFocusContext<F> focusContext = context.createFocusContext(); PrismObject<F> focusOld = focusType.asPrismObject(); focusContext.setLoadedObject(focusOld); } // Global stuff ObjectReferenceType objectTemplateRef = null; if (reactionDefinition.getObjectTemplateRef() != null) { objectTemplateRef = reactionDefinition.getObjectTemplateRef(); } else if (synchronizationPolicy.getObjectTemplateRef() != null) { objectTemplateRef = synchronizationPolicy.getObjectTemplateRef(); } if (objectTemplateRef != null) { ObjectTemplateType objectTemplate = repositoryService .getObject(ObjectTemplateType.class, objectTemplateRef.getOid(), null, parentResult) .asObjectable(); context.setFocusTemplate(objectTemplate); } return context; } protected PrismObject<ShadowType> getShadowFromChange(ResourceObjectShadowChangeDescription change) { if (change.getCurrentShadow() != null) { return (PrismObject<ShadowType>) change.getCurrentShadow(); } if (change.getOldShadow() != null) { return (PrismObject<ShadowType>) change.getOldShadow(); } return null; } private ShadowKindType getKind(PrismObject<ShadowType> shadow, ObjectSynchronizationType synchronizationPolicy) { ShadowKindType shadowKind = shadow.asObjectable().getKind(); if (shadowKind != null) { return shadowKind; } if (synchronizationPolicy.getKind() != null) { return synchronizationPolicy.getKind(); } return ShadowKindType.ACCOUNT; } private String getIntent(PrismObject<ShadowType> shadow, ObjectSynchronizationType synchronizationPolicy) { String shadowIntent = shadow.asObjectable().getIntent(); if (shadowIntent != null) { return shadowIntent; } return synchronizationPolicy.getIntent(); } private boolean isThombstone(ResourceObjectShadowChangeDescription change) { PrismObject<? extends ShadowType> shadow = null; if (change.getOldShadow() != null) { shadow = change.getOldShadow(); } else if (change.getCurrentShadow() != null) { shadow = change.getCurrentShadow(); } if (shadow != null) { if (shadow.asObjectable().isDead() != null) { return shadow.asObjectable().isDead().booleanValue(); } } ObjectDelta<? 
extends ShadowType> objectDelta = change.getObjectDelta(); if (objectDelta == null) { return false; } return objectDelta.isDelete(); } private boolean isSynchronize(SynchronizationReactionType reactionDefinition) { if (reactionDefinition.isSynchronize() != null) { return reactionDefinition.isSynchronize(); } return !reactionDefinition.getAction().isEmpty(); } private SynchronizationReactionType findReactionDefinition( ObjectSynchronizationType synchronizationPolicy, SynchronizationSituation situation, String channel, ResourceType resource) throws ConfigurationException { SynchronizationReactionType defaultReaction = null; for (SynchronizationReactionType reaction : synchronizationPolicy.getReaction()) { SynchronizationSituationType reactionSituation = reaction.getSituation(); if (reactionSituation == null) { throw new ConfigurationException("No situation defined for a reaction in " + resource); } if (reactionSituation.equals(situation.getSituation())) { if (reaction.getChannel() != null && !reaction.getChannel().isEmpty()) { if (reaction.getChannel().contains("") || reaction.getChannel().contains(null)) { defaultReaction = reaction; } if (reaction.getChannel().contains(channel)) { return reaction; } else { LOGGER.trace("Skipping reaction {} because the channel does not match {}", reaction, channel); continue; } } else { defaultReaction = reaction; } } } LOGGER.trace("Using default reaction {}", defaultReaction); return defaultReaction; } /** * Saves situation, timestamps, kind and intent (if needed) */ private PrismObject<ShadowType> saveSyncMetadata(PrismObject<ShadowType> shadow, SynchronizationSituation situation, ResourceObjectShadowChangeDescription change, ObjectSynchronizationType synchronizationPolicy, Task task, OperationResult parentResult) { if (shadow == null) { return null; } ShadowType shadowType = shadow.asObjectable(); // new situation description List<PropertyDelta<?>> deltas = SynchronizationUtils .createSynchronizationSituationAndDescriptionDelta(shadow, situation.getSituation(), change.getSourceChannel(), true); if (shadowType.getKind() == null) { ShadowKindType kind = synchronizationPolicy.getKind(); if (kind == null) { kind = ShadowKindType.ACCOUNT; } PropertyDelta<ShadowKindType> kindDelta = PropertyDelta.createReplaceDelta(shadow.getDefinition(), ShadowType.F_KIND, kind); deltas.add(kindDelta); } if (shadowType.getIntent() == null) { String intent = synchronizationPolicy.getIntent(); if (intent == null) { intent = SchemaConstants.INTENT_DEFAULT; } PropertyDelta<String> intentDelta = PropertyDelta.createReplaceDelta(shadow.getDefinition(), ShadowType.F_INTENT, intent); deltas.add(intentDelta); } try { repositoryService.modifyObject(shadowType.getClass(), shadow.getOid(), deltas, parentResult); ItemDelta.applyTo(deltas, shadow); task.recordObjectActionExecuted(shadow, ChangeType.MODIFY, null); return shadow; } catch (ObjectNotFoundException ex) { task.recordObjectActionExecuted(shadow, ChangeType.MODIFY, ex); // This may happen e.g. during some recon-livesync interactions. // If the shadow is gone then it is gone. No point in recording the // situation any more. 
LOGGER.debug( "Could not update situation in account, because shadow {} does not exist any more (this may be harmless)", shadow.getOid()); parentResult.getLastSubresult().setStatus(OperationResultStatus.HANDLED_ERROR); } catch (ObjectAlreadyExistsException | SchemaException ex) { task.recordObjectActionExecuted(shadow, ChangeType.MODIFY, ex); LoggingUtils.logException(LOGGER, "### SYNCHRONIZATION # notifyChange(..): Save of synchronization situation failed: could not modify shadow " + shadow.getOid() + ": " + ex.getMessage(), ex); parentResult.recordFatalError("Save of synchronization situation failed: could not modify shadow " + shadow.getOid() + ": " + ex.getMessage(), ex); throw new SystemException("Save of synchronization situation failed: could not modify shadow " + shadow.getOid() + ": " + ex.getMessage(), ex); } catch (Throwable t) { task.recordObjectActionExecuted(shadow, ChangeType.MODIFY, t); throw t; } return null; } private <F extends FocusType> void executeActions(SynchronizationReactionType reactionDef, LensContext<F> context, SynchronizationSituation<F> situation, BeforeAfterType order, ResourceType resource, boolean logDebug, Task task, OperationResult parentResult) throws ConfigurationException, SchemaException { for (SynchronizationActionType actionDef : reactionDef.getAction()) { if ((actionDef.getOrder() == null && order == BeforeAfterType.BEFORE) || (actionDef.getOrder() != null && actionDef.getOrder() == order)) { String handlerUri = actionDef.getHandlerUri(); if (handlerUri == null) { handlerUri = actionDef.getRef(); } if (handlerUri == null) { LOGGER.error("Action definition in resource {} doesn't contain handler URI", resource); throw new ConfigurationException( "Action definition in resource " + resource + " doesn't contain handler URI"); } Action action = actionManager.getActionInstance(handlerUri); if (action == null) { LOGGER.warn("Couldn't create action with uri '{}' in resource {}, skipping action.", new Object[] { handlerUri, resource }); continue; } // TODO: legacy userTemplate Map<QName, Object> parameters = null; if (actionDef.getParameters() != null) { // TODO: process parameters // parameters = actionDef.getParameters().getAny(); } if (logDebug) { LOGGER.debug("SYNCHRONIZATION: ACTION: Executing: {}.", new Object[] { action.getClass() }); } else { LOGGER.trace("SYNCHRONIZATION: ACTION: Executing: {}.", new Object[] { action.getClass() }); } action.handle(context, situation, parameters, task, parentResult); } } } /* * (non-Javadoc) * * @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener# * getName () */ @Override public String getName() { return "model synchronization service"; } private static class SynchronizationEventInformation { private String objectName; private String objectDisplayName; private String objectOid; private Throwable exception; private long started; private String channel; private SynchronizationInformation.Record originalStateIncrement = new SynchronizationInformation.Record(); private SynchronizationInformation.Record newStateIncrement = new SynchronizationInformation.Record(); public SynchronizationEventInformation(PrismObject<? 
extends ShadowType> currentShadow, String channel, Task task) { this.channel = channel; started = System.currentTimeMillis(); if (currentShadow != null) { final ShadowType shadow = currentShadow.asObjectable(); objectName = PolyString.getOrig(shadow.getName()); objectDisplayName = StatisticsUtil.getDisplayName(shadow); objectOid = currentShadow.getOid(); } task.recordSynchronizationOperationStart(objectName, objectDisplayName, ShadowType.COMPLEX_TYPE, objectOid); if (SchemaConstants.CHANGE_CHANNEL_LIVE_SYNC_URI.equals(channel)) { // livesync processing is not controlled via model -> so we // cannot do this in upper layers task.recordIterativeOperationStart(objectName, objectDisplayName, ShadowType.COMPLEX_TYPE, objectOid); } } public void setProtected() { originalStateIncrement.setCountProtected(1); newStateIncrement.setCountProtected(1); } public void setNoSynchronizationPolicy() { originalStateIncrement.setCountNoSynchronizationPolicy(1); newStateIncrement.setCountNoSynchronizationPolicy(1); } public void setSynchronizationNotEnabled() { originalStateIncrement.setCountSynchronizationDisabled(1); newStateIncrement.setCountSynchronizationDisabled(1); } public void setDoesNotMatchTaskSpecification() { originalStateIncrement.setCountNotApplicableForTask(1); newStateIncrement.setCountNotApplicableForTask(1); } private void setSituation(SynchronizationInformation.Record increment, SynchronizationSituationType situation) { if (situation != null) { switch (situation) { case LINKED: increment.setCountLinked(1); break; case UNLINKED: increment.setCountUnlinked(1); break; case DELETED: increment.setCountDeleted(1); break; case DISPUTED: increment.setCountDisputed(1); break; case UNMATCHED: increment.setCountUnmatched(1); break; default: // noop (or throw exception?) } } } public void setOriginalSituation(SynchronizationSituationType situation) { setSituation(originalStateIncrement, situation); } public void setNewSituation(SynchronizationSituationType situation) { newStateIncrement = new SynchronizationInformation.Record(); // brutal // hack, // TODO // fix // this! setSituation(newStateIncrement, situation); } public void setException(Exception ex) { exception = ex; } public void record(Task task) { task.recordSynchronizationOperationEnd(objectName, objectDisplayName, ShadowType.COMPLEX_TYPE, objectOid, started, exception, originalStateIncrement, newStateIncrement); if (SchemaConstants.CHANGE_CHANNEL_LIVE_SYNC_URI.equals(channel)) { // livesync processing is not controlled via model -> so we // cannot do this in upper layers task.recordIterativeOperationEnd(objectName, objectDisplayName, ShadowType.COMPLEX_TYPE, objectOid, started, exception); } } } }
model/model-impl/src/main/java/com/evolveum/midpoint/model/impl/sync/SynchronizationServiceImpl.java
/* * Copyright (c) 2010-2016 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.model.impl.sync; import static com.evolveum.midpoint.schema.internals.InternalsConfig.consistencyChecks; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.xml.namespace.QName; import com.evolveum.midpoint.model.impl.expr.ModelExpressionThreadLocalHolder; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.Validate; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; import com.evolveum.midpoint.common.SynchronizationUtils; import com.evolveum.midpoint.model.api.ModelExecuteOptions; import com.evolveum.midpoint.model.api.PolicyViolationException; import com.evolveum.midpoint.model.common.SystemObjectCache; import com.evolveum.midpoint.model.common.expression.ExpressionFactory; import com.evolveum.midpoint.model.common.expression.ExpressionUtil; import com.evolveum.midpoint.model.common.expression.ExpressionVariables; import com.evolveum.midpoint.model.impl.lens.Clockwork; import com.evolveum.midpoint.model.impl.lens.ContextFactory; import com.evolveum.midpoint.model.impl.lens.LensContext; import com.evolveum.midpoint.model.impl.lens.LensFocusContext; import com.evolveum.midpoint.model.impl.lens.LensProjectionContext; import com.evolveum.midpoint.model.impl.util.Utils; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.PrismProperty; import com.evolveum.midpoint.prism.PrismPropertyValue; import com.evolveum.midpoint.prism.delta.ChangeType; import com.evolveum.midpoint.prism.delta.ItemDelta; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.delta.PropertyDelta; import com.evolveum.midpoint.prism.polystring.PolyString; import com.evolveum.midpoint.provisioning.api.ResourceObjectShadowChangeDescription; import com.evolveum.midpoint.repo.api.RepositoryService; import com.evolveum.midpoint.schema.GetOperationOptions; import com.evolveum.midpoint.schema.ResourceShadowDiscriminator; import com.evolveum.midpoint.schema.SelectorOptions; import com.evolveum.midpoint.schema.constants.ObjectTypes; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.internals.InternalsConfig; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.result.OperationResultStatus; import com.evolveum.midpoint.schema.statistics.StatisticsUtil; import com.evolveum.midpoint.schema.statistics.SynchronizationInformation; import com.evolveum.midpoint.schema.util.ShadowUtil; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.util.QNameUtil; import com.evolveum.midpoint.util.exception.CommunicationException; import com.evolveum.midpoint.util.exception.ConfigurationException; import 
com.evolveum.midpoint.util.exception.ExpressionEvaluationException; import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.exception.SecurityViolationException; import com.evolveum.midpoint.util.exception.SystemException; import com.evolveum.midpoint.util.logging.LoggingUtils; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.BeforeAfterType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ExpressionType; import com.evolveum.midpoint.xml.ns._public.common.common_3.FocusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectReferenceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectSynchronizationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectTemplateType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationActionType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationReactionType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationSituationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemConfigurationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; /** * Synchronization service receives change notifications from provisioning. It * decides which synchronization policy to use and evaluates it (correlation, * confirmation, situations, reaction, ...) * * @author lazyman * @author Radovan Semancik * * Note: don't autowire this bean by implementing class, as it is * proxied by Spring AOP. Use the interface instead. */ @Service(value = "synchronizationService") public class SynchronizationServiceImpl implements SynchronizationService { private static final Trace LOGGER = TraceManager.getTrace(SynchronizationServiceImpl.class); @Autowired(required = true) private ActionManager<Action> actionManager; @Autowired private CorrelationConfirmationEvaluator correlationConfirmationEvaluator; @Autowired(required = true) @Qualifier("cacheRepositoryService") private RepositoryService repositoryService; @Autowired(required = true) private ContextFactory contextFactory; @Autowired(required = true) private Clockwork clockwork; @Autowired(required = true) private ExpressionFactory expressionFactory; @Autowired(required = true) private SystemObjectCache systemObjectCache; @Override public void notifyChange(ResourceObjectShadowChangeDescription change, Task task, OperationResult parentResult) { validate(change); Validate.notNull(parentResult, "Parent operation result must not be null."); boolean logDebug = isLogDebug(change); if (logDebug) { LOGGER.debug("SYNCHRONIZATION: received change notification {}", change); } else { LOGGER.trace("SYNCHRONIZATION: received change notification {}", change); } OperationResult subResult = parentResult.createSubresult(NOTIFY_CHANGE); PrismObject<? extends ShadowType> currentShadow = change.getCurrentShadow(); PrismObject<? 
extends ShadowType> applicableShadow = currentShadow; if (applicableShadow == null) { // We need this e.g. in case of delete applicableShadow = change.getOldShadow(); } SynchronizationEventInformation eventInfo = new SynchronizationEventInformation(applicableShadow, change.getSourceChannel(), task); try { ResourceType resourceType = change.getResource().asObjectable(); PrismObject<SystemConfigurationType> configuration = systemObjectCache.getSystemConfiguration(subResult); ObjectSynchronizationType synchronizationPolicy = determineSynchronizationPolicy(resourceType, applicableShadow, configuration, task, subResult); if (LOGGER.isTraceEnabled()) { String policyDesc = null; if (synchronizationPolicy != null) { if (synchronizationPolicy.getName() == null) { policyDesc = "(kind=" + synchronizationPolicy.getKind() + ", intent=" + synchronizationPolicy.getIntent() + ", objectclass=" + synchronizationPolicy.getObjectClass() + ")"; } else { policyDesc = synchronizationPolicy.getName(); } } LOGGER.trace("SYNCHRONIZATION determined policy: {}", policyDesc); } if (synchronizationPolicy == null) { String message = "SYNCHRONIZATION no matching policy for " + applicableShadow + " (" + applicableShadow.asObjectable().getObjectClass() + ") " + " on " + resourceType + ", ignoring change from channel " + change.getSourceChannel(); LOGGER.debug(message); subResult.recordStatus(OperationResultStatus.NOT_APPLICABLE, message); eventInfo.setNoSynchronizationPolicy(); eventInfo.record(task); return; } if (!isSynchronizationEnabled(synchronizationPolicy)) { String message = "SYNCHRONIZATION is not enabled for " + resourceType + " ignoring change from channel " + change.getSourceChannel(); LOGGER.debug(message); subResult.recordStatus(OperationResultStatus.NOT_APPLICABLE, message); eventInfo.setSynchronizationNotEnabled(); eventInfo.record(task); return; } // check if the kind/intent in the syncPolicy satisfy constraints // defined in task if (!satisfyTaskConstraints(synchronizationPolicy, task)) { LOGGER.trace( "SYNCHRONIZATION skipping {} because it does not match kind/intent defined in task", new Object[] { applicableShadow }); subResult.recordStatus(OperationResultStatus.NOT_APPLICABLE, "Skipped because it does not match objectClass/kind/intent"); eventInfo.setDoesNotMatchTaskSpecification(); eventInfo.record(task); return; } if (isProtected((PrismObject<ShadowType>) currentShadow)) { if (StringUtils.isNotBlank(synchronizationPolicy.getIntent())) { List<PropertyDelta<?>> modifications = SynchronizationUtils .createSynchronizationTimestampsDelta(currentShadow); PropertyDelta<String> intentDelta = PropertyDelta.createModificationReplaceProperty( ShadowType.F_INTENT, currentShadow.getDefinition(), synchronizationPolicy.getIntent()); modifications.add(intentDelta); try { repositoryService.modifyObject(ShadowType.class, currentShadow.getOid(), modifications, subResult); task.recordObjectActionExecuted(currentShadow, ChangeType.MODIFY, null); } catch (Throwable t) { task.recordObjectActionExecuted(currentShadow, ChangeType.MODIFY, t); } finally { task.markObjectActionExecutedBoundary(); } } subResult.recordSuccess(); eventInfo.record(task); LOGGER.debug("SYNCHRONIZATION: DONE (dry run) for protected shadow {}", currentShadow); return; } Class<? 
extends FocusType> focusType = determineFocusClass(synchronizationPolicy, resourceType); if (LOGGER.isTraceEnabled()) { LOGGER.trace("Synchronization is enabled, focus class: {}, found applicable policy: {}", focusType, Utils.getPolicyDesc(synchronizationPolicy)); } SynchronizationSituation situation = determineSituation(focusType, change, synchronizationPolicy, configuration.asObjectable(), task, subResult); if (logDebug) { LOGGER.debug("SYNCHRONIZATION: SITUATION: '{}', currentOwner={}, correlatedOwner={}", situation.getSituation().value(), situation.getCurrentOwner(), situation.getCorrelatedOwner()); } else { LOGGER.trace("SYNCHRONIZATION: SITUATION: '{}', currentOwner={}, correlatedOwner={}", situation.getSituation().value(), situation.getCurrentOwner(), situation.getCorrelatedOwner()); } eventInfo.setOriginalSituation(situation.getSituation()); eventInfo.setNewSituation(situation.getSituation()); // overwritten // later // (TODO fix // this!) if (change.isUnrelatedChange() || Utils.isDryRun(task)) { PrismObject object = null; if (change.getCurrentShadow() != null) { object = change.getCurrentShadow(); } else if (change.getOldShadow() != null) { object = change.getOldShadow(); } Collection modifications = SynchronizationUtils .createSynchronizationSituationAndDescriptionDelta(object, situation.getSituation(), task.getChannel(), false); if (StringUtils.isNotBlank(synchronizationPolicy.getIntent())) { modifications.add(PropertyDelta.createModificationReplaceProperty(ShadowType.F_INTENT, object.getDefinition(), synchronizationPolicy.getIntent())); } try { repositoryService.modifyObject(ShadowType.class, object.getOid(), modifications, subResult); task.recordObjectActionExecuted(object, ChangeType.MODIFY, null); } catch (Throwable t) { task.recordObjectActionExecuted(object, ChangeType.MODIFY, t); } finally { task.markObjectActionExecutedBoundary(); } subResult.recordSuccess(); eventInfo.record(task); LOGGER.debug("SYNCHRONIZATION: DONE (dry run) for {}", object); return; } // must be here, because when the reaction has no action, the // situation will be not set. PrismObject<ShadowType> newCurrentShadow = saveSyncMetadata( (PrismObject<ShadowType>) currentShadow, situation, change, synchronizationPolicy, task, parentResult); if (newCurrentShadow != null) { change.setCurrentShadow(newCurrentShadow); } SynchronizationSituationType newSituation = reactToChange(focusType, change, synchronizationPolicy, situation, resourceType, logDebug, configuration, task, subResult); eventInfo.setNewSituation(newSituation); eventInfo.record(task); subResult.computeStatus(); } catch (SystemException ex) { // avoid unnecessary re-wrap eventInfo.setException(ex); eventInfo.record(task); subResult.recordFatalError(ex); throw ex; } catch (Exception ex) { eventInfo.setException(ex); eventInfo.record(task); subResult.recordFatalError(ex); throw new SystemException(ex); } finally { task.markObjectActionExecutedBoundary(); // if (LOGGER.isTraceEnabled()) { // LOGGER.trace(subResult.dump()); // } } LOGGER.debug("SYNCHRONIZATION: DONE for {}", currentShadow); } private boolean satisfyTaskConstraints(ObjectSynchronizationType synchronizationPolicy, Task task) { PrismProperty<ShadowKindType> kind = task.getExtensionProperty(SchemaConstants.MODEL_EXTENSION_KIND); if (kind != null && !kind.isEmpty()) { ShadowKindType kindValue = kind.getRealValue(); ShadowKindType policyKind = synchronizationPolicy.getKind(); if (policyKind == null) { policyKind = ShadowKindType.ACCOUNT; // TODO is this ok? 
[med] } if (!policyKind.equals(kindValue)) { return false; } } PrismProperty<String> intent = task.getExtensionProperty(SchemaConstants.MODEL_EXTENSION_INTENT); if (intent != null && !intent.isEmpty()) { String intentValue = intent.getRealValue(); if (StringUtils.isEmpty(synchronizationPolicy.getIntent())) { return false; } if (!synchronizationPolicy.getIntent().equals(intentValue)) { return false; } } return true; } private boolean isProtected(PrismObject<ShadowType> shadow) { if (shadow == null) { return false; } ShadowType currentShadowType = shadow.asObjectable(); if (currentShadowType.isProtectedObject() == null) { return false; } return currentShadowType.isProtectedObject(); } private <F extends FocusType> Class<F> determineFocusClass( ObjectSynchronizationType synchronizationPolicy, ResourceType resource) throws ConfigurationException { if (synchronizationPolicy == null) { throw new IllegalStateException("synchronizationPolicy is null"); } QName focusTypeQName = synchronizationPolicy.getFocusType(); if (focusTypeQName == null) { return (Class<F>) UserType.class; } ObjectTypes objectType = ObjectTypes.getObjectTypeFromTypeQName(focusTypeQName); if (objectType == null) { throw new ConfigurationException( "Unknown focus type " + focusTypeQName + " in synchronization policy in " + resource); } return (Class<F>) objectType.getClassDefinition(); } @Override public ObjectSynchronizationType determineSynchronizationPolicy(ResourceType resourceType, PrismObject<? extends ShadowType> currentShadow, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException { SynchronizationType synchronization = resourceType.getSynchronization(); if (synchronization == null) { return null; } for (ObjectSynchronizationType objectSynchronization : synchronization.getObjectSynchronization()) { if (isPolicyApplicable(currentShadow, objectSynchronization, resourceType.asPrismObject(), configuration, task, result)) { return objectSynchronization; } } return null; } private boolean isPolicyApplicable(PrismObject<? extends ShadowType> currentShadow, ObjectSynchronizationType synchronizationPolicy, PrismObject<ResourceType> resource, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException { if (!SynchronizationUtils.isPolicyApplicable(currentShadow, synchronizationPolicy, resource)) { return false; } Boolean conditionResult = evaluateSynchronizationPolicyCondition(synchronizationPolicy, currentShadow, resource, configuration, task, result); if (conditionResult != null) { return conditionResult.booleanValue(); } return true; } private Boolean evaluateSynchronizationPolicyCondition(ObjectSynchronizationType synchronizationPolicy, PrismObject<? 
extends ShadowType> currentShadow, PrismObject<ResourceType> resource, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult result) throws SchemaException, ExpressionEvaluationException, ObjectNotFoundException { if (synchronizationPolicy.getCondition() == null) { return null; } ExpressionType conditionExpressionType = synchronizationPolicy.getCondition(); String desc = "condition in object synchronization " + synchronizationPolicy.getName(); ExpressionVariables variables = Utils.getDefaultExpressionVariables(null, currentShadow, null, resource, configuration, null); try { ModelExpressionThreadLocalHolder.pushCurrentTask(task); ModelExpressionThreadLocalHolder.pushCurrentResult(result); PrismPropertyValue<Boolean> evaluateCondition = ExpressionUtil.evaluateCondition(variables, conditionExpressionType, expressionFactory, desc, task, result); return evaluateCondition.getValue(); } finally { ModelExpressionThreadLocalHolder.popCurrentResult(); ModelExpressionThreadLocalHolder.popCurrentTask(); } } private boolean isLogDebug(ResourceObjectShadowChangeDescription change) { // Reconciliation changes are routine. Do not let it polute the // logfiles. return !SchemaConstants.CHANGE_CHANNEL_RECON_URI.equals(change.getSourceChannel()); } private void validate(ResourceObjectShadowChangeDescription change) { Validate.notNull(change, "Resource object shadow change description must not be null."); Validate.isTrue(change.getCurrentShadow() != null || change.getObjectDelta() != null, "Object delta and current shadow are null. At least one must be provided."); Validate.notNull(change.getResource(), "Resource in change must not be null."); if (consistencyChecks) { if (change.getCurrentShadow() != null) { change.getCurrentShadow().checkConsistence(); ShadowUtil.checkConsistence(change.getCurrentShadow(), "current shadow in change description"); } if (change.getObjectDelta() != null) { change.getObjectDelta().checkConsistence(); } } } // @Override // public void notifyFailure(ResourceOperationFailureDescription // failureDescription, // Task task, OperationResult parentResult) { // Validate.notNull(failureDescription, "Resource object shadow failure // description must not be null."); // Validate.notNull(failureDescription.getCurrentShadow(), "Current shadow // in resource object shadow failure description must not be null."); // Validate.notNull(failureDescription.getObjectDelta(), "Delta in resource // object shadow failure description must not be null."); // Validate.notNull(failureDescription.getResource(), "Resource in failure // must not be null."); // Validate.notNull(failureDescription.getResult(), "Result in failure // description must not be null."); // Validate.notNull(parentResult, "Parent operation result must not be // null."); // // LOGGER.debug("SYNCHRONIZATION: received failure notifiation {}", // failureDescription); // // LOGGER.error("Provisioning error: {}", // failureDescription.getResult().getMessage()); // // // TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO // TODO TODO TODO TODO // } private boolean isSynchronizationEnabled(ObjectSynchronizationType synchronization) { if (synchronization == null || synchronization.isEnabled() == null) { return false; } return synchronization.isEnabled(); } /** * XXX: in situation when one account belongs to two different idm users * (repository returns only first user, method * {@link com.evolveum.midpoint.model.api.ModelService#findShadowOwner(String, Task, OperationResult)} * (String, 
com.evolveum.midpoint.schema.result.OperationResult)} ). It * should be changed because otherwise we can't find * {@link SynchronizationSituationType#DISPUTED} situation */ private <F extends FocusType> SynchronizationSituation determineSituation(Class<F> focusType, ResourceObjectShadowChangeDescription change, ObjectSynchronizationType synchronizationPolicy, SystemConfigurationType configurationType, Task task, OperationResult result) { OperationResult subResult = result.createSubresult(CHECK_SITUATION); LOGGER.trace("Determining situation for resource object shadow."); SynchronizationSituation situation = null; try { String shadowOid = getOidFromChange(change); Validate.notEmpty(shadowOid, "Couldn't get resource object shadow oid from change."); PrismObject<F> owner = repositoryService.searchShadowOwner(shadowOid, SelectorOptions.createCollection(GetOperationOptions.createAllowNotFound()), subResult); if (owner != null) { F ownerType = owner.asObjectable(); LOGGER.trace("Shadow OID {} does have owner: {}", shadowOid, ownerType.getName()); SynchronizationSituationType state = null; switch (getModificationType(change)) { case ADD: case MODIFY: // if user is found it means account/group is linked to // resource state = SynchronizationSituationType.LINKED; break; case DELETE: state = SynchronizationSituationType.DELETED; } situation = new SynchronizationSituation<>(ownerType, null, state); } else { LOGGER.trace("Resource object shadow doesn't have owner."); situation = determineSituationWithCorrelation(focusType, change, synchronizationPolicy, owner, configurationType, task, result); } } catch (Exception ex) { LOGGER.error("Error occurred during resource object shadow owner lookup."); throw new SystemException( "Error occurred during resource object shadow owner lookup, reason: " + ex.getMessage(), ex); } finally { subResult.computeStatus(); } return situation; } private String getOidFromChange(ResourceObjectShadowChangeDescription change) { if (change.getCurrentShadow() != null && StringUtils.isNotEmpty(change.getCurrentShadow().getOid())) { return change.getCurrentShadow().getOid(); } if (change.getOldShadow() != null && StringUtils.isNotEmpty(change.getOldShadow().getOid())) { return change.getOldShadow().getOid(); } if (change.getObjectDelta() == null || StringUtils.isEmpty(change.getObjectDelta().getOid())) { throw new IllegalArgumentException( "Oid was not defined in change (not in current, old shadow, delta)."); } return change.getObjectDelta().getOid(); } /** * Tries to match specified focus and shadow. Return true if it matches, * false otherwise. */ @Override public <F extends FocusType> boolean matchUserCorrelationRule(PrismObject<ShadowType> shadow, PrismObject<F> focus, ResourceType resourceType, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult result) throws ConfigurationException, SchemaException, ObjectNotFoundException, ExpressionEvaluationException { ObjectSynchronizationType synchronizationPolicy = determineSynchronizationPolicy(resourceType, shadow, configuration, task, result); Class<F> focusClass; // TODO is this correct? The problem is that synchronizationPolicy can // be null... if (synchronizationPolicy != null) { focusClass = determineFocusClass(synchronizationPolicy, resourceType); } else { focusClass = (Class) focus.asObjectable().getClass(); } return correlationConfirmationEvaluator.matchUserCorrelationRule(focusClass, shadow, focus, synchronizationPolicy, resourceType, configuration == null ? 
null : configuration.asObjectable(), task, result); } /** * account is not linked to user. you have to use correlation and * confirmation rule to be sure user for this account doesn't exists * resourceShadow only contains the data that were in the repository before * the change. But the correlation/confirmation should work on the updated * data. Therefore let's apply the changes before running * correlation/confirmation */ private <F extends FocusType> SynchronizationSituation determineSituationWithCorrelation( Class<F> focusType, ResourceObjectShadowChangeDescription change, ObjectSynchronizationType synchronizationPolicy, PrismObject<F> owner, SystemConfigurationType configurationType, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException { if (ChangeType.DELETE.equals(getModificationType(change))) { // account was deleted and we know it didn't have owner return new SynchronizationSituation<>(owner == null ? null : owner.asObjectable(), null, SynchronizationSituationType.DELETED); } PrismObject<? extends ShadowType> resourceShadow = change.getCurrentShadow(); ObjectDelta syncDelta = change.getObjectDelta(); if (resourceShadow == null && syncDelta != null && ChangeType.ADD.equals(syncDelta.getChangeType())) { LOGGER.trace("Trying to compute current shadow from change delta add."); PrismObject<? extends ShadowType> shadow = syncDelta .computeChangedObject(syncDelta.getObjectToAdd()); resourceShadow = shadow; change.setCurrentShadow(shadow); } Validate.notNull(resourceShadow, "Current shadow must not be null."); ResourceType resource = change.getResource().asObjectable(); validateResourceInShadow(resourceShadow.asObjectable(), resource); SynchronizationSituationType state = null; LOGGER.trace( "SYNCHRONIZATION: CORRELATION: Looking for list of {} objects based on correlation rule.", focusType.getSimpleName()); List<PrismObject<F>> users = correlationConfirmationEvaluator.findFocusesByCorrelationRule(focusType, resourceShadow.asObjectable(), synchronizationPolicy.getCorrelation(), resource, configurationType, task, result); if (users == null) { users = new ArrayList<>(); } if (users.size() > 1) { if (synchronizationPolicy.getConfirmation() == null) { LOGGER.trace("SYNCHRONIZATION: CONFIRMATION: no confirmation defined."); } else { LOGGER.debug( "SYNCHRONIZATION: CONFIRMATION: Checking objects from correlation with confirmation rule."); users = correlationConfirmationEvaluator.findUserByConfirmationRule(focusType, users, resourceShadow.asObjectable(), resource, configurationType, synchronizationPolicy.getConfirmation(), task, result); } } F user = null; switch (users.size()) { case 0: state = SynchronizationSituationType.UNMATCHED; break; case 1: switch (getModificationType(change)) { case ADD: case MODIFY: state = SynchronizationSituationType.UNLINKED; break; case DELETE: state = SynchronizationSituationType.DELETED; break; } user = users.get(0).asObjectable(); break; default: state = SynchronizationSituationType.DISPUTED; } return new SynchronizationSituation(null, user, state); } private void validateResourceInShadow(ShadowType shadow, ResourceType resource) { if (shadow.getResource() != null || shadow.getResourceRef() != null) { return; } ObjectReferenceType reference = new ObjectReferenceType(); reference.setOid(resource.getOid()); reference.setType(ObjectTypes.RESOURCE.getTypeQName()); shadow.setResourceRef(reference); } /** * @param change * @return method checks change type in object delta if available, otherwise * returns 
{@link ChangeType#ADD} */ private ChangeType getModificationType(ResourceObjectShadowChangeDescription change) { if (change.getObjectDelta() != null) { return change.getObjectDelta().getChangeType(); } return ChangeType.ADD; } private <F extends FocusType> SynchronizationSituationType reactToChange(Class<F> focusClass, ResourceObjectShadowChangeDescription change, ObjectSynchronizationType synchronizationPolicy, SynchronizationSituation<F> situation, ResourceType resource, boolean logDebug, PrismObject<SystemConfigurationType> configuration, Task task, OperationResult parentResult) throws ConfigurationException, ObjectNotFoundException, SchemaException, PolicyViolationException, ExpressionEvaluationException, ObjectAlreadyExistsException, CommunicationException, SecurityViolationException { SynchronizationSituationType newSituation = situation.getSituation(); SynchronizationReactionType reactionDefinition = findReactionDefinition(synchronizationPolicy, situation, change.getSourceChannel(), resource); if (reactionDefinition == null) { LOGGER.trace("No reaction is defined for situation {} in {}", situation.getSituation(), resource); return newSituation; } // seems to be unused so commented it out [med] // PrismObject<? extends ObjectType> shadow = null; // if (change.getCurrentShadow() != null) { // shadow = change.getCurrentShadow(); // } else if (change.getOldShadow() != null) { // shadow = change.getOldShadow(); // } Boolean doReconciliation = determineReconciliation(synchronizationPolicy, reactionDefinition); if (doReconciliation == null) { // We have to do reconciliation if we have got a full shadow and no // delta. // There is no other good way how to reflect the changes from the // shadow. if (change.getObjectDelta() == null) { doReconciliation = true; } } Boolean limitPropagation = determinePropagationLimitation(synchronizationPolicy, reactionDefinition, change.getSourceChannel()); ModelExecuteOptions options = new ModelExecuteOptions(); options.setReconcile(doReconciliation); options.setLimitPropagation(limitPropagation); final boolean willSynchronize = isSynchronize(reactionDefinition); LensContext<F> lensContext = null; if (willSynchronize) { lensContext = createLensContext(focusClass, change, reactionDefinition, synchronizationPolicy, situation, options, configuration, parentResult); } if (LOGGER.isTraceEnabled() && lensContext != null) { LOGGER.trace("---[ SYNCHRONIZATION context before action execution ]-------------------------\n" + "{}\n------------------------------------------", lensContext.debugDump()); } if (willSynchronize) { // there's no point in calling executeAction without context - so // the actions are executed only if synchronize == true executeActions(reactionDefinition, lensContext, situation, BeforeAfterType.BEFORE, resource, logDebug, task, parentResult); Iterator<LensProjectionContext> iterator = lensContext.getProjectionContextsIterator(); LensProjectionContext originalProjectionContext = iterator.hasNext() ? iterator.next() : null; try { clockwork.run(lensContext, task, parentResult); } catch (ConfigurationException | ObjectNotFoundException | SchemaException | PolicyViolationException | ExpressionEvaluationException | ObjectAlreadyExistsException | CommunicationException | SecurityViolationException e) { LOGGER.error("SYNCHRONIZATION: Error in synchronization on {} for situation {}: {}: {}. Change was {}", new Object[] {resource, situation.getSituation(), e.getClass().getSimpleName(), e.getMessage(), change, e}); // what to do here? 
We cannot throw the error back. All that the notifyChange method // could do is to convert it to SystemException. But that indicates an internal error and it will // break whatever code called the notifyChange in the first place. We do not want that. // If the clockwork could not do anything with the exception then perhaps nothing can be done at all. // So just log the error (the error should be remembered in the result and task already) // and then just go on. } // note: actions "AFTER" seem to be useless here (basically they // modify lens context - which is relevant only if followed by // clockwork run) executeActions(reactionDefinition, lensContext, situation, BeforeAfterType.AFTER, resource, logDebug, task, parentResult); if (originalProjectionContext != null) { newSituation = originalProjectionContext.getSynchronizationSituationResolved(); } } else { LOGGER.trace("Skipping clockwork run on {} for situation {}, synchronize is set to false.", new Object[] { resource, situation.getSituation() }); } return newSituation; } private Boolean determineReconciliation(ObjectSynchronizationType synchronizationPolicy, SynchronizationReactionType reactionDefinition) { if (reactionDefinition.isReconcile() != null) { return reactionDefinition.isReconcile(); } if (synchronizationPolicy.isReconcile() != null) { return synchronizationPolicy.isReconcile(); } return null; } private Boolean determinePropagationLimitation(ObjectSynchronizationType synchronizationPolicy, SynchronizationReactionType reactionDefinition, String channel) { if (StringUtils.isNotBlank(channel)) { QName channelQName = QNameUtil.uriToQName(channel); // Discovery channel is used when compensating some inconsistent // state. Therefore we do not want to propagate changes to other // resources. We only want to resolve the problem and continue in // previous provisioning/synchronization during which this // compensation was triggered. 
if (SchemaConstants.CHANGE_CHANNEL_DISCOVERY.equals(channelQName) && SynchronizationSituationType.DELETED != reactionDefinition.getSituation()) { return true; } } if (reactionDefinition.isLimitPropagation() != null) { return reactionDefinition.isLimitPropagation(); } if (synchronizationPolicy.isLimitPropagation() != null) { return synchronizationPolicy.isLimitPropagation(); } return null; } @NotNull private <F extends FocusType> LensContext<F> createLensContext(Class<F> focusClass, ResourceObjectShadowChangeDescription change, SynchronizationReactionType reactionDefinition, ObjectSynchronizationType synchronizationPolicy, SynchronizationSituation<F> situation, ModelExecuteOptions options, PrismObject<SystemConfigurationType> configuration, OperationResult parentResult) throws ObjectNotFoundException, SchemaException { LensContext<F> context = contextFactory.createSyncContext(focusClass, change); context.setLazyAuditRequest(true); context.setSystemConfiguration(configuration); context.setOptions(options); ResourceType resource = change.getResource().asObjectable(); if (ModelExecuteOptions.isLimitPropagation(options)) { context.setTriggeredResource(resource); } context.rememberResource(resource); PrismObject<ShadowType> shadow = getShadowFromChange(change); if (InternalsConfig.consistencyChecks) shadow.checkConsistence(); // Projection context ShadowKindType kind = getKind(shadow, synchronizationPolicy); String intent = getIntent(shadow, synchronizationPolicy); boolean thombstone = isThombstone(change); ResourceShadowDiscriminator descr = new ResourceShadowDiscriminator(resource.getOid(), kind, intent, thombstone); LensProjectionContext projectionContext = context.createProjectionContext(descr); projectionContext.setResource(resource); projectionContext.setOid(getOidFromChange(change)); projectionContext.setSynchronizationSituationDetected(situation.getSituation()); // insert object delta if available in change ObjectDelta<? 
extends ShadowType> delta = change.getObjectDelta(); if (delta != null) { projectionContext.setSyncDelta((ObjectDelta<ShadowType>) delta); } else { projectionContext.setSyncAbsoluteTrigger(true); } // we insert account if available in change PrismObject<ShadowType> currentAccount = shadow; if (currentAccount != null) { projectionContext.setLoadedObject(currentAccount); if (!thombstone) { projectionContext.setFullShadow(true); } projectionContext.setFresh(true); } if (delta != null && delta.isDelete()) { projectionContext.setExists(false); } else { projectionContext.setExists(true); } projectionContext.setDoReconciliation(ModelExecuteOptions.isReconcile(options)); // Focus context if (situation.getCurrentOwner() != null) { F focusType = situation.getCurrentOwner(); LensFocusContext<F> focusContext = context.createFocusContext(); PrismObject<F> focusOld = focusType.asPrismObject(); focusContext.setLoadedObject(focusOld); } // Global stuff ObjectReferenceType objectTemplateRef = null; if (reactionDefinition.getObjectTemplateRef() != null) { objectTemplateRef = reactionDefinition.getObjectTemplateRef(); } else if (synchronizationPolicy.getObjectTemplateRef() != null) { objectTemplateRef = synchronizationPolicy.getObjectTemplateRef(); } if (objectTemplateRef != null) { ObjectTemplateType objectTemplate = repositoryService .getObject(ObjectTemplateType.class, objectTemplateRef.getOid(), null, parentResult) .asObjectable(); context.setFocusTemplate(objectTemplate); } return context; } protected PrismObject<ShadowType> getShadowFromChange(ResourceObjectShadowChangeDescription change) { if (change.getCurrentShadow() != null) { return (PrismObject<ShadowType>) change.getCurrentShadow(); } if (change.getOldShadow() != null) { return (PrismObject<ShadowType>) change.getOldShadow(); } return null; } private ShadowKindType getKind(PrismObject<ShadowType> shadow, ObjectSynchronizationType synchronizationPolicy) { ShadowKindType shadowKind = shadow.asObjectable().getKind(); if (shadowKind != null) { return shadowKind; } if (synchronizationPolicy.getKind() != null) { return synchronizationPolicy.getKind(); } return ShadowKindType.ACCOUNT; } private String getIntent(PrismObject<ShadowType> shadow, ObjectSynchronizationType synchronizationPolicy) { String shadowIntent = shadow.asObjectable().getIntent(); if (shadowIntent != null) { return shadowIntent; } return synchronizationPolicy.getIntent(); } private boolean isThombstone(ResourceObjectShadowChangeDescription change) { PrismObject<? extends ShadowType> shadow = null; if (change.getOldShadow() != null) { shadow = change.getOldShadow(); } else if (change.getCurrentShadow() != null) { shadow = change.getCurrentShadow(); } if (shadow != null) { if (shadow.asObjectable().isDead() != null) { return shadow.asObjectable().isDead().booleanValue(); } } ObjectDelta<? 
extends ShadowType> objectDelta = change.getObjectDelta(); if (objectDelta == null) { return false; } return objectDelta.isDelete(); } private boolean isSynchronize(SynchronizationReactionType reactionDefinition) { if (reactionDefinition.isSynchronize() != null) { return reactionDefinition.isSynchronize(); } return !reactionDefinition.getAction().isEmpty(); } private SynchronizationReactionType findReactionDefinition( ObjectSynchronizationType synchronizationPolicy, SynchronizationSituation situation, String channel, ResourceType resource) throws ConfigurationException { SynchronizationReactionType defaultReaction = null; for (SynchronizationReactionType reaction : synchronizationPolicy.getReaction()) { SynchronizationSituationType reactionSituation = reaction.getSituation(); if (reactionSituation == null) { throw new ConfigurationException("No situation defined for a reaction in " + resource); } if (reactionSituation.equals(situation.getSituation())) { if (reaction.getChannel() != null && !reaction.getChannel().isEmpty()) { if (reaction.getChannel().contains("") || reaction.getChannel().contains(null)) { defaultReaction = reaction; } if (reaction.getChannel().contains(channel)) { return reaction; } else { LOGGER.trace("Skipping reaction {} because the channel does not match {}", reaction, channel); continue; } } else { defaultReaction = reaction; } } } LOGGER.trace("Using default reaction {}", defaultReaction); return defaultReaction; } /** * Saves situation, timestamps, kind and intent (if needed) */ private PrismObject<ShadowType> saveSyncMetadata(PrismObject<ShadowType> shadow, SynchronizationSituation situation, ResourceObjectShadowChangeDescription change, ObjectSynchronizationType synchronizationPolicy, Task task, OperationResult parentResult) { if (shadow == null) { return null; } ShadowType shadowType = shadow.asObjectable(); // new situation description List<PropertyDelta<?>> deltas = SynchronizationUtils .createSynchronizationSituationAndDescriptionDelta(shadow, situation.getSituation(), change.getSourceChannel(), true); if (shadowType.getKind() == null) { ShadowKindType kind = synchronizationPolicy.getKind(); if (kind == null) { kind = ShadowKindType.ACCOUNT; } PropertyDelta<ShadowKindType> kindDelta = PropertyDelta.createReplaceDelta(shadow.getDefinition(), ShadowType.F_KIND, kind); deltas.add(kindDelta); } if (shadowType.getIntent() == null) { String intent = synchronizationPolicy.getIntent(); if (intent == null) { intent = SchemaConstants.INTENT_DEFAULT; } PropertyDelta<String> intentDelta = PropertyDelta.createReplaceDelta(shadow.getDefinition(), ShadowType.F_INTENT, intent); deltas.add(intentDelta); } try { repositoryService.modifyObject(shadowType.getClass(), shadow.getOid(), deltas, parentResult); ItemDelta.applyTo(deltas, shadow); task.recordObjectActionExecuted(shadow, ChangeType.MODIFY, null); return shadow; } catch (ObjectNotFoundException ex) { task.recordObjectActionExecuted(shadow, ChangeType.MODIFY, ex); // This may happen e.g. during some recon-livesync interactions. // If the shadow is gone then it is gone. No point in recording the // situation any more. 
LOGGER.debug( "Could not update situation in account, because shadow {} does not exist any more (this may be harmless)", shadow.getOid()); parentResult.getLastSubresult().setStatus(OperationResultStatus.HANDLED_ERROR); } catch (ObjectAlreadyExistsException | SchemaException ex) { task.recordObjectActionExecuted(shadow, ChangeType.MODIFY, ex); LoggingUtils.logException(LOGGER, "### SYNCHRONIZATION # notifyChange(..): Save of synchronization situation failed: could not modify shadow " + shadow.getOid() + ": " + ex.getMessage(), ex); parentResult.recordFatalError("Save of synchronization situation failed: could not modify shadow " + shadow.getOid() + ": " + ex.getMessage(), ex); throw new SystemException("Save of synchronization situation failed: could not modify shadow " + shadow.getOid() + ": " + ex.getMessage(), ex); } catch (Throwable t) { task.recordObjectActionExecuted(shadow, ChangeType.MODIFY, t); throw t; } return null; } private <F extends FocusType> void executeActions(SynchronizationReactionType reactionDef, LensContext<F> context, SynchronizationSituation<F> situation, BeforeAfterType order, ResourceType resource, boolean logDebug, Task task, OperationResult parentResult) throws ConfigurationException, SchemaException { for (SynchronizationActionType actionDef : reactionDef.getAction()) { if ((actionDef.getOrder() == null && order == BeforeAfterType.BEFORE) || (actionDef.getOrder() != null && actionDef.getOrder() == order)) { String handlerUri = actionDef.getHandlerUri(); if (handlerUri == null) { handlerUri = actionDef.getRef(); } if (handlerUri == null) { LOGGER.error("Action definition in resource {} doesn't contain handler URI", resource); throw new ConfigurationException( "Action definition in resource " + resource + " doesn't contain handler URI"); } Action action = actionManager.getActionInstance(handlerUri); if (action == null) { LOGGER.warn("Couldn't create action with uri '{}' in resource {}, skipping action.", new Object[] { handlerUri, resource }); continue; } // TODO: legacy userTemplate Map<QName, Object> parameters = null; if (actionDef.getParameters() != null) { // TODO: process parameters // parameters = actionDef.getParameters().getAny(); } if (logDebug) { LOGGER.debug("SYNCHRONIZATION: ACTION: Executing: {}.", new Object[] { action.getClass() }); } else { LOGGER.trace("SYNCHRONIZATION: ACTION: Executing: {}.", new Object[] { action.getClass() }); } action.handle(context, situation, parameters, task, parentResult); } } } /* * (non-Javadoc) * * @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener# * getName () */ @Override public String getName() { return "model synchronization service"; } private static class SynchronizationEventInformation { private String objectName; private String objectDisplayName; private String objectOid; private Throwable exception; private long started; private String channel; private SynchronizationInformation.Record originalStateIncrement = new SynchronizationInformation.Record(); private SynchronizationInformation.Record newStateIncrement = new SynchronizationInformation.Record(); public SynchronizationEventInformation(PrismObject<? 
extends ShadowType> currentShadow, String channel, Task task) { this.channel = channel; started = System.currentTimeMillis(); if (currentShadow != null) { final ShadowType shadow = currentShadow.asObjectable(); objectName = PolyString.getOrig(shadow.getName()); objectDisplayName = StatisticsUtil.getDisplayName(shadow); objectOid = currentShadow.getOid(); } task.recordSynchronizationOperationStart(objectName, objectDisplayName, ShadowType.COMPLEX_TYPE, objectOid); if (SchemaConstants.CHANGE_CHANNEL_LIVE_SYNC_URI.equals(channel)) { // livesync processing is not controlled via model -> so we // cannot do this in upper layers task.recordIterativeOperationStart(objectName, objectDisplayName, ShadowType.COMPLEX_TYPE, objectOid); } } public void setProtected() { originalStateIncrement.setCountProtected(1); newStateIncrement.setCountProtected(1); } public void setNoSynchronizationPolicy() { originalStateIncrement.setCountNoSynchronizationPolicy(1); newStateIncrement.setCountNoSynchronizationPolicy(1); } public void setSynchronizationNotEnabled() { originalStateIncrement.setCountSynchronizationDisabled(1); newStateIncrement.setCountSynchronizationDisabled(1); } public void setDoesNotMatchTaskSpecification() { originalStateIncrement.setCountNotApplicableForTask(1); newStateIncrement.setCountNotApplicableForTask(1); } private void setSituation(SynchronizationInformation.Record increment, SynchronizationSituationType situation) { if (situation != null) { switch (situation) { case LINKED: increment.setCountLinked(1); break; case UNLINKED: increment.setCountUnlinked(1); break; case DELETED: increment.setCountDeleted(1); break; case DISPUTED: increment.setCountDisputed(1); break; case UNMATCHED: increment.setCountUnmatched(1); break; default: // noop (or throw exception?) } } } public void setOriginalSituation(SynchronizationSituationType situation) { setSituation(originalStateIncrement, situation); } public void setNewSituation(SynchronizationSituationType situation) { newStateIncrement = new SynchronizationInformation.Record(); // brutal // hack, // TODO // fix // this! setSituation(newStateIncrement, situation); } public void setException(Exception ex) { exception = ex; } public void record(Task task) { task.recordSynchronizationOperationEnd(objectName, objectDisplayName, ShadowType.COMPLEX_TYPE, objectOid, started, exception, originalStateIncrement, newStateIncrement); if (SchemaConstants.CHANGE_CHANNEL_LIVE_SYNC_URI.equals(channel)) { // livesync processing is not controlled via model -> so we // cannot do this in upper layers task.recordIterativeOperationEnd(objectName, objectDisplayName, ShadowType.COMPLEX_TYPE, objectOid, started, exception); } } } }
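A minimal, self-contained sketch of the reaction-selection rule used by findReactionDefinition above: among reactions for the matching situation, a reaction that lists the current channel wins, otherwise a channel-less reaction serves as the default. The Reaction type and all names below are hypothetical stand-ins for illustration, not the midPoint SynchronizationReactionType API.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Hypothetical stand-in for a reaction definition: a situation name plus a channel list.
final class Reaction {
    final String name;
    final String situation;
    final List<String> channels; // empty list means "any channel"

    Reaction(String name, String situation, List<String> channels) {
        this.name = name;
        this.situation = situation;
        this.channels = channels;
    }
}

public final class ReactionSelector {

    // Returns the channel-specific reaction when one matches, otherwise the
    // channel-less default for the situation, otherwise null.
    static Reaction findReaction(List<Reaction> reactions, String situation, String channel) {
        Reaction defaultReaction = null;
        for (Reaction r : reactions) {
            if (!r.situation.equals(situation)) {
                continue;
            }
            if (r.channels.isEmpty()) {
                defaultReaction = r;      // candidate default for this situation
            } else if (r.channels.contains(channel)) {
                return r;                 // exact channel match wins immediately
            }
        }
        return defaultReaction;
    }

    public static void main(String[] args) {
        List<Reaction> reactions = Arrays.asList(
                new Reaction("linked-any", "LINKED", Collections.<String>emptyList()),
                new Reaction("linked-livesync", "LINKED", Arrays.asList("liveSync")));
        System.out.println(findReaction(reactions, "LINKED", "liveSync").name); // linked-livesync
        System.out.println(findReaction(reactions, "LINKED", "import").name);   // linked-any
    }
}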
Post-merge fix
model/model-impl/src/main/java/com/evolveum/midpoint/model/impl/sync/SynchronizationServiceImpl.java
Post-merge fix
Java
apache-2.0
6d2c712aa8260e7602271ea64df187022d111dd5
0
apache/uima-uimafit,apache/uima-uimafit,apache/uima-uimafit
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.fit.component.initialize; import static org.apache.uima.fit.factory.ExternalResourceFactory.PREFIX_SEPARATOR; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.WeakHashMap; import org.apache.uima.UimaContext; import org.apache.uima.UimaContextAdmin; import org.apache.uima.fit.component.ExternalResourceAware; import org.apache.uima.fit.descriptor.ExternalResource; import org.apache.uima.fit.descriptor.ExternalResourceLocator; import org.apache.uima.fit.internal.ReflectionUtil; import org.apache.uima.fit.internal.ResourceList; import org.apache.uima.resource.ResourceAccessException; import org.apache.uima.resource.ResourceInitializationException; import org.apache.uima.resource.ResourceManager; import org.apache.uima.resource.impl.ResourceManager_impl; import org.springframework.beans.SimpleTypeConverter; /** * Configurator class for {@link ExternalResource} annotations. */ public final class ExternalResourceInitializer { private static final Object INITIALIZED = new Object(); private static Map<Object, Object> initializedResources = new WeakHashMap<Object, Object>(); private ExternalResourceInitializer() { // No instances } /** * Configure a component from the given context. * @param object * the component. * @param context * the UIMA context. * * @param <T> * the component type. * @throws ResourceInitializationException * if the external resource cannot be configured. */ public static <T> void initialize(T object, UimaContext context) throws ResourceInitializationException { configure(context, object.getClass(), object.getClass(), object); } /** * Helper method for recursively configuring super-classes. * * @param <T> * the component type. * @param context * the context containing the resource bindings. * @param baseCls * the class on which configuration started. * @param cls * the class currently being configured. * @param object * the object being configured. * @throws ResourceInitializationException * if required resources could not be bound. */ private static <T> void configure(UimaContext context, Class<?> baseCls, Class<?> cls, T object) throws ResourceInitializationException { if (cls.getSuperclass() != null) { configure(context, baseCls, cls.getSuperclass(), object); } else { // Try to initialize the external resources only once, not for each step of the // class hierarchy of a component. 
initializeNestedResources(context); } for (Field field : cls.getDeclaredFields()) { if (!ReflectionUtil.isAnnotationPresent(field, ExternalResource.class)) { continue; } ExternalResource era = ReflectionUtil.getAnnotation(field, ExternalResource.class); // Get the resource key. If it is a nested resource, also get the prefix. String key = era.key(); if (key.length() == 0) { key = field.getName(); } if (object instanceof ExternalResourceAware) { String prefix = ((ExternalResourceAware) object).getResourceName(); if (prefix != null) { key = prefix + PREFIX_SEPARATOR + key; } } // Obtain the resource Object value = getResourceObject(context, key); if (value instanceof ExternalResourceLocator) { value = ((ExternalResourceLocator) value).getResource(); } // Sanity checks if (value == null && era.mandatory()) { throw new ResourceInitializationException(new IllegalStateException("Mandatory resource [" + key + "] is not set on [" + baseCls + "]")); } // Now record the setting and optionally apply it to the given // instance. if (value != null) { field.setAccessible(true); try { if (value instanceof ResourceList) { // Value is a multi-valued resource ResourceList resList = (ResourceList) value; // We cannot do this in ResourceList because the resource doesn't have access to // the UIMA context we use here. Resources are initialize with their own contexts // by the UIMA framework! List<Object> elements = new ArrayList<Object>(); for (int i = 0; i < resList.getSize(); i++) { Object elementValue = getResourceObject(context, resList.getResourceName() + PREFIX_SEPARATOR + ResourceList.ELEMENT_KEY + "[" + i + "]"); elements.add(elementValue); } SimpleTypeConverter converter = new SimpleTypeConverter(); value = converter.convertIfNecessary(elements, field.getType()); } try { field.set(object, value); } catch (IllegalAccessException e) { throw new ResourceInitializationException(e); } } finally { field.setAccessible(false); } } } } private static Object getResourceObject(UimaContext aContext, String aKey) throws ResourceInitializationException { Object value; try { value = aContext.getResourceObject(aKey); } catch (ResourceAccessException e) { throw new ResourceInitializationException(e); } return value; } /** * Scan the context and initialize external resources injected into other external resources. * * @param aContext * the UIMA context. */ private static void initializeNestedResources(UimaContext aContext) throws ResourceInitializationException { List<ExternalResourceAware> awareResources = new ArrayList<ExternalResourceAware>(); // Initialize the resources - each resource must only be initialized once. We remember // if a resource has already been initialized in a weak hash map, so we automatically // forget about resources that are garbage collected. for (Object r : getResources(aContext)) { synchronized (initializedResources) { if (r instanceof ExternalResourceAware && !initializedResources.containsKey(r)) { // Already mark the resource as initialized so we do not run into an // endless recursive loop when initialize() is called again. initializedResources.put(r, INITIALIZED); initialize(r, aContext); awareResources.add((ExternalResourceAware) r); } } } // Notify the resources after everything has been configured for (ExternalResourceAware res : awareResources) { res.afterResourcesInitialized(); } } /** * Get all resources declared in the context. 
*/ private static Collection<?> getResources(UimaContext aContext) throws ResourceInitializationException { if (!(aContext instanceof UimaContextAdmin)) { return Collections.emptyList(); } ResourceManager resMgr = ((UimaContextAdmin) aContext).getResourceManager(); if (!(resMgr instanceof ResourceManager_impl)) { // The resource list is now obtained through ResourceManager.getExternalResources() // (UIMA-2903), so the old reflection-based access to the internal resource map is no // longer needed. If the resource manager is not derived from the default UIMA resource // manager, then we cannot really do anything here. throw new IllegalStateException("Unsupported resource manager implementation [" + resMgr.getClass() + "]"); } return resMgr.getExternalResources(); } }
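A minimal sketch of the once-only initialization guard that ExternalResourceInitializer implements with its initializedResources WeakHashMap: a resource is marked as initialized before its initialization runs, so recursive injection cannot loop, and weak keys keep the guard from pinning resources in memory. The Resource interface and class names here are hypothetical, not part of the uimaFIT API.

import java.util.Collections;
import java.util.Map;
import java.util.WeakHashMap;

// Hypothetical resource type used for illustration only.
interface Resource {
    void initialize();
}

public final class OnceOnlyInitializer {

    private static final Object INITIALIZED = new Object();

    // Weak keys: entries disappear automatically once a resource is garbage
    // collected, so the guard does not keep resources alive.
    private static final Map<Resource, Object> initialized =
            Collections.synchronizedMap(new WeakHashMap<Resource, Object>());

    static void initializeOnce(Resource r) {
        // Mark the resource as initialized before calling initialize() so that a
        // recursive call (a resource injecting another resource) cannot loop forever.
        if (initialized.putIfAbsent(r, INITIALIZED) == null) {
            r.initialize();
        }
    }

    public static void main(String[] args) {
        Resource r = () -> System.out.println("initialized");
        initializeOnce(r); // prints once
        initializeOnce(r); // no-op on the second call
    }
}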
uimafit-core/src/main/java/org/apache/uima/fit/component/initialize/ExternalResourceInitializer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.fit.component.initialize; import static org.apache.uima.fit.factory.ExternalResourceFactory.PREFIX_SEPARATOR; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.WeakHashMap; import org.apache.uima.UimaContext; import org.apache.uima.UimaContextAdmin; import org.apache.uima.fit.component.ExternalResourceAware; import org.apache.uima.fit.descriptor.ExternalResource; import org.apache.uima.fit.descriptor.ExternalResourceLocator; import org.apache.uima.fit.internal.ReflectionUtil; import org.apache.uima.fit.internal.ResourceList; import org.apache.uima.resource.ResourceAccessException; import org.apache.uima.resource.ResourceInitializationException; import org.apache.uima.resource.ResourceManager; import org.apache.uima.resource.impl.ResourceManager_impl; import org.springframework.beans.SimpleTypeConverter; /** * Configurator class for {@link ExternalResource} annotations. */ public final class ExternalResourceInitializer { private static final Object INITIALIZED = new Object(); private static Map<Object, Object> initializedResources = new WeakHashMap<Object, Object>(); private ExternalResourceInitializer() { // No instances } /** * Configure a component from the given context. * @param object * the component. * @param context * the UIMA context. * * @param <T> * the component type. * @throws ResourceInitializationException * if the external resource cannot be configured. */ public static <T> void initialize(T object, UimaContext context) throws ResourceInitializationException { configure(context, object.getClass(), object.getClass(), object); } /** * Helper method for recursively configuring super-classes. * * @param <T> * the component type. * @param context * the context containing the resource bindings. * @param baseCls * the class on which configuration started. * @param cls * the class currently being configured. * @param object * the object being configured. * @throws ResourceInitializationException * if required resources could not be bound. */ private static <T> void configure(UimaContext context, Class<?> baseCls, Class<?> cls, T object) throws ResourceInitializationException { if (cls.getSuperclass() != null) { configure(context, baseCls, cls.getSuperclass(), object); } else { // Try to initialize the external resources only once, not for each step of the // class hierarchy of a component. 
initializeNestedResources(context); } for (Field field : cls.getDeclaredFields()) { if (!ReflectionUtil.isAnnotationPresent(field, ExternalResource.class)) { continue; } ExternalResource era = ReflectionUtil.getAnnotation(field, ExternalResource.class); // Get the resource key. If it is a nested resource, also get the prefix. String key = era.key(); if (key.length() == 0) { key = field.getName(); } if (object instanceof ExternalResourceAware) { String prefix = ((ExternalResourceAware) object).getResourceName(); if (prefix != null) { key = prefix + PREFIX_SEPARATOR + key; } } // Obtain the resource Object value = getResourceObject(context, key); if (value instanceof ExternalResourceLocator) { value = ((ExternalResourceLocator) value).getResource(); } // Sanity checks if (value == null && era.mandatory()) { throw new ResourceInitializationException(new IllegalStateException("Mandatory resource [" + key + "] is not set on [" + baseCls + "]")); } // Now record the setting and optionally apply it to the given // instance. if (value != null) { field.setAccessible(true); try { if (value instanceof ResourceList) { // Value is a multi-valued resource ResourceList resList = (ResourceList) value; // We cannot do this in ResourceList because the resource doesn't have access to // the UIMA context we use here. Resources are initialize with their own contexts // by the UIMA framework! List<Object> elements = new ArrayList<Object>(); for (int i = 0; i < resList.getSize(); i++) { Object elementValue = getResourceObject(context, resList.getResourceName() + PREFIX_SEPARATOR + ResourceList.ELEMENT_KEY + "[" + i + "]"); elements.add(elementValue); } SimpleTypeConverter converter = new SimpleTypeConverter(); value = converter.convertIfNecessary(elements, field.getType()); } try { field.set(object, value); } catch (IllegalAccessException e) { throw new ResourceInitializationException(e); } } finally { field.setAccessible(false); } } } } private static Object getResourceObject(UimaContext aContext, String aKey) throws ResourceInitializationException { Object value; try { value = aContext.getResourceObject(aKey); } catch (ResourceAccessException e) { throw new ResourceInitializationException(e); } return value; } /** * Scan the context and initialize external resources injected into other external resources. * * @param aContext * the UIMA context. */ private static void initializeNestedResources(UimaContext aContext) throws ResourceInitializationException { List<ExternalResourceAware> awareResources = new ArrayList<ExternalResourceAware>(); // Initialize the resources - each resource must only be initialized once. We remember // if a resource has already been initialized in a weak hash map, so we automatically // forget about resources that are garbage collected. for (Object r : getResources(aContext)) { synchronized (initializedResources) { if (r instanceof ExternalResourceAware && !initializedResources.containsKey(r)) { // Already mark the resource as initialized so we do not run into an // endless recursive loop when initialize() is called again. initializedResources.put(r, INITIALIZED); initialize(r, aContext); awareResources.add((ExternalResourceAware) r); } } } // Notify the resources after everything has been configured for (ExternalResourceAware res : awareResources) { res.afterResourcesInitialized(); } } /** * Get all resources declared in the context. 
*/ private static Collection<?> getResources(UimaContext aContext) throws ResourceInitializationException { if (!(aContext instanceof UimaContextAdmin)) { return Collections.emptyList(); } ResourceManager resMgr = ((UimaContextAdmin) aContext).getResourceManager(); if (!(resMgr instanceof ResourceManager_impl)) { // Unfortunately there is not official way to access the list of resources. Thus we // have to rely on the UIMA implementation details and access the internal resource // map via reflection. If the resource manager is not derived from the default // UIMA resource manager, then we cannot really do anything here. throw new IllegalStateException("Unsupported resource manager implementation [" + resMgr.getClass() + "]"); } // UIMA-2903 - List resources in a ResourceManager / remove hack in uimaFIT // This is how we do it after upgrading to UIMA 2.10.0 // return resMgr.getExternalResources(); // For UIMA 2.9.0 and before, we need to do this Field resourceMapField = null; try { // Fetch the list of resources resourceMapField = ReflectionUtil.getField(resMgr, "mResourceMap"); resourceMapField.setAccessible(true); @SuppressWarnings("unchecked") Map<String, Object> resources = (Map<String, Object>) resourceMapField.get(resMgr); return resources.values(); } catch (SecurityException e) { throw new ResourceInitializationException(e); } catch (NoSuchFieldException e) { throw new ResourceInitializationException(e); } catch (IllegalArgumentException e) { throw new ResourceInitializationException(e); } catch (IllegalAccessException e) { throw new ResourceInitializationException(e); } finally { if (resourceMapField != null) { resourceMapField.setAccessible(false); } } } }
[UIMA-2903] List resources in a ResourceManager / remove hack in uimaFIT - Finally remove the hack in uimaFIT
uimafit-core/src/main/java/org/apache/uima/fit/component/initialize/ExternalResourceInitializer.java
[UIMA-2903] List resources in a ResourceManager / remove hack in uimaFIT
Java
apache-2.0
2d008623f479edd3aa54fd9395463f8cf82f0984
0
atomix/atomix,kuujo/copycat,kuujo/copycat,atomix/atomix
/* * Copyright 2017-present Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.protocols.gossip.protocol; import io.atomix.utils.Identifier; import java.util.concurrent.CompletableFuture; /** * Anti-entropy protocol dispatcher. */ public interface AntiEntropyProtocolDispatcher<T extends Identifier> extends GossipProtocolDispatcher<T> { /** * Sends an anti-entropy advertisement. * * @param identifier the location to which to send the advertisement * @param advertisement the anti-entropy advertisement to send * @return a future to be completed with the advertisement response */ <K> CompletableFuture<AntiEntropyResponse<K>> advertise(T identifier, AntiEntropyAdvertisement<K> advertisement); }
protocols/gossip/src/main/java/io/atomix/protocols/gossip/protocol/AntiEntropyProtocolDispatcher.java
/* * Copyright 2017-present Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.protocols.gossip.protocol; import io.atomix.cluster.NodeId; import io.atomix.utils.Identifier; import java.util.concurrent.CompletableFuture; /** * Anti-entropy protocol dispatcher. */ public interface AntiEntropyProtocolDispatcher<T extends Identifier> extends GossipProtocolDispatcher<T> { /** * Sends an anti-entropy advertisement. * * @param identifier the location to which to send the advertisement * @param advertisement the anti-entropy advertisement to send * @return a future to be completed with the advertisement response */ <K> CompletableFuture<AntiEntropyResponse<K>> advertise(T identifier, AntiEntropyAdvertisement<K> advertisement); }
Remove cluster dependency from gossip protocols.
protocols/gossip/src/main/java/io/atomix/protocols/gossip/protocol/AntiEntropyProtocolDispatcher.java
Remove cluster dependency from gossip protocols.
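To illustrate why parameterizing the dispatcher over T extends Identifier (instead of hard-coding a cluster NodeId) removes the cluster dependency, here is a heavily simplified sketch with hypothetical stand-in types. It is not the Atomix API; advertisements and responses are reduced to plain strings and the dispatcher is purely in-memory.

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Hypothetical minimal counterparts of the real types, for illustration only.
interface Identifier { String id(); }

final class MemberId implements Identifier {
    private final String id;
    MemberId(String id) { this.id = id; }
    public String id() { return id; }
}

// The protocol layer only knows "some identifier type T", never a cluster NodeId.
interface AdvertisementDispatcher<T extends Identifier> {
    CompletableFuture<String> advertise(T target, String advertisement);
}

// In-memory dispatcher: routes advertisements to registered handlers by identifier.
final class LocalDispatcher<T extends Identifier> implements AdvertisementDispatcher<T> {
    private final Map<String, Function<String, String>> handlers = new ConcurrentHashMap<>();

    void register(T target, Function<String, String> handler) {
        handlers.put(target.id(), handler);
    }

    @Override
    public CompletableFuture<String> advertise(T target, String advertisement) {
        Function<String, String> handler = handlers.get(target.id());
        if (handler == null) {
            CompletableFuture<String> failed = new CompletableFuture<>();
            failed.completeExceptionally(new IllegalStateException("no handler for " + target.id()));
            return failed;
        }
        return CompletableFuture.completedFuture(handler.apply(advertisement));
    }
}

public final class DispatcherDemo {
    public static void main(String[] args) {
        LocalDispatcher<MemberId> dispatcher = new LocalDispatcher<>();
        dispatcher.register(new MemberId("a"), ad -> "ack:" + ad);
        System.out.println(dispatcher.advertise(new MemberId("a"), "digest").join()); // ack:digest
    }
}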
Java
apache-2.0
2e641a66fdd823a351d2309d884d4364bf3c0f1a
0
killbill/killbill,killbill/killbill,killbill/killbill,killbill/killbill,sbrossie/killbill,killbill/killbill,sbrossie/killbill,sbrossie/killbill,sbrossie/killbill,sbrossie/killbill
/* * Copyright 2010-2013 Ning, Inc. * Copyright 2014-2015 Groupon, Inc * Copyright 2014-2015 The Billing Project, LLC * * The Billing Project licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.server.notifications; import java.io.IOException; import java.util.List; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import javax.inject.Inject; import org.joda.time.DateTime; import org.killbill.billing.ObjectType; import org.killbill.billing.callcontext.InternalTenantContext; import org.killbill.billing.jaxrs.json.NotificationJson; import org.killbill.billing.notification.plugin.api.ExtBusEvent; import org.killbill.billing.platform.api.KillbillService.KILLBILL_SERVICES; import org.killbill.billing.server.DefaultServerService; import org.killbill.billing.tenant.api.TenantApiException; import org.killbill.billing.tenant.api.TenantKV.TenantKey; import org.killbill.billing.tenant.api.TenantUserApi; import org.killbill.billing.util.callcontext.CallContextFactory; import org.killbill.billing.util.callcontext.InternalCallContextFactory; import org.killbill.billing.util.callcontext.TenantContext; import org.killbill.billing.util.config.definition.NotificationConfig; import org.killbill.clock.Clock; import org.killbill.notificationq.api.NotificationQueue; import org.killbill.notificationq.api.NotificationQueueService; import org.killbill.notificationq.api.NotificationQueueService.NoSuchNotificationQueue; import org.skife.config.TimeSpan; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.ning.http.client.AsyncCompletionHandler; import com.ning.http.client.AsyncHttpClient; import com.ning.http.client.AsyncHttpClient.BoundRequestBuilder; import com.ning.http.client.AsyncHttpClientConfig; import com.ning.http.client.ListenableFuture; import com.ning.http.client.Response; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.eventbus.AllowConcurrentEvents; import com.google.common.eventbus.Subscribe; public class PushNotificationListener { private static final Logger log = LoggerFactory.getLogger(PushNotificationListener.class); @VisibleForTesting public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type"; @VisibleForTesting public static final String CONTENT_TYPE_JSON = "application/json; charset=UTF-8"; private static final int TIMEOUT_NOTIFICATION = 15; // 15 seconds private final TenantUserApi tenantApi; private final CallContextFactory contextFactory; private final AsyncHttpClient httpClient; private final ObjectMapper mapper; private final NotificationQueueService notificationQueueService; private final InternalCallContextFactory internalCallContextFactory; private final Clock clock; private final NotificationConfig notificationConfig; @Inject public PushNotificationListener(final ObjectMapper mapper, final 
TenantUserApi tenantApi, final CallContextFactory contextFactory, final NotificationQueueService notificationQueueService, final InternalCallContextFactory internalCallContextFactory, final Clock clock, final NotificationConfig notificationConfig) { this.httpClient = new AsyncHttpClient(new AsyncHttpClientConfig.Builder() .setConnectTimeout(TIMEOUT_NOTIFICATION * 1000) .setRequestTimeout(TIMEOUT_NOTIFICATION * 1000).build()); this.tenantApi = tenantApi; this.contextFactory = contextFactory; this.mapper = mapper; this.notificationQueueService = notificationQueueService; this.internalCallContextFactory = internalCallContextFactory; this.clock = clock; this.notificationConfig = notificationConfig; } @AllowConcurrentEvents @Subscribe public void triggerPushNotifications(final ExtBusEvent event) { final TenantContext context = contextFactory.createTenantContext(event.getAccountId(), event.getTenantId()); try { final List<String> callbacks = getCallbacksForTenant(context); if (callbacks.isEmpty()) { // Optimization - see https://github.com/killbill/killbill/issues/297 return; } dispatchCallback(event.getTenantId(), event, callbacks); } catch (final TenantApiException e) { log.warn("Failed to retrieve push notification callback for tenant {}", event.getTenantId()); } catch (final IOException e) { log.warn("Failed to retrieve push notification callback for tenant {}", event.getTenantId()); } } public void shutdown() { httpClient.close(); } private void dispatchCallback(final UUID tenantId, final ExtBusEvent event, final Iterable<String> callbacks) throws IOException { final NotificationJson notification = new NotificationJson(event); final String body = mapper.writeValueAsString(notification); for (final String cur : callbacks) { doPost(tenantId, cur, body, notification, TIMEOUT_NOTIFICATION, 0); } } private boolean doPost(final UUID tenantId, final String url, final String body, final NotificationJson notification, final int timeoutSec, final int attemptRetryNumber) { log.info("Sending push notification url='{}', body='{}', attemptRetryNumber='{}'", url, body, attemptRetryNumber); final BoundRequestBuilder builder = httpClient.preparePost(url); builder.setBody(body == null ? "{}" : body); builder.addHeader(HTTP_HEADER_CONTENT_TYPE, CONTENT_TYPE_JSON); final Response response; try { final ListenableFuture<Response> futureStatus = builder.execute(new AsyncCompletionHandler<Response>() { @Override public Response onCompleted(final Response response) throws Exception { return response; } }); response = futureStatus.get(timeoutSec, TimeUnit.SECONDS); } catch (final Exception e) { log.warn("Failed to push notification url='{}', tenantId='{}'", url, tenantId, e); saveRetryPushNotificationInQueue(tenantId, url, notification, attemptRetryNumber, e.getMessage()); return false; } if (response.getStatusCode() >= 200 && response.getStatusCode() < 300) { return true; } else { saveRetryPushNotificationInQueue(tenantId, url, notification, attemptRetryNumber, "statusCode=" + response.getStatusCode()); return false; } } public void resendPushNotification(final PushNotificationKey key) throws JsonProcessingException { final NotificationJson notification = new NotificationJson(key.getEventType(), key.getAccountId(), key.getObjectType() != null ? 
key.getObjectType().toString() : null, key.getObjectId(), key.getMetaData()); final String body = mapper.writeValueAsString(notification); doPost(key.getTenantId(), key.getUrl(), body, notification, TIMEOUT_NOTIFICATION, key.getAttemptNumber()); } private void saveRetryPushNotificationInQueue(final UUID tenantId, final String url, final NotificationJson notificationJson, final int attemptRetryNumber, final String reason) { final PushNotificationKey key = new PushNotificationKey(tenantId, notificationJson.getAccountId(), notificationJson.getEventType(), notificationJson.getObjectType(), notificationJson.getObjectId(), attemptRetryNumber + 1, notificationJson.getMetaData(), url); final TenantContext tenantContext = contextFactory.createTenantContext(null, tenantId); final DateTime nextNotificationTime = getNextNotificationTime(key.getAttemptNumber(), internalCallContextFactory.createInternalTenantContextWithoutAccountRecordId(tenantContext)); if (nextNotificationTime == null) { log.warn("Max attempt number reached for push notification url='{}', tenantId='{}'", key.getUrl(), key.getTenantId()); return; } log.warn("Push notification {} is re-scheduled to be sent at {}, url='{}', reason='{}'", key, nextNotificationTime, key.getUrl(), reason); final Long accountRecordId = internalCallContextFactory.getRecordIdFromObject(key.getAccountId(), ObjectType.ACCOUNT, tenantContext); final Long tenantRecordId = internalCallContextFactory.getRecordIdFromObject(key.getTenantId(), ObjectType.TENANT, tenantContext); try { final NotificationQueue notificationQueue = notificationQueueService.getNotificationQueue(KILLBILL_SERVICES.SERVER_SERVICE.getServiceName(), PushNotificationRetryService.QUEUE_NAME); notificationQueue.recordFutureNotification(nextNotificationTime, key, null, MoreObjects.firstNonNull(accountRecordId, new Long(0)), tenantRecordId); } catch (final NoSuchNotificationQueue noSuchNotificationQueue) { log.error("Failed to push notification url='{}', tenantId='{}'", key.getUrl(), key.getTenantId(), noSuchNotificationQueue); } catch (final IOException e) { log.error("Failed to push notification url='{}', tenantId='{}'", key.getUrl(), key.getTenantId(), e); } } private DateTime getNextNotificationTime(final int attemptNumber, final InternalTenantContext tenantContext) { final List<TimeSpan> retries = notificationConfig.getPushNotificationsRetries(tenantContext); if (attemptNumber > retries.size()) { return null; } final TimeSpan nextDelay = retries.get(attemptNumber - 1); return clock.getUTCNow().plusMillis((int) nextDelay.getMillis()); } private List<String> getCallbacksForTenant(final TenantContext context) throws TenantApiException { return tenantApi.getTenantValuesForKey(TenantKey.PUSH_NOTIFICATION_CB.toString(), context); } }
profiles/killbill/src/main/java/org/killbill/billing/server/notifications/PushNotificationListener.java
/* * Copyright 2010-2013 Ning, Inc. * Copyright 2014-2015 Groupon, Inc * Copyright 2014-2015 The Billing Project, LLC * * The Billing Project licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.server.notifications; import java.io.IOException; import java.util.List; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import javax.inject.Inject; import org.joda.time.DateTime; import org.killbill.billing.ObjectType; import org.killbill.billing.callcontext.InternalTenantContext; import org.killbill.billing.jaxrs.json.NotificationJson; import org.killbill.billing.notification.plugin.api.ExtBusEvent; import org.killbill.billing.platform.api.KillbillService.KILLBILL_SERVICES; import org.killbill.billing.server.DefaultServerService; import org.killbill.billing.tenant.api.TenantApiException; import org.killbill.billing.tenant.api.TenantKV.TenantKey; import org.killbill.billing.tenant.api.TenantUserApi; import org.killbill.billing.util.callcontext.CallContextFactory; import org.killbill.billing.util.callcontext.InternalCallContextFactory; import org.killbill.billing.util.callcontext.TenantContext; import org.killbill.billing.util.config.definition.NotificationConfig; import org.killbill.clock.Clock; import org.killbill.notificationq.api.NotificationQueue; import org.killbill.notificationq.api.NotificationQueueService; import org.killbill.notificationq.api.NotificationQueueService.NoSuchNotificationQueue; import org.skife.config.TimeSpan; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.ning.http.client.AsyncCompletionHandler; import com.ning.http.client.AsyncHttpClient; import com.ning.http.client.AsyncHttpClient.BoundRequestBuilder; import com.ning.http.client.AsyncHttpClientConfig; import com.ning.http.client.ListenableFuture; import com.ning.http.client.Response; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.eventbus.AllowConcurrentEvents; import com.google.common.eventbus.Subscribe; public class PushNotificationListener { private static final Logger log = LoggerFactory.getLogger(PushNotificationListener.class); @VisibleForTesting public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type"; @VisibleForTesting public static final String CONTENT_TYPE_JSON = "application/json; charset=UTF-8"; private static final int TIMEOUT_NOTIFICATION = 15; // 15 seconds private final TenantUserApi tenantApi; private final CallContextFactory contextFactory; private final AsyncHttpClient httpClient; private final ObjectMapper mapper; private final NotificationQueueService notificationQueueService; private final InternalCallContextFactory internalCallContextFactory; private final Clock clock; private final NotificationConfig notificationConfig; @Inject public PushNotificationListener(final ObjectMapper mapper, final 
TenantUserApi tenantApi, final CallContextFactory contextFactory, final NotificationQueueService notificationQueueService, final InternalCallContextFactory internalCallContextFactory, final Clock clock, final NotificationConfig notificationConfig) { this.httpClient = new AsyncHttpClient(new AsyncHttpClientConfig.Builder().setRequestTimeout(TIMEOUT_NOTIFICATION * 1000).build()); this.tenantApi = tenantApi; this.contextFactory = contextFactory; this.mapper = mapper; this.notificationQueueService = notificationQueueService; this.internalCallContextFactory = internalCallContextFactory; this.clock = clock; this.notificationConfig = notificationConfig; } @AllowConcurrentEvents @Subscribe public void triggerPushNotifications(final ExtBusEvent event) { final TenantContext context = contextFactory.createTenantContext(event.getAccountId(), event.getTenantId()); try { final List<String> callbacks = getCallbacksForTenant(context); if (callbacks.isEmpty()) { // Optimization - see https://github.com/killbill/killbill/issues/297 return; } dispatchCallback(event.getTenantId(), event, callbacks); } catch (final TenantApiException e) { log.warn("Failed to retrieve push notification callback for tenant {}", event.getTenantId()); } catch (final IOException e) { log.warn("Failed to retrieve push notification callback for tenant {}", event.getTenantId()); } } public void shutdown() { httpClient.close(); } private void dispatchCallback(final UUID tenantId, final ExtBusEvent event, final Iterable<String> callbacks) throws IOException { final NotificationJson notification = new NotificationJson(event); final String body = mapper.writeValueAsString(notification); for (final String cur : callbacks) { doPost(tenantId, cur, body, notification, TIMEOUT_NOTIFICATION, 0); } } private boolean doPost(final UUID tenantId, final String url, final String body, final NotificationJson notification, final int timeoutSec, final int attemptRetryNumber) { log.info("Sending push notification url='{}', body='{}', attemptRetryNumber='{}'", url, body, attemptRetryNumber); final BoundRequestBuilder builder = httpClient.preparePost(url); builder.setBody(body == null ? "{}" : body); builder.addHeader(HTTP_HEADER_CONTENT_TYPE, CONTENT_TYPE_JSON); final Response response; try { final ListenableFuture<Response> futureStatus = builder.execute(new AsyncCompletionHandler<Response>() { @Override public Response onCompleted(final Response response) throws Exception { return response; } }); response = futureStatus.get(timeoutSec, TimeUnit.SECONDS); } catch (final TimeoutException toe) { saveRetryPushNotificationInQueue(tenantId, url, notification, attemptRetryNumber, "Timeout"); return false; } catch (final Exception e) { log.warn("Failed to push notification url='{}', tenantId='{}'", url, tenantId, e); return false; } if (response.getStatusCode() >= 200 && response.getStatusCode() < 300) { return true; } else { saveRetryPushNotificationInQueue(tenantId, url, notification, attemptRetryNumber, "statusCode=" + response.getStatusCode()); return false; } } public void resendPushNotification(final PushNotificationKey key) throws JsonProcessingException { final NotificationJson notification = new NotificationJson(key.getEventType() != null ? key.getEventType().toString() : null, key.getAccountId(), key.getObjectType() != null ? 
key.getObjectType().toString() : null, key.getObjectId(), key.getMetaData()); final String body = mapper.writeValueAsString(notification); doPost(key.getTenantId(), key.getUrl(), body, notification, TIMEOUT_NOTIFICATION, key.getAttemptNumber()); } private void saveRetryPushNotificationInQueue(final UUID tenantId, final String url, final NotificationJson notificationJson, final int attemptRetryNumber, final String reason) { final PushNotificationKey key = new PushNotificationKey(tenantId, notificationJson.getAccountId(), notificationJson.getEventType(), notificationJson.getObjectType(), notificationJson.getObjectId(), attemptRetryNumber + 1, notificationJson.getMetaData(), url); final TenantContext tenantContext = contextFactory.createTenantContext(null, tenantId); final DateTime nextNotificationTime = getNextNotificationTime(key.getAttemptNumber(), internalCallContextFactory.createInternalTenantContextWithoutAccountRecordId(tenantContext)); if (nextNotificationTime == null) { log.warn("Max attempt number reached for push notification url='{}', tenantId='{}'", key.getUrl(), key.getTenantId()); return; } log.warn("Push notification {} is re-scheduled to be sent at {}, url='{}', reason='{}'", key, nextNotificationTime, key.getUrl(), reason); final Long accountRecordId = internalCallContextFactory.getRecordIdFromObject(key.getAccountId(), ObjectType.ACCOUNT, tenantContext); final Long tenantRecordId = internalCallContextFactory.getRecordIdFromObject(key.getTenantId(), ObjectType.TENANT, tenantContext); try { final NotificationQueue notificationQueue = notificationQueueService.getNotificationQueue(KILLBILL_SERVICES.SERVER_SERVICE.getServiceName(), PushNotificationRetryService.QUEUE_NAME); notificationQueue.recordFutureNotification(nextNotificationTime, key, null, MoreObjects.firstNonNull(accountRecordId, new Long(0)), tenantRecordId); } catch (final NoSuchNotificationQueue noSuchNotificationQueue) { log.error("Failed to push notification url='{}', tenantId='{}'", key.getUrl(), key.getTenantId(), noSuchNotificationQueue); } catch (final IOException e) { log.error("Failed to push notification url='{}', tenantId='{}'", key.getUrl(), key.getTenantId(), e); } } private DateTime getNextNotificationTime(final int attemptNumber, final InternalTenantContext tenantContext) { final List<TimeSpan> retries = notificationConfig.getPushNotificationsRetries(tenantContext); if (attemptNumber > retries.size()) { return null; } final TimeSpan nextDelay = retries.get(attemptNumber - 1); return clock.getUTCNow().plusMillis((int) nextDelay.getMillis()); } private List<String> getCallbacksForTenant(final TenantContext context) throws TenantApiException { return tenantApi.getTenantValuesForKey(TenantKey.PUSH_NOTIFICATION_CB.toString(), context); } }
server: improve push notification error handling * Increase HTTP client connect timeout from 5s to 15s * Make sure we always retry regardless of the exception Signed-off-by: Pierre-Alexandre Meyer <[email protected]>
profiles/killbill/src/main/java/org/killbill/billing/server/notifications/PushNotificationListener.java
server: improve push notification error handling
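A small sketch of the retry scheduling used by getNextNotificationTime above: the attempt number indexes into a configured list of delays, and exhausting the list means giving up. It uses java.time instead of Joda-Time and TimeSpan, and the names are hypothetical; it is not the Kill Bill API.

import java.time.Duration;
import java.time.Instant;
import java.util.Arrays;
import java.util.List;

public final class RetrySchedule {

    // Per-attempt delays, e.g. 1 min, 10 min, 1 h; attempts beyond the list are dropped.
    private final List<Duration> delays;

    RetrySchedule(List<Duration> delays) {
        this.delays = delays;
    }

    // Mirrors getNextNotificationTime: attemptNumber is 1-based; null means "give up".
    Instant nextAttemptTime(int attemptNumber, Instant now) {
        if (attemptNumber < 1 || attemptNumber > delays.size()) {
            return null;
        }
        return now.plus(delays.get(attemptNumber - 1));
    }

    public static void main(String[] args) {
        RetrySchedule schedule = new RetrySchedule(
                Arrays.asList(Duration.ofMinutes(1), Duration.ofMinutes(10), Duration.ofHours(1)));
        Instant now = Instant.now();
        System.out.println(schedule.nextAttemptTime(1, now)); // now + 1 minute
        System.out.println(schedule.nextAttemptTime(4, now)); // null: max attempts reached
    }
}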
Java
apache-2.0
95b040c2220e065411f01664baff553414209c21
0
MTA-SZTAKI/longneck-core
package hu.sztaki.ilab.longneck.process; import hu.sztaki.ilab.longneck.Field; import hu.sztaki.ilab.longneck.Record; import hu.sztaki.ilab.longneck.process.block.*; import hu.sztaki.ilab.longneck.process.constraint.CheckResult; import hu.sztaki.ilab.longneck.process.mapping.MappedRecord; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Deque; import java.util.List; import org.apache.log4j.Logger; /** * * @author Molnár Péter <[email protected]> */ public class Kernel { private final static Logger LOG = Logger.getLogger(Kernel.class); /** Local queue for cloned records. */ private final List<Record> localCloneQueue; /** The top level sequence of blocks. */ private final Sequence topLevelSequence; /** The frame address resolver. */ private final FrameAddressResolver frameAddressResolver; public Kernel(Sequence topLevelSequence, FrameAddressResolver frameAddressResolver, List<Record> localCloneQueue) { this.topLevelSequence = topLevelSequence; this.frameAddressResolver = frameAddressResolver; this.localCloneQueue = localCloneQueue; } public KernelState newKernelState() { KernelState kernelState = new KernelState(); ExecutionFrame currentFrame = new ExecutionFrame(topLevelSequence, new ExecutionFrame()); kernelState.frameStack.addLast(currentFrame); return kernelState; } public void process(Record record) throws FailException, FilterException { // Get kernel state from record KernelState kernelState = record.getKernelState(); // Create new kernel state, if record doesn't carry one if (kernelState == null || kernelState.isAfterProcessing()) { kernelState = newKernelState(); } RecordContainer rc = new RecordContainer(record); Block currentBlock; ExecutionFrame currentFrame = null; // Iterate sequence for (;;) { try { currentFrame = kernelState.frameStack.getLast(); if (currentFrame.hostBlock.hasPosition(currentFrame.position)) { currentBlock = currentFrame.hostBlock.getBlocks().get(currentFrame.position); // If compound, go into it if (currentBlock instanceof CompoundBlock) { ExecutionFrame childFrame = new ExecutionFrame((CompoundBlock) currentBlock, currentFrame); kernelState.frameStack.addLast(childFrame); currentFrame = childFrame; // Apply block changes currentBlock.apply(rc.record, currentFrame.variables); // Startup handlers if (currentFrame.startHandler) { try { ((StartHandler) currentFrame.control).beforeChildren(kernelState, rc); } catch (RedirectException ex) { handleRedirect(kernelState, ex); } } } else if (currentBlock instanceof BlockReference) { ExecutionFrame childFrame = new ExecutionFrame( (BlockReference) currentBlock, currentFrame); kernelState.frameStack.addLast(childFrame); currentFrame = childFrame; // Startup handlers if (currentFrame.startHandler) { try { ((StartHandler) currentFrame.control).beforeChildren(kernelState, rc); } catch (RedirectException ex) { handleRedirect(kernelState, ex); } } } else { // Atomic block processing if (currentBlock instanceof CloneRecord) { // Clone the record Record clone = ((CloneRecord) currentBlock).getClonedRecord( rc.record, currentFrame.variables); // Clone the current kernel state and increase position KernelState cloneState = new KernelState(kernelState); cloneState.frameStack.getLast().position += 1; clone.setKernelState(cloneState); localCloneQueue.add(clone); } else { // Apply block changes currentBlock.apply(rc.record, currentFrame.variables); } // Success handler for atomic blocks if (currentFrame.successHandler && currentBlock instanceof Atomic) { try { ((SuccessHandler) 
currentFrame.control).onSuccess( kernelState, rc); } catch (RedirectException ex) { handleRedirect(kernelState, ex); } } // Increase position ++currentFrame.position; } } else { // Execute control after children if (currentFrame.endHandler) { ((EndHandler) currentFrame.control).afterChildren(kernelState, rc); } // Pop last frame kernelState.frameStack.removeLast(); // Exit, if no more frames if (kernelState.frameStack.isEmpty()) { break; } // Reassign current frame, and increase counter currentFrame = kernelState.frameStack.getLast(); ++currentFrame.position; // Success handler for compound blocks if (currentFrame.successHandler) { try { ((SuccessHandler) currentFrame.control).onSuccess(kernelState, rc); } catch (RedirectException ex) { handleRedirect(kernelState, ex); } } } } catch (CheckError ex) { LOG.debug("Check error.", ex); // Set exception as last error kernelState.lastError = ex; // Repeat until error has been handled, or no more frames ExecutionFrame errorFrame; while (kernelState.lastError != null && ! kernelState.frameStack.isEmpty()) { // Assign error frame errorFrame = kernelState.frameStack.getLast(); // Check if current control is an error handler and handle error, or pop frame if (errorFrame.errorHandler) { try { ((ErrorHandler) errorFrame.control).onError(kernelState, rc); } catch (CheckError ex2) { // Error was propagated to next level kernelState.lastError = ex2; kernelState.frameStack.removeLast(); } catch (RedirectException ex2) { handleRedirect(kernelState, ex2); break; } } else { kernelState.frameStack.removeLast(); } } if (kernelState.lastError != null) { // Add to record record.getErrors().add(kernelState.lastError.getCheckResult()); // Exit main loop break; } // Increase position kernelState.frameStack.getLast().position += 1; } catch (BreakException ex) { LOG.debug("Break.", ex); // Pop frames until block ref while (! kernelState.frameStack.isEmpty()) { ExecutionFrame breakFrame = kernelState.frameStack.getLast(); // Check frame type and is it imlement BreakHandler. if (breakFrame.hostBlock instanceof GenericBlock && breakFrame.breakHandler) { // Advance position past the last item in the blocks breakFrame.position = ((GenericBlock) breakFrame.hostBlock).getBlocks().size(); ((BreakHandler)breakFrame.control).onBreak(kernelState, rc); break; } // Pop frame and try again until the first GenericBlock is reached kernelState.frameStack.removeLast(); } } } } /** * Handles redirects including symbolic address resolving. * * @param kernelState The current kernel state. * @param ex The exception that triggered the redirection. */ private void handleRedirect(KernelState kernelState, RedirectException ex) { ExecutionFrame currentFrame = kernelState.frameStack.getLast(); FrameAddress redirectAddress = ex.getAddress(); // Resolve symbolic address if (FrameAddress.RETURN.equals(redirectAddress)) { // Set to end of compound currentFrame.position = (currentFrame.hostBlock.getBlocks() != null)?currentFrame.hostBlock.getBlocks().size():0; return; } // Replace child frame ExecutionFrame redirectFrame; if (ex.isSubframe()) { redirectFrame = new ExecutionFrame( (CompoundBlock) frameAddressResolver.get(redirectAddress), currentFrame); } else { // Inplace redirection redirectFrame = new ExecutionFrame( (CompoundBlock) frameAddressResolver.get(redirectAddress), currentFrame.parentFrame); // Remove current frame kernelState.frameStack.removeLast(); } kernelState.frameStack.addLast(redirectFrame); } /** * Kernel state. 
* * Contains processing context information, */ public static class KernelState { /** The execution frame stack. */ private Deque<ExecutionFrame> frameStack = new ArrayDeque<ExecutionFrame>(); /** The last error (immutable). */ private CheckError lastError; public KernelState() { } public KernelState(KernelState other) { ExecutionFrame parent = null, current; for (ExecutionFrame frame : other.frameStack) { current = new ExecutionFrame(frame, parent); this.frameStack.addLast(current); parent = current; } } /** * Handles an error by adding it to the record. * * @param record The record under processing. */ private void handleError(Record record) { record.getErrors().add(lastError.getCheckResult()); lastError = null; } /** * Clears the current error. */ private void clearError() { lastError = null; } /** * Returns, if this kernel state is after processing of a record. * * @return True, if this kernel state is at the end of processing. */ private boolean isAfterProcessing() { return frameStack.isEmpty(); } } private static class RecordContainer { private Record record; public RecordContainer() { } public RecordContainer(Record record) { this.record = record; } } /** * Execution frame. * * Maintains information about the currently executed compound block. */ private static class ExecutionFrame { /** The parent execution frame. */ private final ExecutionFrame parentFrame; /** The parent compound block. */ private final CompoundBlock hostBlock; /** The associated control object, if any. */ private final ControlStructure control; /** The current variable space. */ private final VariableSpace variables; /** Cached has control flag. */ private final boolean hasControl; /** Cached is start handler flag. */ private final boolean startHandler; /** Cached is end handler flag. */ private final boolean endHandler; /** Cached is error handler flag. */ private final boolean errorHandler; /** Cached is success handler flag. */ private final boolean successHandler; /** Cached is break handler flag. */ private final boolean breakHandler; /** The current position in the parent compound block. 
*/ private int position; public ExecutionFrame() { position = 0; parentFrame = null; hostBlock = null; control = getControl(null); variables = null; hasControl = false; startHandler = false; endHandler = false; errorHandler = false; successHandler = false; breakHandler = false; } public ExecutionFrame(CompoundBlock block, ExecutionFrame parentFrame) { position = 0; this.parentFrame = parentFrame; variables = new VariableSpace(parentFrame.variables); hostBlock = block; control = getControl(block); hasControl = (this.control instanceof ControlStructure); startHandler = (this.hasControl && this.control instanceof StartHandler); endHandler = (this.hasControl && this.control instanceof EndHandler); errorHandler = (this.hasControl && this.control instanceof ErrorHandler); successHandler = (this.hasControl && this.control instanceof SuccessHandler); breakHandler = (this.hasControl && this.control instanceof BreakHandler); } public ExecutionFrame(BlockReference blockRef, ExecutionFrame parentFrame) { position = 0; this.parentFrame = parentFrame; variables = new VariableSpace(parentFrame.variables); hostBlock = blockRef.getReferredBlock(); control = getControl(blockRef); hasControl = (this.control instanceof ControlStructure); startHandler = (this.hasControl && this.control instanceof StartHandler); endHandler = (this.hasControl && this.control instanceof EndHandler); errorHandler = (this.hasControl && this.control instanceof ErrorHandler); successHandler = (this.hasControl && this.control instanceof SuccessHandler); breakHandler = (this.hasControl && this.control instanceof BreakHandler); } /** * Copy constructor. * * @param other The frame to copy. * @param parent The parent frame that has already been copied, since it's final. */ public ExecutionFrame(ExecutionFrame other, ExecutionFrame parent) { position = other.position; parentFrame = parent; if (parent != null) { variables = new VariableSpace(other.variables, parent.variables); } else { variables = null; } hostBlock = other.hostBlock; control = other.control == null ? 
null : other.control.clone(); hasControl = other.hasControl; startHandler = other.startHandler; endHandler = other.endHandler; errorHandler = other.errorHandler; successHandler = other.successHandler; breakHandler = other.breakHandler; } public static ControlStructure getControl(Block block) { if (block instanceof If) { return new IfControl((If) block); } else if (block instanceof Switch) { return new SwitchControl(); } else if (block instanceof SwitchStrict) { return new SwitchStrictControl((SwitchStrict) block); } else if (block instanceof TryAll) { return new TryAllControl(); } else if (block instanceof BlockReference) { return new BlockReferenceControl((BlockReference) block); } return null; } } private interface ControlStructure extends Cloneable { public ControlStructure clone(); } private interface StartHandler extends ControlStructure { public void beforeChildren(KernelState kernelState, RecordContainer rc) throws CheckError, RedirectException; } private interface EndHandler extends ControlStructure { public void afterChildren(KernelState kernelState, RecordContainer rc) throws CheckError; } private interface ErrorHandler extends ControlStructure { public void onError(KernelState kernelState, RecordContainer rc) throws CheckError, RedirectException; } private interface SuccessHandler extends ControlStructure { public void onSuccess(KernelState kernelState, RecordContainer rc) throws CheckError, RedirectException; } private interface BreakHandler extends ControlStructure { public void onBreak(KernelState kernelState, RecordContainer rc); } private static class IfControl implements StartHandler, SuccessHandler { /** The If block that is controlled. */ private final If ifObj; public IfControl(If ifObj) { this.ifObj = ifObj; } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) throws RedirectException { CheckResult result = ifObj.getCondition().check( rc.record, kernelState.frameStack.getLast().variables); if (result.isPassed() && ifObj.getThenBranch() != null) { throw new RedirectException(ifObj.getThenBranch().getFrameAddress(), false); } else if (! result.isPassed() && ifObj.getElseBranch() != null) { throw new RedirectException(ifObj.getElseBranch().getFrameAddress(), false); } else { throw new RedirectException(FrameAddress.RETURN, false); } } @Override public void onSuccess(KernelState kernelState, RecordContainer rc) throws RedirectException { throw new RedirectException(FrameAddress.RETURN, false); } @Override public IfControl clone() { return this; } } private static class SwitchStrictControl implements StartHandler, SuccessHandler, ErrorHandler { /** The switch object under execution. */ private final SwitchStrict switchObj; /** List of errors that occured during execution. */ private List<CheckResult> errors = new ArrayList<CheckResult>(); /** Last executed case. 
*/ private int lastCase = 0; public SwitchStrictControl(SwitchStrict switchObj) { this.switchObj = switchObj; } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) { rc.record.saveState(); } @Override public void onSuccess(KernelState kernelState, RecordContainer rc) throws RedirectException { // Clean exit rc.record.removeState(); // Jump to next frame throw new RedirectException(FrameAddress.RETURN); } @Override public void onError(KernelState kernelState, RecordContainer rc) throws CheckError { // Roll back changes rc.record.restoreState(); rc.record.saveState(); // Add errors to error state errors.add(kernelState.lastError.getCheckResult()); kernelState.clearError(); // Increase case counter ++lastCase; if (lastCase >= switchObj.getCases().size()) { throw new CheckError( new CheckResult(switchObj, false, null, null, "All cases failed.", errors)); } } @Override public SwitchStrictControl clone() { try { SwitchStrictControl copy = (SwitchStrictControl) super.clone(); // Copy errors copy.errors = new ArrayList<CheckResult>(); for (CheckResult cr : errors) { copy.errors.add(cr); } return copy; } catch (CloneNotSupportedException ex) { throw new AssertionError(ex); } } } private static class SwitchControl implements StartHandler, SuccessHandler, ErrorHandler { /** Last executed case. */ private int lastCase = 0; public SwitchControl() { } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) { rc.record.saveState(); } @Override public void onSuccess(KernelState kernelState, RecordContainer rc) throws RedirectException { // Clean exit rc.record.removeState(); // Jump to next frame throw new RedirectException(FrameAddress.RETURN); } @Override public void onError(KernelState kernelState, RecordContainer rc) throws CheckError { // Roll back changes rc.record.restoreState(); rc.record.saveState(); // Clear error kernelState.clearError(); ++lastCase; } @Override public SwitchControl clone() { try { return (SwitchControl) super.clone(); } catch (CloneNotSupportedException ex) { throw new AssertionError(ex); } } } private static class TryAllControl implements StartHandler, SuccessHandler, ErrorHandler { /** Last executed case. 
*/ private int lastCase = 0; public TryAllControl() { } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) { rc.record.saveState(); } @Override public void onSuccess(KernelState kernelState, RecordContainer rc) { rc.record.removeState(); rc.record.saveState(); } @Override public void onError(KernelState kernelState, RecordContainer rc) { // Roll back changes rc.record.restoreState(); rc.record.saveState(); // Clear error kernelState.clearError(); ++lastCase; } @Override public TryAllControl clone() { try { return (TryAllControl) super.clone(); } catch (CloneNotSupportedException ex) { throw new AssertionError(ex); } } } private static class BlockReferenceControl implements StartHandler, ErrorHandler, EndHandler, BreakHandler { private final BlockReference blockRef; private boolean wasError; private boolean breaked; public BlockReferenceControl(BlockReference blockRef) { this.blockRef = blockRef; } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) throws RedirectException, CheckError { if (blockRef.getMapping().hasRules()) { rc.record = new MappedRecord(rc.record, blockRef.getMapping()); } GenericBlock referredBlock = blockRef.getReferredBlock(); if (referredBlock.getInputConstraints() != null) { referredBlock.getInputConstraints().apply(rc.record, kernelState.frameStack.getLast().variables); } } @Override public void onError(KernelState kernelState, RecordContainer rc) throws CheckError, RedirectException { if (blockRef.isPropagateFailure()) { throw kernelState.lastError; } kernelState.handleError(rc.record); wasError = true; throw new RedirectException(FrameAddress.RETURN); } @Override public void afterChildren(KernelState kernelState, RecordContainer rc) throws CheckError { GenericBlock referredBlock = blockRef.getReferredBlock(); if (referredBlock.getOutputConstraints() != null && !wasError && !breaked) { referredBlock.getOutputConstraints().apply(rc.record, kernelState.frameStack.getLast().variables); } if (blockRef.getMapping().hasRules()) { rc.record = ((MappedRecord) rc.record).getParent(); } } @Override public BlockReferenceControl clone() { return this; } @Override public void onBreak(KernelState kernelState, RecordContainer rc) { breaked = true; } } private static class CachingControl implements StartHandler, EndHandler { /** The logger instance. */ private final Logger LOG = Logger.getLogger(Kernel.CachingControl.class); /** The caching instance from the project. */ private final Caching caching; /** True, if the currently processed record was a hit. 
*/ private boolean hit = false; public CachingControl(Caching caching) { this.caching = caching; } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) throws RedirectException { VariableSpace variables = kernelState.frameStack.getLast().variables; String cacheKey = BlockUtils.getValue(caching.getApplyTo().get(0), rc.record, variables); List<Field> cacheValue = caching.getCacheElement(cacheKey); //cache hit if (cacheValue != null) { for (Field f : cacheValue) { rc.record.add(f); } hit = true; LOG.debug("Cache hit."); throw new RedirectException(FrameAddress.RETURN); } LOG.debug("Cache miss."); } @Override public void afterChildren(KernelState kernelState, RecordContainer rc) throws CheckError { if (hit == false) { VariableSpace variables = kernelState.frameStack.getLast().variables; String cacheKey = BlockUtils.getValue(caching.getApplyTo().get(0), rc.record, variables); List<Field> cacheValue = new ArrayList<Field>(caching.getOutputFields().size()); for (String s : caching.getOutputFields()) { Field f = rc.record.get(s); if (f == null) { f = new Field(s); } cacheValue.add(f); } caching.putCacheElement(cacheKey, cacheValue); } } @Override public CachingControl clone() { try { return (CachingControl) super.clone(); } catch (CloneNotSupportedException ex) { throw new AssertionError(ex); } } } }
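The Kernel source above pairs each compound block with an optional control object and calls into it through narrow handler interfaces (StartHandler, EndHandler, ErrorHandler, SuccessHandler, BreakHandler), caching the instanceof checks on the execution frame so the main loop only pays for the hooks a control actually implements. As a minimal, self-contained sketch of that dispatch pattern — using hypothetical StartHook/EndHook/Frame names rather than the Longneck types — the following runs a single frame over two child steps and fires the hooks around them:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

// Minimal sketch of the handler-interface dispatch used by the Kernel above:
// a control may implement any subset of the hook interfaces, and the frame
// caches the instanceof checks so the main loop stays cheap.
public class HandlerDispatchSketch {

    interface Control {}
    interface StartHook extends Control { void beforeChildren(Frame f); }
    interface EndHook   extends Control { void afterChildren(Frame f); }

    static final class Frame {
        final String name;
        final Control control;
        final boolean startHook;   // cached: control instanceof StartHook
        final boolean endHook;     // cached: control instanceof EndHook
        int position;              // index of the next child to run

        Frame(String name, Control control) {
            this.name = name;
            this.control = control;
            this.startHook = control instanceof StartHook;
            this.endHook = control instanceof EndHook;
        }
    }

    // A control that implements both hooks, analogous to CachingControl above.
    static final class LoggingControl implements StartHook, EndHook {
        public void beforeChildren(Frame f) { System.out.println("enter " + f.name); }
        public void afterChildren(Frame f)  { System.out.println("leave " + f.name); }
    }

    public static void main(String[] args) {
        Deque<Frame> stack = new ArrayDeque<>();
        Frame root = new Frame("root", new LoggingControl());
        stack.addLast(root);
        if (root.startHook) ((StartHook) root.control).beforeChildren(root);

        List<String> children = List.of("a", "b");
        while (!stack.isEmpty()) {
            Frame top = stack.getLast();
            if (top == root && top.position < children.size()) {
                System.out.println("run child " + children.get(top.position));
                top.position++;
            } else {
                if (top.endHook) ((EndHook) top.control).afterChildren(top);
                stack.removeLast();
            }
        }
    }
}

Keeping the flags on the frame, as the real ExecutionFrame does, trades a little memory per frame for avoiding repeated instanceof tests on every loop iteration.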
src/main/java/hu/sztaki/ilab/longneck/process/Kernel.java
package hu.sztaki.ilab.longneck.process; import hu.sztaki.ilab.longneck.Field; import hu.sztaki.ilab.longneck.Record; import hu.sztaki.ilab.longneck.process.block.*; import hu.sztaki.ilab.longneck.process.constraint.CheckResult; import hu.sztaki.ilab.longneck.process.mapping.MappedRecord; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Deque; import java.util.List; import org.apache.log4j.Logger; /** * * @author Molnár Péter <[email protected]> */ public class Kernel { private final static Logger LOG = Logger.getLogger(Kernel.class); /** Local queue for cloned records. */ private final List<Record> localCloneQueue; /** The top level sequence of blocks. */ private final Sequence topLevelSequence; /** The frame address resolver. */ private final FrameAddressResolver frameAddressResolver; public Kernel(Sequence topLevelSequence, FrameAddressResolver frameAddressResolver, List<Record> localCloneQueue) { this.topLevelSequence = topLevelSequence; this.frameAddressResolver = frameAddressResolver; this.localCloneQueue = localCloneQueue; } public KernelState newKernelState() { KernelState kernelState = new KernelState(); ExecutionFrame currentFrame = new ExecutionFrame(topLevelSequence, new ExecutionFrame()); kernelState.frameStack.addLast(currentFrame); return kernelState; } public void process(Record record) throws FailException, FilterException { // Get kernel state from record KernelState kernelState = record.getKernelState(); // Create new kernel state, if record doesn't carry one if (kernelState == null || kernelState.isAfterProcessing()) { kernelState = new KernelState(); ExecutionFrame currentFrame = new ExecutionFrame(topLevelSequence, new ExecutionFrame()); kernelState.frameStack.addLast(currentFrame); } RecordContainer rc = new RecordContainer(record); Block currentBlock; ExecutionFrame currentFrame = null; // Iterate sequence for (;;) { try { currentFrame = kernelState.frameStack.getLast(); if (currentFrame.hostBlock.hasPosition(currentFrame.position)) { currentBlock = currentFrame.hostBlock.getBlocks().get(currentFrame.position); // If compound, go into it if (currentBlock instanceof CompoundBlock) { ExecutionFrame childFrame = new ExecutionFrame((CompoundBlock) currentBlock, currentFrame); kernelState.frameStack.addLast(childFrame); currentFrame = childFrame; // Apply block changes currentBlock.apply(rc.record, currentFrame.variables); // Startup handlers if (currentFrame.startHandler) { try { ((StartHandler) currentFrame.control).beforeChildren(kernelState, rc); } catch (RedirectException ex) { handleRedirect(kernelState, ex); } } } else if (currentBlock instanceof BlockReference) { ExecutionFrame childFrame = new ExecutionFrame( (BlockReference) currentBlock, currentFrame); kernelState.frameStack.addLast(childFrame); currentFrame = childFrame; // Startup handlers if (currentFrame.startHandler) { try { ((StartHandler) currentFrame.control).beforeChildren(kernelState, rc); } catch (RedirectException ex) { handleRedirect(kernelState, ex); } } } else { // Atomic block processing if (currentBlock instanceof CloneRecord) { // Clone the record Record clone = ((CloneRecord) currentBlock).getClonedRecord( rc.record, currentFrame.variables); // Clone the current kernel state and increase position KernelState cloneState = new KernelState(kernelState); cloneState.frameStack.getLast().position += 1; clone.setKernelState(cloneState); localCloneQueue.add(clone); } else { // Apply block changes currentBlock.apply(rc.record, currentFrame.variables); } // Success handler for 
atomic blocks if (currentFrame.successHandler && currentBlock instanceof Atomic) { try { ((SuccessHandler) currentFrame.control).onSuccess( kernelState, rc); } catch (RedirectException ex) { handleRedirect(kernelState, ex); } } // Increase position ++currentFrame.position; } } else { // Execute control after children if (currentFrame.endHandler) { ((EndHandler) currentFrame.control).afterChildren(kernelState, rc); } // Pop last frame kernelState.frameStack.removeLast(); // Exit, if no more frames if (kernelState.frameStack.isEmpty()) { break; } // Reassign current frame, and increase counter currentFrame = kernelState.frameStack.getLast(); ++currentFrame.position; // Success handler for compound blocks if (currentFrame.successHandler) { try { ((SuccessHandler) currentFrame.control).onSuccess(kernelState, rc); } catch (RedirectException ex) { handleRedirect(kernelState, ex); } } } } catch (CheckError ex) { LOG.debug("Check error.", ex); // Set exception as last error kernelState.lastError = ex; // Repeat until error has been handled, or no more frames ExecutionFrame errorFrame; while (kernelState.lastError != null && ! kernelState.frameStack.isEmpty()) { // Assign error frame errorFrame = kernelState.frameStack.getLast(); // Check if current control is an error handler and handle error, or pop frame if (errorFrame.errorHandler) { try { ((ErrorHandler) errorFrame.control).onError(kernelState, rc); } catch (CheckError ex2) { // Error was propagated to next level kernelState.lastError = ex2; kernelState.frameStack.removeLast(); } catch (RedirectException ex2) { handleRedirect(kernelState, ex2); break; } } else { kernelState.frameStack.removeLast(); } } if (kernelState.lastError != null) { // Add to record record.getErrors().add(kernelState.lastError.getCheckResult()); // Exit main loop break; } // Increase position kernelState.frameStack.getLast().position += 1; } catch (BreakException ex) { LOG.debug("Break.", ex); // Pop frames until block ref while (! kernelState.frameStack.isEmpty()) { ExecutionFrame breakFrame = kernelState.frameStack.getLast(); // Check frame type and is it imlement BreakHandler. if (breakFrame.hostBlock instanceof GenericBlock && breakFrame.breakHandler) { // Advance position past the last item in the blocks breakFrame.position = ((GenericBlock) breakFrame.hostBlock).getBlocks().size(); ((BreakHandler)breakFrame.control).onBreak(kernelState, rc); break; } // Pop frame and try again until the first GenericBlock is reached kernelState.frameStack.removeLast(); } } } } /** * Handles redirects including symbolic address resolving. * * @param kernelState The current kernel state. * @param ex The exception that triggered the redirection. 
*/ private void handleRedirect(KernelState kernelState, RedirectException ex) { ExecutionFrame currentFrame = kernelState.frameStack.getLast(); FrameAddress redirectAddress = ex.getAddress(); // Resolve symbolic address if (FrameAddress.RETURN.equals(redirectAddress)) { // Set to end of compound currentFrame.position = (currentFrame.hostBlock.getBlocks() != null)?currentFrame.hostBlock.getBlocks().size():0; return; } // Replace child frame ExecutionFrame redirectFrame; if (ex.isSubframe()) { redirectFrame = new ExecutionFrame( (CompoundBlock) frameAddressResolver.get(redirectAddress), currentFrame); } else { // Inplace redirection redirectFrame = new ExecutionFrame( (CompoundBlock) frameAddressResolver.get(redirectAddress), currentFrame.parentFrame); // Remove current frame kernelState.frameStack.removeLast(); } kernelState.frameStack.addLast(redirectFrame); } /** * Kernel state. * * Contains processing context information, */ public static class KernelState { /** The execution frame stack. */ private Deque<ExecutionFrame> frameStack = new ArrayDeque<ExecutionFrame>(); /** The last error (immutable). */ private CheckError lastError; public KernelState() { } public KernelState(KernelState other) { ExecutionFrame parent = null, current; for (ExecutionFrame frame : other.frameStack) { current = new ExecutionFrame(frame, parent); this.frameStack.addLast(current); parent = current; } } /** * Handles an error by adding it to the record. * * @param record The record under processing. */ private void handleError(Record record) { record.getErrors().add(lastError.getCheckResult()); lastError = null; } /** * Clears the current error. */ private void clearError() { lastError = null; } /** * Returns, if this kernel state is after processing of a record. * * @return True, if this kernel state is at the end of processing. */ private boolean isAfterProcessing() { return frameStack.isEmpty(); } } private static class RecordContainer { private Record record; public RecordContainer() { } public RecordContainer(Record record) { this.record = record; } } /** * Execution frame. * * Maintains information about the currently executed compound block. */ private static class ExecutionFrame { /** The parent execution frame. */ private final ExecutionFrame parentFrame; /** The parent compound block. */ private final CompoundBlock hostBlock; /** The associated control object, if any. */ private final ControlStructure control; /** The current variable space. */ private final VariableSpace variables; /** Cached has control flag. */ private final boolean hasControl; /** Cached is start handler flag. */ private final boolean startHandler; /** Cached is end handler flag. */ private final boolean endHandler; /** Cached is error handler flag. */ private final boolean errorHandler; /** Cached is success handler flag. */ private final boolean successHandler; /** Cached is break handler flag. */ private final boolean breakHandler; /** The current position in the parent compound block. 
*/ private int position; public ExecutionFrame() { position = 0; parentFrame = null; hostBlock = null; control = getControl(null); variables = null; hasControl = false; startHandler = false; endHandler = false; errorHandler = false; successHandler = false; breakHandler = false; } public ExecutionFrame(CompoundBlock block, ExecutionFrame parentFrame) { position = 0; this.parentFrame = parentFrame; variables = new VariableSpace(parentFrame.variables); hostBlock = block; control = getControl(block); hasControl = (this.control instanceof ControlStructure); startHandler = (this.hasControl && this.control instanceof StartHandler); endHandler = (this.hasControl && this.control instanceof EndHandler); errorHandler = (this.hasControl && this.control instanceof ErrorHandler); successHandler = (this.hasControl && this.control instanceof SuccessHandler); breakHandler = (this.hasControl && this.control instanceof BreakHandler); } public ExecutionFrame(BlockReference blockRef, ExecutionFrame parentFrame) { position = 0; this.parentFrame = parentFrame; variables = new VariableSpace(parentFrame.variables); hostBlock = blockRef.getReferredBlock(); control = getControl(blockRef); hasControl = (this.control instanceof ControlStructure); startHandler = (this.hasControl && this.control instanceof StartHandler); endHandler = (this.hasControl && this.control instanceof EndHandler); errorHandler = (this.hasControl && this.control instanceof ErrorHandler); successHandler = (this.hasControl && this.control instanceof SuccessHandler); breakHandler = (this.hasControl && this.control instanceof BreakHandler); } /** * Copy constructor. * * @param other The frame to copy. * @param parent The parent frame that has already been copied, since it's final. */ public ExecutionFrame(ExecutionFrame other, ExecutionFrame parent) { position = other.position; parentFrame = parent; if (parent != null) { variables = new VariableSpace(other.variables, parent.variables); } else { variables = null; } hostBlock = other.hostBlock; control = other.control == null ? 
null : other.control.clone(); hasControl = other.hasControl; startHandler = other.startHandler; endHandler = other.endHandler; errorHandler = other.errorHandler; successHandler = other.successHandler; breakHandler = other.breakHandler; } public static ControlStructure getControl(Block block) { if (block instanceof If) { return new IfControl((If) block); } else if (block instanceof Switch) { return new SwitchControl(); } else if (block instanceof SwitchStrict) { return new SwitchStrictControl((SwitchStrict) block); } else if (block instanceof TryAll) { return new TryAllControl(); } else if (block instanceof BlockReference) { return new BlockReferenceControl((BlockReference) block); } return null; } } private interface ControlStructure extends Cloneable { public ControlStructure clone(); } private interface StartHandler extends ControlStructure { public void beforeChildren(KernelState kernelState, RecordContainer rc) throws CheckError, RedirectException; } private interface EndHandler extends ControlStructure { public void afterChildren(KernelState kernelState, RecordContainer rc) throws CheckError; } private interface ErrorHandler extends ControlStructure { public void onError(KernelState kernelState, RecordContainer rc) throws CheckError, RedirectException; } private interface SuccessHandler extends ControlStructure { public void onSuccess(KernelState kernelState, RecordContainer rc) throws CheckError, RedirectException; } private interface BreakHandler extends ControlStructure { public void onBreak(KernelState kernelState, RecordContainer rc); } private static class IfControl implements StartHandler, SuccessHandler { /** The If block that is controlled. */ private final If ifObj; public IfControl(If ifObj) { this.ifObj = ifObj; } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) throws RedirectException { CheckResult result = ifObj.getCondition().check( rc.record, kernelState.frameStack.getLast().variables); if (result.isPassed() && ifObj.getThenBranch() != null) { throw new RedirectException(ifObj.getThenBranch().getFrameAddress(), false); } else if (! result.isPassed() && ifObj.getElseBranch() != null) { throw new RedirectException(ifObj.getElseBranch().getFrameAddress(), false); } else { throw new RedirectException(FrameAddress.RETURN, false); } } @Override public void onSuccess(KernelState kernelState, RecordContainer rc) throws RedirectException { throw new RedirectException(FrameAddress.RETURN, false); } @Override public IfControl clone() { return this; } } private static class SwitchStrictControl implements StartHandler, SuccessHandler, ErrorHandler { /** The switch object under execution. */ private final SwitchStrict switchObj; /** List of errors that occured during execution. */ private List<CheckResult> errors = new ArrayList<CheckResult>(); /** Last executed case. 
*/ private int lastCase = 0; public SwitchStrictControl(SwitchStrict switchObj) { this.switchObj = switchObj; } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) { rc.record.saveState(); } @Override public void onSuccess(KernelState kernelState, RecordContainer rc) throws RedirectException { // Clean exit rc.record.removeState(); // Jump to next frame throw new RedirectException(FrameAddress.RETURN); } @Override public void onError(KernelState kernelState, RecordContainer rc) throws CheckError { // Roll back changes rc.record.restoreState(); rc.record.saveState(); // Add errors to error state errors.add(kernelState.lastError.getCheckResult()); kernelState.clearError(); // Increase case counter ++lastCase; if (lastCase >= switchObj.getCases().size()) { throw new CheckError( new CheckResult(switchObj, false, null, null, "All cases failed.", errors)); } } @Override public SwitchStrictControl clone() { try { SwitchStrictControl copy = (SwitchStrictControl) super.clone(); // Copy errors copy.errors = new ArrayList<CheckResult>(); for (CheckResult cr : errors) { copy.errors.add(cr); } return copy; } catch (CloneNotSupportedException ex) { throw new AssertionError(ex); } } } private static class SwitchControl implements StartHandler, SuccessHandler, ErrorHandler { /** Last executed case. */ private int lastCase = 0; public SwitchControl() { } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) { rc.record.saveState(); } @Override public void onSuccess(KernelState kernelState, RecordContainer rc) throws RedirectException { // Clean exit rc.record.removeState(); // Jump to next frame throw new RedirectException(FrameAddress.RETURN); } @Override public void onError(KernelState kernelState, RecordContainer rc) throws CheckError { // Roll back changes rc.record.restoreState(); rc.record.saveState(); // Clear error kernelState.clearError(); ++lastCase; } @Override public SwitchControl clone() { try { return (SwitchControl) super.clone(); } catch (CloneNotSupportedException ex) { throw new AssertionError(ex); } } } private static class TryAllControl implements StartHandler, SuccessHandler, ErrorHandler { /** Last executed case. 
*/ private int lastCase = 0; public TryAllControl() { } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) { rc.record.saveState(); } @Override public void onSuccess(KernelState kernelState, RecordContainer rc) { rc.record.removeState(); rc.record.saveState(); } @Override public void onError(KernelState kernelState, RecordContainer rc) { // Roll back changes rc.record.restoreState(); rc.record.saveState(); // Clear error kernelState.clearError(); ++lastCase; } @Override public TryAllControl clone() { try { return (TryAllControl) super.clone(); } catch (CloneNotSupportedException ex) { throw new AssertionError(ex); } } } private static class BlockReferenceControl implements StartHandler, ErrorHandler, EndHandler, BreakHandler { private final BlockReference blockRef; private boolean wasErrror; private boolean breaked; public BlockReferenceControl(BlockReference blockRef) { this.blockRef = blockRef; } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) throws RedirectException, CheckError { if (blockRef.getMapping().hasRules()) { rc.record = new MappedRecord(rc.record, blockRef.getMapping()); } GenericBlock referredBlock = blockRef.getReferredBlock(); if (referredBlock.getInputConstraints() != null) { referredBlock.getInputConstraints().apply(rc.record, kernelState.frameStack.getLast().variables); } } @Override public void onError(KernelState kernelState, RecordContainer rc) throws CheckError, RedirectException { if (blockRef.isPropagateFailure()) { throw kernelState.lastError; } kernelState.handleError(rc.record); wasErrror = true; throw new RedirectException(FrameAddress.RETURN); } @Override public void afterChildren(KernelState kernelState, RecordContainer rc) throws CheckError { GenericBlock referredBlock = blockRef.getReferredBlock(); if (referredBlock.getOutputConstraints() != null && !wasErrror && !breaked) { referredBlock.getOutputConstraints().apply(rc.record, kernelState.frameStack.getLast().variables); } if (blockRef.getMapping().hasRules()) { rc.record = ((MappedRecord) rc.record).getParent(); } } @Override public BlockReferenceControl clone() { return this; } @Override public void onBreak(KernelState kernelState, RecordContainer rc) { breaked = true; } } private static class CachingControl implements StartHandler, EndHandler { /** The logger instance. */ private final Logger LOG = Logger.getLogger(Kernel.CachingControl.class); /** The caching instance from the project. */ private final Caching caching; /** True, if the currently processed record was a hit. 
*/ private boolean hit = false; public CachingControl(Caching caching) { this.caching = caching; } @Override public void beforeChildren(KernelState kernelState, RecordContainer rc) throws RedirectException { VariableSpace variables = kernelState.frameStack.getLast().variables; String cacheKey = BlockUtils.getValue(caching.getApplyTo().get(0), rc.record, variables); List<Field> cacheValue = caching.getCacheElement(cacheKey); //cache hit if (cacheValue != null) { for (Field f : cacheValue) { rc.record.add(f); } hit = true; LOG.debug("Cache hit."); throw new RedirectException(FrameAddress.RETURN); } LOG.debug("Cache miss."); } @Override public void afterChildren(KernelState kernelState, RecordContainer rc) throws CheckError { if (hit == false) { VariableSpace variables = kernelState.frameStack.getLast().variables; String cacheKey = BlockUtils.getValue(caching.getApplyTo().get(0), rc.record, variables); List<Field> cacheValue = new ArrayList<Field>(caching.getOutputFields().size()); for (String s : caching.getOutputFields()) { Field f = rc.record.get(s); if (f == null) { f = new Field(s); } cacheValue.add(f); } caching.putCacheElement(cacheKey, cacheValue); } } @Override public CachingControl clone() { try { return (CachingControl) super.clone(); } catch (CloneNotSupportedException ex) { throw new AssertionError(ex); } } } }
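Both copies of the file show the same rollback discipline in SwitchControl, SwitchStrictControl and TryAllControl: save the record state before a case, discard the snapshot on a clean exit, and restore it (then save again) when a case fails so the next case starts from an unmodified record. The sketch below imitates that contract with a toy snapshot-by-copy Record; the real Longneck Record's saveState/restoreState/removeState may be implemented quite differently, so treat this only as an illustration of the control flow:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;

// Sketch of the save/restore discipline the switch and try-all controls rely on.
// The snapshot-by-copy Record here is a stand-in, not the Longneck Record class.
public class RollbackSketch {

    static final class Record {
        private Map<String, String> fields = new HashMap<>();
        private final Deque<Map<String, String>> snapshots = new ArrayDeque<>();

        void put(String k, String v) { fields.put(k, v); }
        String get(String k) { return fields.get(k); }

        void saveState()    { snapshots.addLast(new HashMap<>(fields)); }
        void restoreState() { fields = new HashMap<>(snapshots.getLast()); }
        void removeState()  { snapshots.removeLast(); }
    }

    interface Case { void apply(Record r) throws Exception; }

    // Try cases in order, rolling the record back after each failure,
    // as SwitchControl.onError does above.
    static boolean trySwitch(Record r, Case... cases) {
        r.saveState();
        for (Case c : cases) {
            try {
                c.apply(r);
                r.removeState();      // clean exit, keep the changes
                return true;
            } catch (Exception e) {
                r.restoreState();     // roll back and try the next case
            }
        }
        r.removeState();
        return false;                 // all cases failed
    }

    public static void main(String[] args) {
        Record r = new Record();
        r.put("name", "original");
        boolean ok = trySwitch(r,
                rec -> { rec.put("name", "broken"); throw new Exception("case 1 failed"); },
                rec -> rec.put("name", "case 2 value"));
        System.out.println(ok + " / name=" + r.get("name")); // true / name=case 2 value
    }
}

SwitchStrictControl additionally collects every CheckResult and raises a combined CheckError once all cases are exhausted, which the plain boolean return value here only approximates.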
Fix a small typo.
src/main/java/hu/sztaki/ilab/longneck/process/Kernel.java
Fix a small typo.
Java
apache-2.0
e4a1cbe1107cb68f75876a314f01c123fbb9c9a1
0
malinthaprasan/carbon-apimgt,harsha89/carbon-apimgt,bhathiya/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,isharac/carbon-apimgt,tharikaGitHub/carbon-apimgt,tharindu1st/carbon-apimgt,prasa7/carbon-apimgt,tharindu1st/carbon-apimgt,nuwand/carbon-apimgt,wso2/carbon-apimgt,uvindra/carbon-apimgt,Rajith90/carbon-apimgt,malinthaprasan/carbon-apimgt,fazlan-nazeem/carbon-apimgt,harsha89/carbon-apimgt,wso2/carbon-apimgt,Rajith90/carbon-apimgt,uvindra/carbon-apimgt,chamilaadhi/carbon-apimgt,chamilaadhi/carbon-apimgt,fazlan-nazeem/carbon-apimgt,tharikaGitHub/carbon-apimgt,ruks/carbon-apimgt,bhathiya/carbon-apimgt,chamindias/carbon-apimgt,fazlan-nazeem/carbon-apimgt,bhathiya/carbon-apimgt,tharikaGitHub/carbon-apimgt,nuwand/carbon-apimgt,pubudu538/carbon-apimgt,nuwand/carbon-apimgt,malinthaprasan/carbon-apimgt,wso2/carbon-apimgt,nuwand/carbon-apimgt,jaadds/carbon-apimgt,chamilaadhi/carbon-apimgt,pubudu538/carbon-apimgt,uvindra/carbon-apimgt,pubudu538/carbon-apimgt,isharac/carbon-apimgt,uvindra/carbon-apimgt,jaadds/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,jaadds/carbon-apimgt,Rajith90/carbon-apimgt,Rajith90/carbon-apimgt,chamilaadhi/carbon-apimgt,tharikaGitHub/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,praminda/carbon-apimgt,pubudu538/carbon-apimgt,chamindias/carbon-apimgt,prasa7/carbon-apimgt,malinthaprasan/carbon-apimgt,ruks/carbon-apimgt,tharindu1st/carbon-apimgt,praminda/carbon-apimgt,harsha89/carbon-apimgt,chamindias/carbon-apimgt,ruks/carbon-apimgt,tharindu1st/carbon-apimgt,prasa7/carbon-apimgt,ruks/carbon-apimgt,fazlan-nazeem/carbon-apimgt,bhathiya/carbon-apimgt,harsha89/carbon-apimgt,isharac/carbon-apimgt,chamindias/carbon-apimgt,wso2/carbon-apimgt,jaadds/carbon-apimgt,praminda/carbon-apimgt,isharac/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,prasa7/carbon-apimgt
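The APIUtil source in the record that follows rebuilds URITemplate objects from composite keys of the form uriTemplate::httpVerb::authType::throttlingTier::mediationScript, guarding each component with a length check before use. A small sketch of that defensive split is below; the ParsedTemplate holder and the sample keys are made up for illustration and are not part of the carbon-apimgt API:

// Sketch of the "::"-delimited key parsing used when the code below rebuilds
// URI templates; ParsedTemplate is a stand-in for the real URITemplate class.
public class UriKeySketch {

    static final class ParsedTemplate {
        final String uriTemplate, httpVerb, authType, throttlingTier, mediationScript;

        ParsedTemplate(String[] p) {
            uriTemplate     = p.length >= 1 ? p[0] : null;
            httpVerb        = p.length >= 2 ? p[1] : null;
            authType        = p.length >= 3 ? p[2] : null;
            throttlingTier  = p.length >= 4 ? p[3] : null;
            mediationScript = p.length >= 5 ? p[4] : null;
        }

        @Override
        public String toString() {
            return uriTemplate + " " + httpVerb + " " + authType + " "
                    + throttlingTier + " " + mediationScript;
        }
    }

    static ParsedTemplate parse(String key) {
        // Missing trailing components simply stay null instead of throwing.
        return new ParsedTemplate(key.split("::"));
    }

    public static void main(String[] args) {
        System.out.println(parse("/orders/{id}::GET::Application::Unlimited"));
        System.out.println(parse("/orders::POST"));
    }
}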
/* * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.impl.utils; import com.google.gson.Gson; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.impl.builder.StAXOMBuilder; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.AxisFault; import org.apache.axis2.Constants; import org.apache.axis2.client.Options; import org.apache.axis2.client.ServiceClient; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.engine.AxisConfiguration; import org.apache.axis2.transport.http.HTTPConstants; import org.apache.axis2.util.JavaUtils; import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpHeaders; import org.apache.http.client.HttpClient; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.ssl.SSLSocketFactory; import org.apache.http.conn.ssl.X509HostnameVerifier; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpParams; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.xerces.util.SecurityManager; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.w3c.dom.Document; import org.wso2.carbon.CarbonConstants; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.doc.model.APIDefinition; import org.wso2.carbon.apimgt.api.doc.model.APIResource; import org.wso2.carbon.apimgt.api.doc.model.Operation; import org.wso2.carbon.apimgt.api.doc.model.Parameter; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIPublisher; import org.wso2.carbon.apimgt.api.model.APIStatus; import org.wso2.carbon.apimgt.api.model.APIStore; import org.wso2.carbon.apimgt.api.model.Application; import org.wso2.carbon.apimgt.api.model.CORSConfiguration; import org.wso2.carbon.apimgt.api.model.Documentation; import org.wso2.carbon.apimgt.api.model.DocumentationType; import org.wso2.carbon.apimgt.api.model.KeyManagerConfiguration; import org.wso2.carbon.apimgt.api.model.Provider; import org.wso2.carbon.apimgt.api.model.Scope; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.URITemplate; import org.wso2.carbon.apimgt.api.model.policy.APIPolicy; import 
org.wso2.carbon.apimgt.api.model.policy.ApplicationPolicy; import org.wso2.carbon.apimgt.api.model.policy.BandwidthLimit; import org.wso2.carbon.apimgt.api.model.policy.Limit; import org.wso2.carbon.apimgt.api.model.policy.Policy; import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants; import org.wso2.carbon.apimgt.api.model.policy.QuotaPolicy; import org.wso2.carbon.apimgt.api.model.policy.RequestCountLimit; import org.wso2.carbon.apimgt.api.model.policy.SubscriptionPolicy; import org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.APIMRegistryServiceImpl; import org.wso2.carbon.apimgt.impl.APIManagerAnalyticsConfiguration; import org.wso2.carbon.apimgt.impl.APIManagerConfiguration; import org.wso2.carbon.apimgt.impl.ThrottlePolicyDeploymentManager; import org.wso2.carbon.apimgt.impl.clients.ApplicationManagementServiceClient; import org.wso2.carbon.apimgt.impl.clients.OAuthAdminClient; import org.wso2.carbon.apimgt.impl.clients.UserInformationRecoveryClient; import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO; import org.wso2.carbon.apimgt.impl.dto.APIKeyValidationInfoDTO; import org.wso2.carbon.apimgt.impl.dto.Environment; import org.wso2.carbon.apimgt.impl.dto.ThrottleProperties; import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder; import org.wso2.carbon.apimgt.impl.internal.APIManagerComponent; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.template.APITemplateException; import org.wso2.carbon.apimgt.impl.template.ThrottlePolicyTemplateBuilder; import org.wso2.carbon.apimgt.keymgt.client.SubscriberKeyMgtClient; import org.wso2.carbon.base.MultitenantConstants; import org.wso2.carbon.base.ServerConfiguration; import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.core.commons.stub.loggeduserinfo.ExceptionException; import org.wso2.carbon.core.commons.stub.loggeduserinfo.LoggedUserInfo; import org.wso2.carbon.core.commons.stub.loggeduserinfo.LoggedUserInfoAdminStub; import org.wso2.carbon.core.multitenancy.utils.TenantAxisUtils; import org.wso2.carbon.core.util.CryptoException; import org.wso2.carbon.core.util.CryptoUtil; import org.wso2.carbon.core.util.PermissionUpdateUtil; import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact; import org.wso2.carbon.governance.api.endpoints.EndpointManager; import org.wso2.carbon.governance.api.endpoints.dataobjects.Endpoint; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.GenericArtifactManager; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.governance.api.util.GovernanceConstants; import org.wso2.carbon.governance.api.util.GovernanceUtils; import org.wso2.carbon.governance.lcm.util.CommonUtil; import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration; import org.wso2.carbon.identity.user.profile.stub.UserProfileMgtServiceStub; import org.wso2.carbon.identity.user.profile.stub.UserProfileMgtServiceUserProfileExceptionException; import org.wso2.carbon.identity.user.profile.stub.types.UserProfileDTO; import org.wso2.carbon.registry.core.ActionConstants; import org.wso2.carbon.registry.core.Association; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.Tag; import org.wso2.carbon.registry.core.config.Mount; 
import org.wso2.carbon.registry.core.config.RegistryContext; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.jdbc.realm.RegistryAuthorizationManager; import org.wso2.carbon.registry.core.pagination.PaginationContext; import org.wso2.carbon.registry.core.service.RegistryService; import org.wso2.carbon.registry.core.service.TenantRegistryLoader; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.registry.core.utils.RegistryUtils; import org.wso2.carbon.registry.indexing.indexer.IndexerException; import org.wso2.carbon.registry.indexing.solr.SolrClient; import org.wso2.carbon.user.api.Permission; import org.wso2.carbon.user.api.RealmConfiguration; import org.wso2.carbon.user.api.Tenant; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.user.api.UserStoreManager; import org.wso2.carbon.user.core.UserCoreConstants; import org.wso2.carbon.user.core.UserRealm; import org.wso2.carbon.user.core.config.RealmConfigXMLProcessor; import org.wso2.carbon.user.core.service.RealmService; import org.wso2.carbon.user.mgt.UserMgtConstants; import org.wso2.carbon.utils.CarbonUtils; import org.wso2.carbon.utils.ConfigurationContextService; import org.wso2.carbon.utils.FileUtil; import org.wso2.carbon.utils.NetworkUtils; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import org.xml.sax.SAXException; import java.io.File; import java.io.FileInputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.math.RoundingMode; import java.net.Inet4Address; import java.net.InetAddress; import java.net.MalformedURLException; import java.net.NetworkInterface; import java.net.SocketException; import java.net.URL; import java.nio.charset.Charset; import java.rmi.RemoteException; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import javax.cache.Cache; import javax.cache.CacheConfiguration; import javax.cache.CacheManager; import javax.cache.Caching; import javax.xml.XMLConstants; import javax.xml.namespace.QName; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; /** * This class contains the utility methods used by the implementations of APIManager, APIProvider * and APIConsumer interfaces. 
*/ public final class APIUtil { private static final Log log = LogFactory.getLog(APIUtil.class); private static final Log audit = CarbonConstants.AUDIT_LOG; private static boolean isContextCacheInitialized = false; public static final String DISABLE_ROLE_VALIDATION_AT_SCOPE_CREATION = "disableRoleValidationAtScopeCreation"; private static final int ENTITY_EXPANSION_LIMIT = 0; private static final String DESCRIPTION = "Allows [1] request(s) per minute."; private static final int DEFAULT_TENANT_IDLE_MINS = 30; private static long tenantIdleTimeMillis; private static Set<String> currentLoadingTenants = new HashSet<String>(); private static volatile Set<String> whiteListedScopes; private static boolean isPublisherRoleCacheEnabled = true; public static final String STRICT = "Strict"; public static final String ALLOW_ALL = "AllowAll"; public static final String DEFAULT_AND_LOCALHOST = "DefaultAndLocalhost"; public static final String HOST_NAME_VERIFIER = "httpclient.hostnameVerifier"; //Need tenantIdleTime to check whether the tenant is in idle state in loadTenantConfig method static { tenantIdleTimeMillis = Long.parseLong(System.getProperty( org.wso2.carbon.utils.multitenancy.MultitenantConstants.TENANT_IDLE_TIME, String.valueOf(DEFAULT_TENANT_IDLE_MINS))) * 60 * 1000; } private static String hostAddress = null; /** * To initialize the publisherRoleCache configurations, based on configurations. */ public static void init() { APIManagerConfiguration apiManagerConfiguration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration(); String isPublisherRoleCacheEnabledConfiguration = apiManagerConfiguration .getFirstProperty(APIConstants.PUBLISHER_ROLE_CACHE_ENABLED); isPublisherRoleCacheEnabled = isPublisherRoleCacheEnabledConfiguration == null || Boolean .parseBoolean(isPublisherRoleCacheEnabledConfiguration); } /** * This method used to get API from governance artifact * * @param artifact API artifact * @param registry Registry * @return API * @throws APIManagementException if failed to get API from artifact */ public static API getAPI(GovernanceArtifact artifact, Registry registry) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); APIIdentifier apiIdentifier = new APIIdentifier(providerName, apiName, apiVersion); int apiId = ApiMgtDAO.getInstance().getAPIID(apiIdentifier, null); if (apiId == -1) { return null; } api = new API(apiIdentifier); // set rating String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); api.setRating(getAverageRating(apiId)); //set description api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); //set last access time api.setLastUpdated(registry.get(artifactPath).getLastModified()); //set uuid api.setUUID(artifact.getId()); // set url api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setWsdlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WSDL)); api.setWadlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WADL)); api.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); api.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); 
api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); api.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); api.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); api.setEndpointSecured(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_SECURED))); api.setEndpointAuthDigest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_AUTH_DIGEST))); api.setEndpointUTUsername(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_USERNAME)); if (!((APIConstants.DEFAULT_MODIFIED_ENDPOINT_PASSWORD) .equals(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)))) { api.setEndpointUTPassword(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)); } else { //If APIEndpointPasswordRegistryHandler is enabled take password from the registry hidden property api.setEndpointUTPassword(getActualEpPswdFromHiddenProperty(api, registry)); } api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setInSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_INSEQUENCE)); api.setOutSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE)); api.setFaultSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE)); api.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); api.setProductionMaxTps(artifact.getAttribute(APIConstants.API_PRODUCTION_THROTTLE_MAXTPS)); int cacheTimeout = APIConstants.API_RESPONSE_CACHE_TIMEOUT; try { cacheTimeout = Integer.parseInt(artifact.getAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT)); } catch (NumberFormatException e) { //ignore } api.setCacheTimeout(cacheTimeout); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); api.setSubscriptionAvailability(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); api.setSubscriptionAvailableTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); boolean isGlobalThrottlingEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (isGlobalThrottlingEnabled) { String apiLevelTier = ApiMgtDAO.getInstance().getAPILevelTier(apiId); api.setApiLevelPolicy(apiLevelTier); } String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); Map<String, Tier> definedTiers = getTiers(tenantId); Set<Tier> availableTier = getAvailableTiers(definedTiers, tiers, apiName); api.addAvailableTiers(availableTier); api.setMonetizationCategory(getAPIMonetizationCategory(availableTier, tenantDomainName)); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); // We set the context template here 
api.setContextTemplate(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE)); api.setLatest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_LATEST))); Set<URITemplate> uriTemplates = new LinkedHashSet<URITemplate>(); List<String> uriTemplateNames = new ArrayList<String>(); Set<Scope> scopes = ApiMgtDAO.getInstance().getAPIScopes(api.getId()); api.setScopes(scopes); HashMap<String, String> urlPatternsSet; urlPatternsSet = ApiMgtDAO.getInstance().getURITemplatesPerAPIAsString(api.getId()); HashMap<String, String> resourceScopesMap; resourceScopesMap = ApiMgtDAO.getInstance().getResourceToScopeMapping(api.getId()); Set<String> urlPatternsKeySet = urlPatternsSet.keySet(); String resourceScopeKey; for (String urlPattern : urlPatternsKeySet) { URITemplate uriTemplate = new URITemplate(); String[] urlPatternComponents = urlPattern.split("::"); String uTemplate = (urlPatternComponents.length >= 1) ? urlPatternComponents[0] : null; String method = (urlPatternComponents.length >= 2) ? urlPatternComponents[1] : null; String authType = (urlPatternComponents.length >= 3) ? urlPatternComponents[2] : null; String throttlingTier = (urlPatternComponents.length >= 4) ? urlPatternComponents[3] : null; String mediationScript = (urlPatternComponents.length >= 5) ? urlPatternComponents[4] : null; uriTemplate.setHTTPVerb(method); uriTemplate.setAuthType(authType); uriTemplate.setThrottlingTier(throttlingTier); uriTemplate.setHttpVerbs(method); uriTemplate.setAuthTypes(authType); uriTemplate.setUriTemplate(uTemplate); uriTemplate.setResourceURI(api.getUrl()); uriTemplate.setResourceSandboxURI(api.getSandboxUrl()); uriTemplate.setThrottlingTiers(throttlingTier); uriTemplate.setMediationScript(mediationScript); resourceScopeKey = APIUtil.getResourceKey(api.getContext(), apiVersion, uTemplate, method); uriTemplate.setScopes(findScopeByKey(scopes, resourceScopesMap.get(resourceScopeKey))); //Checking for duplicate uri template names if (uriTemplateNames.contains(uTemplate)) { for (URITemplate tmp : uriTemplates) { if (uTemplate.equals(tmp.getUriTemplate())) { tmp.setHttpVerbs(method); tmp.setAuthTypes(authType); tmp.setThrottlingTiers(throttlingTier); resourceScopeKey = APIUtil.getResourceKey(api.getContext(), apiVersion, uTemplate, method); tmp.setScopes(findScopeByKey(scopes, resourceScopesMap.get(resourceScopeKey))); break; } } } else { uriTemplates.add(uriTemplate); } uriTemplateNames.add(uTemplate); } api.setUriTemplates(uriTemplates); api.setAsDefaultVersion(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION))); Set<String> tags = new HashSet<String>(); Tag[] tag = registry.getTags(artifactPath); for (Tag tag1 : tag) { tags.add(tag1.getTagName()); } api.addTags(tags); api.setLastUpdated(registry.get(artifactPath).getLastModified()); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); } catch (GovernanceException e) { String msg = "Failed to get API for artifact "; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } catch (UserStoreException e) { String msg = "Failed to get User Realm of API Provider"; throw new APIManagementException(msg, e); } return api; } /** * 
This Method is different from getAPI method, as this one returns * URLTemplates without aggregating duplicates. This is to be used for building synapse config. * * @param artifact * @param registry * @return API * @throws org.wso2.carbon.apimgt.api.APIManagementException */ public static API getAPIForPublishing(GovernanceArtifact artifact, Registry registry) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); APIIdentifier apiIdentifier = new APIIdentifier(providerName, apiName, apiVersion); int apiId = ApiMgtDAO.getInstance().getAPIID(apiIdentifier, null); if (apiId == -1) { return null; } api = new API(apiIdentifier); //set uuid api.setUUID(artifact.getId()); // set rating String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); Resource apiResource = registry.get(artifactPath); api.setAccessControl(apiResource.getProperty(APIConstants.ACCESS_CONTROL)); api.setAccessControlRoles( APIConstants.NULL_USER_ROLE_LIST.equals(apiResource.getProperty(APIConstants.PUBLISHER_ROLES)) ? null : apiResource.getProperty(APIConstants.PUBLISHER_ROLES)); api.setRating(getAverageRating(apiId)); //set description api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); //set last access time api.setLastUpdated(registry.get(artifactPath).getLastModified()); // set url api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setWsdlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WSDL)); api.setWadlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WADL)); api.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); api.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); api.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); api.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); api.setEndpointSecured(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_SECURED))); api.setEndpointAuthDigest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_AUTH_DIGEST))); api.setEndpointUTUsername(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_USERNAME)); if (!((APIConstants.DEFAULT_MODIFIED_ENDPOINT_PASSWORD) .equals(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)))) { api.setEndpointUTPassword(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)); } else { //If APIEndpointPasswordRegistryHandler is enabled take password from the registry hidden property api.setEndpointUTPassword(getActualEpPswdFromHiddenProperty(api, registry)); } api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setInSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_INSEQUENCE)); api.setOutSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE)); api.setFaultSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE)); 
api.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); api.setType(artifact.getAttribute(APIConstants.API_OVERVIEW_TYPE)); api.setProductionMaxTps(artifact.getAttribute(APIConstants.API_PRODUCTION_THROTTLE_MAXTPS)); api.setSandboxMaxTps(artifact.getAttribute(APIConstants.API_SANDBOX_THROTTLE_MAXTPS)); int cacheTimeout = APIConstants.API_RESPONSE_CACHE_TIMEOUT; try { String strCacheTimeout = artifact.getAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT); if (strCacheTimeout != null && !strCacheTimeout.isEmpty()) { cacheTimeout = Integer.parseInt(strCacheTimeout); } } catch (NumberFormatException e) { if (log.isWarnEnabled()) { log.warn("Error while retrieving cache timeout from the registry for " + apiIdentifier); } // ignore the exception and use default cache timeout value } api.setCacheTimeout(cacheTimeout); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); api.setType(artifact.getAttribute(APIConstants.API_OVERVIEW_TYPE)); api.setSubscriptionAvailability(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); api.setSubscriptionAvailableTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); APIManagerConfiguration config = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration(); boolean isGlobalThrottlingEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (isGlobalThrottlingEnabled) { String apiLevelTier = ApiMgtDAO.getInstance().getAPILevelTier(apiId); api.setApiLevelPolicy(apiLevelTier); } String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); Map<String, Tier> definedTiers = getTiers(tenantId); Set<Tier> availableTier = getAvailableTiers(definedTiers, tiers, apiName); api.addAvailableTiers(availableTier); // This contains the resolved context api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); // We set the context template here api.setContextTemplate(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE)); api.setLatest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_LATEST))); Set<URITemplate> uriTemplates = new LinkedHashSet<URITemplate>(); List<String> uriTemplateNames = new ArrayList<String>(); Set<Scope> scopes = ApiMgtDAO.getInstance().getAPIScopes(api.getId()); api.setScopes(scopes); HashMap<String, String> urlPatternsSet; urlPatternsSet = ApiMgtDAO.getInstance().getURITemplatesPerAPIAsString(api.getId()); HashMap<String, String> resourceScopes; resourceScopes = ApiMgtDAO.getInstance().getResourceToScopeMapping(api.getId()); Set<String> urlPatternsKeySet = urlPatternsSet.keySet(); String resourceScopeKey; for (String urlPattern : urlPatternsKeySet) { URITemplate uriTemplate = new URITemplate(); String[] urlPatternComponents = urlPattern.split("::"); String uTemplate = (urlPatternComponents.length >= 1) ? 
urlPatternComponents[0] : null; String method = (urlPatternComponents.length >= 2) ? urlPatternComponents[1] : null; String authType = (urlPatternComponents.length >= 3) ? urlPatternComponents[2] : null; String throttlingTier = (urlPatternComponents.length >= 4) ? urlPatternComponents[3] : null; String mediationScript = (urlPatternComponents.length >= 5) ? urlPatternComponents[4] : null; uriTemplate.setHTTPVerb(method); uriTemplate.setAuthType(authType); uriTemplate.setThrottlingTier(throttlingTier); uriTemplate.setHttpVerbs(method); uriTemplate.setAuthTypes(authType); uriTemplate.setUriTemplate(uTemplate); uriTemplate.setResourceURI(api.getUrl()); uriTemplate.setResourceSandboxURI(api.getSandboxUrl()); uriTemplate.setThrottlingTiers(throttlingTier); uriTemplate.setMediationScript(mediationScript); uriTemplate.setMediationScripts(method, mediationScript); resourceScopeKey = APIUtil.getResourceKey(api.getContext(), apiVersion, uTemplate, method); uriTemplate.setScopes(findScopeByKey(scopes, resourceScopes.get(resourceScopeKey))); //Checking for duplicate uri template names if (uriTemplateNames.contains(uTemplate)) { for (URITemplate tmp : uriTemplates) { if (uTemplate.equals(tmp.getUriTemplate())) { tmp.setHttpVerbs(method); tmp.setAuthTypes(authType); tmp.setThrottlingTiers(throttlingTier); tmp.setMediationScripts(method, mediationScript); resourceScopeKey = APIUtil.getResourceKey(api.getContext(), apiVersion, uTemplate, method); tmp.setScopes(findScopeByKey(scopes, resourceScopes.get(resourceScopeKey))); break; } } } else { uriTemplates.add(uriTemplate); } uriTemplateNames.add(uTemplate); } if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equalsIgnoreCase(api.getImplementation())) { for (URITemplate template : uriTemplates) { template.setMediationScript(template.getAggregatedMediationScript()); } } api.setUriTemplates(uriTemplates); api.setAsDefaultVersion(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION))); Set<String> tags = new HashSet<String>(); Tag[] tag = registry.getTags(artifactPath); for (Tag tag1 : tag) { tags.add(tag1.getTagName()); } api.addTags(tags); api.setLastUpdated(registry.get(artifactPath).getLastModified()); api.setCreatedTime(String.valueOf(registry.get(artifactPath).getCreatedTime().getTime())); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); } catch (GovernanceException e) { String msg = "Failed to get API for artifact "; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } catch (UserStoreException e) { String msg = "Failed to get User Realm of API Provider"; throw new APIManagementException(msg, e); } return api; } public static API getAPI(GovernanceArtifact artifact) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); APIIdentifier apiIdentifier = new APIIdentifier(providerName, apiName, apiVersion); api = new API(apiIdentifier); int apiId = ApiMgtDAO.getInstance().getAPIID(apiIdentifier, null); if (apiId == -1) { return null; } 
//set uuid api.setUUID(artifact.getId()); api.setRating(getAverageRating(apiId)); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); api.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); api.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setInSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_INSEQUENCE)); api.setOutSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE)); api.setFaultSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE)); api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); api.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); api.setType(artifact.getAttribute(APIConstants.API_OVERVIEW_TYPE)); int cacheTimeout = APIConstants.API_RESPONSE_CACHE_TIMEOUT; try { cacheTimeout = Integer.parseInt(artifact.getAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT)); } catch (NumberFormatException e) { //ignore } api.setCacheTimeout(cacheTimeout); boolean isGlobalThrottlingEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (isGlobalThrottlingEnabled) { String apiLevelTier = ApiMgtDAO.getInstance().getAPILevelTier(apiId); api.setApiLevelPolicy(apiLevelTier); Set<Tier> availablePolicy = new HashSet<Tier>(); String[] subscriptionPolicy = ApiMgtDAO.getInstance().getPolicyNames(PolicyConstants.POLICY_LEVEL_SUB, replaceEmailDomainBack(providerName)); List<String> definedPolicyNames = Arrays.asList(subscriptionPolicy); String policies = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); if (policies != null && !"".equals(policies)) { String[] policyNames = policies.split("\\|\\|"); for (String policyName : policyNames) { if (definedPolicyNames.contains(policyName) || APIConstants.UNLIMITED_TIER.equals(policyName)) { Tier p = new Tier(policyName); availablePolicy.add(p); } else { log.warn("Unknown policy: " + policyName + " found on API: " + apiName); } } } api.addAvailableTiers(availablePolicy); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); api.setMonetizationCategory(getAPIMonetizationCategory(availablePolicy, tenantDomainName)); } else { //deprecated throttling method Set<Tier> availableTier = new HashSet<Tier>(); String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); if (tiers != null) { String[] tierNames = tiers.split("\\|\\|"); for (String tierName : tierNames) { Tier tier = new Tier(tierName); availableTier.add(tier); } api.addAvailableTiers(availableTier); api.setMonetizationCategory(getAPIMonetizationCategory(availableTier, tenantDomainName)); } else { api.setMonetizationCategory(getAPIMonetizationCategory(availableTier, tenantDomainName)); } } api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); 
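            // Note: the registry stores the assigned throttling tiers/policies as a single
            // "||"-separated attribute, e.g. (illustrative values) "Gold||Silver||Unlimited",
            // which is why the API_OVERVIEW_TIER value above is split on "||" (regex "\\|\\|")
            // before being matched against the policies defined for the tenant.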
api.setSubscriptionAvailability(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); api.setSubscriptionAvailableTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); api.setAsDefaultVersion(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION))); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); api.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); api.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); ArrayList<URITemplate> urlPatternsList; urlPatternsList = ApiMgtDAO.getInstance().getAllURITemplates(api.getContext(), api.getId().getVersion()); Set<URITemplate> uriTemplates = new HashSet<URITemplate>(urlPatternsList); for (URITemplate uriTemplate : uriTemplates) { uriTemplate.setResourceURI(api.getUrl()); uriTemplate.setResourceSandboxURI(api.getSandboxUrl()); } api.setUriTemplates(uriTemplates); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); } catch (GovernanceException e) { String msg = "Failed to get API from artifact "; throw new APIManagementException(msg, e); } return api; } /** * This method used to get Provider from provider artifact * * @param artifact provider artifact * @return Provider * @throws APIManagementException if failed to get Provider from provider artifact. */ public static Provider getProvider(GenericArtifact artifact) throws APIManagementException { Provider provider; try { provider = new Provider(artifact.getAttribute(APIConstants.PROVIDER_OVERVIEW_NAME)); provider.setDescription(artifact.getAttribute(APIConstants.PROVIDER_OVERVIEW_DESCRIPTION)); provider.setEmail(artifact.getAttribute(APIConstants.PROVIDER_OVERVIEW_EMAIL)); } catch (GovernanceException e) { String msg = "Failed to get provider "; log.error(msg, e); throw new APIManagementException(msg, e); } return provider; } /** * Returns a list of scopes when passed the Provider Name and Scope Key * * @param scopeKey * @param provider * @return * @throws APIManagementException */ public static Set<Scope> getScopeByScopeKey(String scopeKey, String provider) throws APIManagementException { Set<Scope> scopeSet = null; String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(provider)); try { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); scopeSet = ApiMgtDAO.getInstance().getAPIScopesByScopeKey(scopeKey, tenantId); } catch (UserStoreException e) { String msg = "Error while retrieving Scopes"; log.error(msg, e); handleException(msg); } return scopeSet; } /** * Create Governance artifact from given attributes * * @param artifact initial governance artifact * @param api API object with the attributes value * @return GenericArtifact * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to create API */ public static GenericArtifact createAPIArtifactContent(GenericArtifact artifact, API api) throws APIManagementException { try { String apiStatus = api.getStatus().getStatus(); artifact.setAttribute(APIConstants.API_OVERVIEW_NAME, api.getId().getApiName()); 
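            // Each setAttribute call below writes one field of the API into the generic (RXT-backed)
            // governance artifact; the getAPI(...) methods above read the same APIConstants.API_OVERVIEW_*
            // keys back when the artifact is loaded, so the two mappings need to stay in sync.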
artifact.setAttribute(APIConstants.API_OVERVIEW_VERSION, api.getId().getVersion()); artifact.setAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION, String.valueOf(api.isDefaultVersion())); artifact.setAttribute(APIConstants.API_OVERVIEW_CONTEXT, api.getContext()); artifact.setAttribute(APIConstants.API_OVERVIEW_PROVIDER, api.getId().getProviderName()); artifact.setAttribute(APIConstants.API_OVERVIEW_DESCRIPTION, api.getDescription()); artifact.setAttribute(APIConstants.API_OVERVIEW_WSDL, api.getWsdlUrl()); artifact.setAttribute(APIConstants.API_OVERVIEW_WADL, api.getWadlUrl()); artifact.setAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL, api.getThumbnailUrl()); artifact.setAttribute(APIConstants.API_OVERVIEW_STATUS, apiStatus); artifact.setAttribute(APIConstants.API_OVERVIEW_TEC_OWNER, api.getTechnicalOwner()); artifact.setAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL, api.getTechnicalOwnerEmail()); artifact.setAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER, api.getBusinessOwner()); artifact.setAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL, api.getBusinessOwnerEmail()); artifact.setAttribute(APIConstants.API_OVERVIEW_VISIBILITY, api.getVisibility()); artifact.setAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES, api.getVisibleRoles()); artifact.setAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS, api.getVisibleTenants()); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_SECURED, Boolean.toString(api.isEndpointSecured())); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_AUTH_DIGEST, Boolean.toString(api.isEndpointAuthDigest())); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_USERNAME, api.getEndpointUTUsername()); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD, api.getEndpointUTPassword()); artifact.setAttribute(APIConstants.API_OVERVIEW_TRANSPORTS, api.getTransports()); artifact.setAttribute(APIConstants.API_OVERVIEW_INSEQUENCE, api.getInSequence()); artifact.setAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE, api.getOutSequence()); artifact.setAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE, api.getFaultSequence()); artifact.setAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING, api.getResponseCache()); artifact.setAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT, Integer.toString(api.getCacheTimeout())); artifact.setAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL, api.getRedirectURL()); artifact.setAttribute(APIConstants.API_OVERVIEW_OWNER, api.getApiOwner()); artifact.setAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY, Boolean.toString(api.isAdvertiseOnly())); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG, api.getEndpointConfig()); artifact.setAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY, api.getSubscriptionAvailability()); artifact.setAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS, api.getSubscriptionAvailableTenants()); artifact.setAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION, api.getImplementation()); artifact.setAttribute(APIConstants.API_PRODUCTION_THROTTLE_MAXTPS, api.getProductionMaxTps()); artifact.setAttribute(APIConstants.API_SANDBOX_THROTTLE_MAXTPS, api.getSandboxMaxTps()); //Validate if the API has an unsupported context before setting it in the artifact String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(); if (APIConstants.SUPER_TENANT_DOMAIN.equals(tenantDomain)) { String invalidContext = File.separator + APIConstants.VERSION_PLACEHOLDER; if (invalidContext.equals(api.getContextTemplate())) 
{ throw new APIManagementException( "API : " + api.getId() + " has an unsupported context : " + api.getContextTemplate()); } } else { String invalidContext = APIConstants.TENANT_PREFIX + tenantDomain + File.separator + APIConstants.VERSION_PLACEHOLDER; if (invalidContext.equals(api.getContextTemplate())) { throw new APIManagementException( "API : " + api.getId() + " has an unsupported context : " + api.getContextTemplate()); } } // This is to support the pluggable version strategy. artifact.setAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE, api.getContextTemplate()); artifact.setAttribute(APIConstants.API_OVERVIEW_VERSION_TYPE, "context"); artifact.setAttribute(APIConstants.API_OVERVIEW_TYPE, api.getType()); StringBuilder policyBuilder = new StringBuilder(); for (Tier tier : api.getAvailableTiers()) { policyBuilder.append(tier.getName()); policyBuilder.append("||"); } String policies = policyBuilder.toString(); if (!"".equals(policies)) { policies = policies.substring(0, policies.length() - 2); artifact.setAttribute(APIConstants.API_OVERVIEW_TIER, policies); } StringBuilder tiersBuilder = new StringBuilder(); for (Tier tier : api.getAvailableTiers()) { tiersBuilder.append(tier.getName()); tiersBuilder.append("||"); } String tiers = tiersBuilder.toString(); if (!"".equals(tiers)) { tiers = tiers.substring(0, tiers.length() - 2); artifact.setAttribute(APIConstants.API_OVERVIEW_TIER, tiers); } if (APIConstants.PUBLISHED.equals(apiStatus)) { artifact.setAttribute(APIConstants.API_OVERVIEW_IS_LATEST, "true"); } String[] keys = artifact.getAttributeKeys(); for (String key : keys) { if (key.contains("URITemplate")) { artifact.removeAttribute(key); } } Set<URITemplate> uriTemplateSet = api.getUriTemplates(); int i = 0; for (URITemplate uriTemplate : uriTemplateSet) { artifact.addAttribute(APIConstants.API_URI_PATTERN + i, uriTemplate.getUriTemplate()); artifact.addAttribute(APIConstants.API_URI_HTTP_METHOD + i, uriTemplate.getHTTPVerb()); artifact.addAttribute(APIConstants.API_URI_AUTH_TYPE + i, uriTemplate.getAuthType()); i++; } artifact.setAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS, writeEnvironmentsToArtifact(api)); artifact.setAttribute(APIConstants.API_OVERVIEW_CORS_CONFIGURATION, APIUtil.getCorsConfigurationJsonFromDto(api.getCorsConfiguration())); } catch (GovernanceException e) { String msg = "Failed to create API for : " + api.getId().getApiName(); log.error(msg, e); throw new APIManagementException(msg, e); } return artifact; } /** * Create the Documentation from artifact * * @param artifact Documentation artifact * @return Documentation * @throws APIManagementException if failed to create Documentation from artifact */ public static Documentation getDocumentation(GenericArtifact artifact) throws APIManagementException { Documentation documentation; try { DocumentationType type; String docType = artifact.getAttribute(APIConstants.DOC_TYPE); if (docType.equalsIgnoreCase(DocumentationType.HOWTO.getType())) { type = DocumentationType.HOWTO; } else if (docType.equalsIgnoreCase(DocumentationType.PUBLIC_FORUM.getType())) { type = DocumentationType.PUBLIC_FORUM; } else if (docType.equalsIgnoreCase(DocumentationType.SUPPORT_FORUM.getType())) { type = DocumentationType.SUPPORT_FORUM; } else if (docType.equalsIgnoreCase(DocumentationType.API_MESSAGE_FORMAT.getType())) { type = DocumentationType.API_MESSAGE_FORMAT; } else if (docType.equalsIgnoreCase(DocumentationType.SAMPLES.getType())) { type = DocumentationType.SAMPLES; } else { type = DocumentationType.OTHER; } documentation = new 
Documentation(type, artifact.getAttribute(APIConstants.DOC_NAME)); documentation.setId(artifact.getId()); documentation.setSummary(artifact.getAttribute(APIConstants.DOC_SUMMARY)); String visibilityAttr = artifact.getAttribute(APIConstants.DOC_VISIBILITY); Documentation.DocumentVisibility documentVisibility = Documentation.DocumentVisibility.API_LEVEL; if (visibilityAttr != null) { if (visibilityAttr.equals(Documentation.DocumentVisibility.API_LEVEL.name())) { documentVisibility = Documentation.DocumentVisibility.API_LEVEL; } else if (visibilityAttr.equals(Documentation.DocumentVisibility.PRIVATE.name())) { documentVisibility = Documentation.DocumentVisibility.PRIVATE; } else if (visibilityAttr.equals(Documentation.DocumentVisibility.OWNER_ONLY.name())) { documentVisibility = Documentation.DocumentVisibility.OWNER_ONLY; } } documentation.setVisibility(documentVisibility); Documentation.DocumentSourceType docSourceType = Documentation.DocumentSourceType.INLINE; String artifactAttribute = artifact.getAttribute(APIConstants.DOC_SOURCE_TYPE); if (Documentation.DocumentSourceType.URL.name().equals(artifactAttribute)) { docSourceType = Documentation.DocumentSourceType.URL; documentation.setSourceUrl(artifact.getAttribute(APIConstants.DOC_SOURCE_URL)); } else if (Documentation.DocumentSourceType.FILE.name().equals(artifactAttribute)) { docSourceType = Documentation.DocumentSourceType.FILE; documentation.setFilePath(prependWebContextRoot(artifact.getAttribute(APIConstants.DOC_FILE_PATH))); } documentation.setSourceType(docSourceType); if (documentation.getType() == DocumentationType.OTHER) { documentation.setOtherTypeName(artifact.getAttribute(APIConstants.DOC_OTHER_TYPE_NAME)); } } catch (GovernanceException e) { throw new APIManagementException("Failed to get documentation from artifact", e); } return documentation; } /** * Create the Documentation from artifact * * @param artifact Documentation artifact * @return Documentation * @throws APIManagementException if failed to create Documentation from artifact */ public static Documentation getDocumentation(GenericArtifact artifact, String docCreatorName) throws APIManagementException { Documentation documentation; try { DocumentationType type; String docType = artifact.getAttribute(APIConstants.DOC_TYPE); if (docType.equalsIgnoreCase(DocumentationType.HOWTO.getType())) { type = DocumentationType.HOWTO; } else if (docType.equalsIgnoreCase(DocumentationType.PUBLIC_FORUM.getType())) { type = DocumentationType.PUBLIC_FORUM; } else if (docType.equalsIgnoreCase(DocumentationType.SUPPORT_FORUM.getType())) { type = DocumentationType.SUPPORT_FORUM; } else if (docType.equalsIgnoreCase(DocumentationType.API_MESSAGE_FORMAT.getType())) { type = DocumentationType.API_MESSAGE_FORMAT; } else if (docType.equalsIgnoreCase(DocumentationType.SAMPLES.getType())) { type = DocumentationType.SAMPLES; } else { type = DocumentationType.OTHER; } documentation = new Documentation(type, artifact.getAttribute(APIConstants.DOC_NAME)); documentation.setId(artifact.getId()); documentation.setSummary(artifact.getAttribute(APIConstants.DOC_SUMMARY)); Documentation.DocumentSourceType docSourceType = Documentation.DocumentSourceType.INLINE; String artifactAttribute = artifact.getAttribute(APIConstants.DOC_SOURCE_TYPE); if (artifactAttribute.equals(Documentation.DocumentSourceType.URL.name())) { docSourceType = Documentation.DocumentSourceType.URL; } else if (artifactAttribute.equals(Documentation.DocumentSourceType.FILE.name())) { docSourceType = 
Documentation.DocumentSourceType.FILE;
            }
            documentation.setSourceType(docSourceType);
            if ("URL".equals(artifact.getAttribute(APIConstants.DOC_SOURCE_TYPE))) {
                documentation.setSourceUrl(artifact.getAttribute(APIConstants.DOC_SOURCE_URL));
            }
            if (docSourceType == Documentation.DocumentSourceType.FILE) {
                String filePath = prependTenantPrefix(artifact.getAttribute(APIConstants.DOC_FILE_PATH), docCreatorName);
                documentation.setFilePath(prependWebContextRoot(filePath));
            }
            if (documentation.getType() == DocumentationType.OTHER) {
                documentation.setOtherTypeName(artifact.getAttribute(APIConstants.DOC_OTHER_TYPE_NAME));
            }
        } catch (GovernanceException e) {
            throw new APIManagementException("Failed to get documentation from artifact", e);
        }
        return documentation;
    }

    public static APIStatus getApiStatus(String status) throws APIManagementException {
        APIStatus apiStatus = null;
        for (APIStatus aStatus : APIStatus.values()) {
            if (aStatus.getStatus().equalsIgnoreCase(status)) {
                apiStatus = aStatus;
            }
        }
        return apiStatus;
    }

    /**
     * Prepends the Tenant Prefix to a registry path. ex: /t/test1.com
     *
     * @param postfixUrl path to be prepended.
     * @return Path prepended with the Tenant domain prefix.
     */
    public static String prependTenantPrefix(String postfixUrl, String username) {
        String tenantDomain = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(username));
        if (!(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain))) {
            String tenantPrefix = "/t/";
            postfixUrl = tenantPrefix + tenantDomain + postfixUrl;
        }
        return postfixUrl;
    }

    /**
     * Prepends the web context root to a registry path.
     *
     * @param postfixUrl path to be prepended.
     * @return Path prepended with the WebContext root.
     */
    public static String prependWebContextRoot(String postfixUrl) {
        String webContext = CarbonUtils.getServerConfiguration().getFirstProperty("WebContextRoot");
        if (webContext != null && !"/".equals(webContext)) {
            postfixUrl = webContext + postfixUrl;
        }
        return postfixUrl;
    }

    /**
     * Utility method for creating the storage path for an icon.
     *
     * @param identifier APIIdentifier
     * @return Icon storage path.
     */
    public static String getIconPath(APIIdentifier identifier) {
        String artifactPath = APIConstants.API_IMAGE_LOCATION + RegistryConstants.PATH_SEPARATOR +
                identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR +
                identifier.getApiName() + RegistryConstants.PATH_SEPARATOR + identifier.getVersion();
        return artifactPath + RegistryConstants.PATH_SEPARATOR + APIConstants.API_ICON_IMAGE;
    }

    /**
     * Utility method to generate the path for a file.
     *
     * @param identifier APIIdentifier
     * @param fileName   File name.
     * @return Generated path.
*/ public static String getDocumentationFilePath(APIIdentifier identifier, String fileName) { return APIUtil.getAPIDocPath(identifier) + APIConstants.DOCUMENT_FILE_DIR + RegistryConstants.PATH_SEPARATOR + fileName; } //remove getSwagger12DefinitionFilePath once getSwagger20DefinitionFilePath operates public static String getSwagger12DefinitionFilePath(String apiName, String apiVersion, String apiProvider) { return APIConstants.API_DOC_LOCATION + RegistryConstants.PATH_SEPARATOR + apiName + '-' + apiVersion + '-' + apiProvider + RegistryConstants.PATH_SEPARATOR + APIConstants.API_DOC_1_2_LOCATION; } public static String getSwagger20DefinitionFilePath(String apiName, String apiVersion, String apiProvider) { return APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + apiProvider + RegistryConstants.PATH_SEPARATOR + apiName + RegistryConstants.PATH_SEPARATOR + apiVersion + RegistryConstants.PATH_SEPARATOR; } public static String getWSDLDefinitionFilePath(String apiName, String apiVersion, String apiProvider) { return APIConstants.API_WSDL_RESOURCE_LOCATION + apiProvider + "--" + apiName + apiVersion + ".wsdl"; } /** * Utility method to get api path from APIIdentifier * * @param identifier APIIdentifier * @return API path */ public static String getAPIPath(APIIdentifier identifier) { return APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR + identifier.getApiName() + RegistryConstants.PATH_SEPARATOR + identifier.getVersion() + APIConstants.API_RESOURCE_NAME; } /** * Utility method to get api identifier from api path. * * @param apiPath Path of the API in registry * @return relevant API Identifier */ public static APIIdentifier getAPIIdentifier(String apiPath) { int length = (APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR).length(); if (!apiPath.contains(APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR)) { length = (APIConstants.API_IMAGE_LOCATION + RegistryConstants.PATH_SEPARATOR).length(); } if (length <= 0) { length = (APIConstants.API_DOC_LOCATION + RegistryConstants.PATH_SEPARATOR).length(); } String relativePath = apiPath.substring(length); String[] values = relativePath.split(RegistryConstants.PATH_SEPARATOR); if (values.length > 3) { return new APIIdentifier(values[0], values[1], values[2]); } return null; } /** * Utility method to get API provider path * * @param identifier APIIdentifier * @return API provider path */ public static String getAPIProviderPath(APIIdentifier identifier) { return APIConstants.API_LOCATION + RegistryConstants.PATH_SEPARATOR + identifier.getProviderName(); } /** * Utility method to get documentation path * * @param apiId APIIdentifier * @return Doc path */ public static String getAPIDocPath(APIIdentifier apiId) { return APIConstants.API_LOCATION + RegistryConstants.PATH_SEPARATOR + apiId.getProviderName() + RegistryConstants.PATH_SEPARATOR + apiId.getApiName() + RegistryConstants.PATH_SEPARATOR + apiId.getVersion() + RegistryConstants.PATH_SEPARATOR + APIConstants.DOC_DIR + RegistryConstants.PATH_SEPARATOR; } /** * Utility method to get documentation content file path * * @param apiId APIIdentifier * @param documentationName String * @return Doc content path */ public static String getAPIDocContentPath(APIIdentifier apiId, String documentationName) { return getAPIDocPath(apiId) + RegistryConstants.PATH_SEPARATOR + documentationName; } /** * This utility method used to create documentation artifact content * * @param artifact 
GovernanceArtifact
     * @param apiId         APIIdentifier
     * @param documentation Documentation
     * @return GenericArtifact
     * @throws APIManagementException if failed to get GovernanceArtifact from Documentation
     */
    public static GenericArtifact createDocArtifactContent(GenericArtifact artifact, APIIdentifier apiId,
                                                           Documentation documentation) throws APIManagementException {
        try {
            artifact.setAttribute(APIConstants.DOC_NAME, documentation.getName());
            artifact.setAttribute(APIConstants.DOC_SUMMARY, documentation.getSummary());
            artifact.setAttribute(APIConstants.DOC_TYPE, documentation.getType().getType());
            artifact.setAttribute(APIConstants.DOC_VISIBILITY, documentation.getVisibility().name());
            Documentation.DocumentSourceType sourceType = documentation.getSourceType();
            switch (sourceType) {
                case INLINE:
                    sourceType = Documentation.DocumentSourceType.INLINE;
                    break;
                case URL:
                    sourceType = Documentation.DocumentSourceType.URL;
                    break;
                case FILE: {
                    sourceType = Documentation.DocumentSourceType.FILE;
                }
                break;
                default:
                    throw new APIManagementException("Unknown sourceType " + sourceType + " provided for documentation");
            }
            // Documentation Source URL is a required field in the documentation.rxt for migrated setups,
            // therefore setting a default value if it is not set.
            if (documentation.getSourceUrl() == null) {
                documentation.setSourceUrl(" ");
            }
            artifact.setAttribute(APIConstants.DOC_SOURCE_TYPE, sourceType.name());
            artifact.setAttribute(APIConstants.DOC_SOURCE_URL, documentation.getSourceUrl());
            artifact.setAttribute(APIConstants.DOC_FILE_PATH, documentation.getFilePath());
            artifact.setAttribute(APIConstants.DOC_OTHER_TYPE_NAME, documentation.getOtherTypeName());
            String basePath = apiId.getProviderName() + RegistryConstants.PATH_SEPARATOR +
                    apiId.getApiName() + RegistryConstants.PATH_SEPARATOR + apiId.getVersion();
            artifact.setAttribute(APIConstants.DOC_API_BASE_PATH, basePath);
        } catch (GovernanceException e) {
            String msg = "Failed to create doc artifact content from :" + documentation.getName();
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        }
        return artifact;
    }

    /**
     * This method is used to initialize the ArtifactManager.
     *
     * @param registry Registry
     * @param key      key name of the artifact configuration (RXT) to load
     * @return GenericArtifactManager
     * @throws APIManagementException if failed to initialize the GenericArtifactManager
     */
    public static GenericArtifactManager getArtifactManager(Registry registry, String key) throws APIManagementException {
        GenericArtifactManager artifactManager = null;
        try {
            GovernanceUtils.loadGovernanceArtifacts((UserRegistry) registry);
            if (GovernanceUtils.findGovernanceArtifactConfiguration(key, registry) != null) {
                artifactManager = new GenericArtifactManager(registry, key);
            } else {
                log.warn("Couldn't find GovernanceArtifactConfiguration of RXT: " + key +
                        ". Tenant id set in registry : " + ((UserRegistry) registry).getTenantId() +
                        ", Tenant id set in PrivilegedCarbonContext: " +
                        PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId());
            }
        } catch (RegistryException e) {
            String msg = "Failed to initialize GenericArtifactManager";
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        }
        return artifactManager;
    }

    private static void handleException(String msg) throws APIManagementException {
        log.error(msg);
        throw new APIManagementException(msg);
    }

    public static void handleException(String msg, Throwable t) throws APIManagementException {
        log.error(msg, t);
        throw new APIManagementException(msg, t);
    }

    public static SubscriberKeyMgtClient getKeyManagementClient() throws APIManagementException {
        KeyManagerConfiguration configuration = KeyManagerHolder.getKeyManagerInstance().getKeyManagerConfiguration();
        String serverURL = configuration.getParameter(APIConstants.AUTHSERVER_URL);
        String username = configuration.getParameter(APIConstants.KEY_MANAGER_USERNAME);
        String password = configuration.getParameter(APIConstants.KEY_MANAGER_PASSWORD);
        if (serverURL == null) {
            handleException("API key manager URL unspecified");
        }
        if (username == null || password == null) {
            handleException("Authentication credentials for API key manager unspecified");
        }
        try {
            return new SubscriberKeyMgtClient(serverURL, username, password);
        } catch (Exception e) {
            handleException("Error while initializing the subscriber key management client", e);
            return null;
        }
    }

    public static OAuthAdminClient getOauthAdminClient() throws APIManagementException {
        try {
            return new OAuthAdminClient();
        } catch (Exception e) {
            handleException("Error while initializing the OAuth admin client", e);
            return null;
        }
    }

    public static UserInformationRecoveryClient getUserInformationRecoveryClient() throws APIManagementException {
        try {
            return new UserInformationRecoveryClient();
        } catch (Exception e) {
            handleException("Error while initializing the User information recovery client", e);
            return null;
        }
    }

    public static ApplicationManagementServiceClient getApplicationManagementServiceClient() throws APIManagementException {
        try {
            return new ApplicationManagementServiceClient();
        } catch (Exception e) {
            handleException("Error while initializing the Application Management Service client", e);
            return null;
        }
    }

    /**
     * Create a WSDL from the given wsdl url.
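     * (Handles both WSDL 1.1 and WSDL 2.0 documents; see isWSDL2Document and isWSDL2Resource.)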
Reset the endpoint details to gateway node * * * * @param registry - Governance Registry space to save the WSDL * @param api -API instance * @return Path of the created resource * @throws APIManagementException If an error occurs while adding the WSDL */ public static String createWSDL(Registry registry, API api) throws RegistryException, APIManagementException { try { String wsdlResourcePath = APIConstants.API_WSDL_RESOURCE_LOCATION + api.getId().getProviderName() + "--" + api.getId().getApiName() + api.getId().getVersion() + ".wsdl"; String absoluteWSDLResourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + wsdlResourcePath; APIMWSDLReader wsdlReader = new APIMWSDLReader(api.getWsdlUrl()); OMElement wsdlContentEle; String wsdRegistryPath; String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(); if (org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equalsIgnoreCase (tenantDomain)) { wsdRegistryPath = RegistryConstants.PATH_SEPARATOR + "registry" + RegistryConstants.PATH_SEPARATOR + "resource" + absoluteWSDLResourcePath; } else { wsdRegistryPath = "/t/" + tenantDomain + RegistryConstants.PATH_SEPARATOR + "registry" + RegistryConstants.PATH_SEPARATOR + "resource" + absoluteWSDLResourcePath; } Resource wsdlResource = registry.newResource(); // isWSDL2Document(api.getWsdlUrl()) method only understands http or file system urls. // Hence if this is a registry url, should not go in to the following if block if (!api.getWsdlUrl().matches(wsdRegistryPath) && (api.getWsdlUrl().startsWith("http:") || api.getWsdlUrl ().startsWith("https:") || api.getWsdlUrl().startsWith("file:"))) { if (isWSDL2Document(api.getWsdlUrl())) { wsdlContentEle = wsdlReader.readAndCleanWsdl2(api); wsdlResource.setContent(wsdlContentEle.toString()); } else { wsdlContentEle = wsdlReader.readAndCleanWsdl(api); wsdlResource.setContent(wsdlContentEle.toString()); } registry.put(wsdlResourcePath, wsdlResource); //set the anonymous role for wsld resource to avoid basicauth security. String[] visibleRoles = null; if (api.getVisibleRoles() != null) { visibleRoles = api.getVisibleRoles().split(","); } setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles, wsdlResourcePath); } else { byte[] wsdl = (byte[]) registry.get(wsdlResourcePath).getContent(); if (isWSDL2Resource(wsdl)) { wsdlContentEle = wsdlReader.updateWSDL2(wsdl, api); wsdlResource.setContent(wsdlContentEle.toString()); } else { wsdlContentEle = wsdlReader.updateWSDL(wsdl, api); wsdlResource.setContent(wsdlContentEle.toString()); } registry.put(wsdlResourcePath, wsdlResource); //set the anonymous role for wsld resource to avoid basicauth security. String[] visibleRoles = null; if (api.getVisibleRoles() != null) { visibleRoles = api.getVisibleRoles().split(","); } setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles, wsdlResourcePath); } //set the wsdl resource permlink as the wsdlURL. 
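            // The stored resource name follows the pattern <provider>--<apiName><version>.wsdl under
            // APIConstants.API_WSDL_RESOURCE_LOCATION, e.g. (illustrative names only) admin--PizzaShackAPI1.0.0.wsdl.
            // The HTTP permlink to that registry resource is what gets exposed as the API's wsdlUrl below.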
api.setWsdlUrl(getRegistryResourceHTTPPermlink(absoluteWSDLResourcePath)); return wsdlResourcePath; } catch (RegistryException e) { String msg = "Failed to add WSDL " + api.getWsdlUrl() + " to the registry"; log.error(msg, e); throw new RegistryException(msg, e); } catch (APIManagementException e) { String msg = "Failed to process the WSDL : " + api.getWsdlUrl(); log.error(msg, e); throw new APIManagementException(msg, e); } } /** * Given a URL, this method checks if the underlying document is a WSDL2 * * @param url URL to check * @return true if the underlying document is a WSDL2 * @throws APIManagementException if error occurred while validating the URI */ public static boolean isWSDL2Document(String url) throws APIManagementException { APIMWSDLReader wsdlReader = new APIMWSDLReader(url); return wsdlReader.isWSDL2BaseURI(); } /** * Given a wsdl resource, this method checks if the underlying document is a WSDL2 * * @param wsdl byte array of wsdl definition saved in registry * @return true if wsdl2 definition * @throws APIManagementException */ private static boolean isWSDL2Resource(byte[] wsdl) throws APIManagementException { String wsdl2NameSpace = "http://www.w3.org/ns/wsdl"; String wsdlContent = new String(wsdl); return wsdlContent.indexOf(wsdl2NameSpace) > 0; } /** * Read the GateWay Endpoint from the APIConfiguration. If multiple Gateway * environments defined, * take only the production node's Endpoint. * Else, pick what is available as the gateway node. * * @return {@link String} - Gateway URL */ public static String getGatewayendpoint(String transports) { String gatewayURLs; Map<String, Environment> gatewayEnvironments = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService() .getAPIManagerConfiguration() .getApiGatewayEnvironments(); if (gatewayEnvironments.size() > 1) { for (Environment environment : gatewayEnvironments.values()) { if (APIConstants.GATEWAY_ENV_TYPE_HYBRID.equals(environment.getType())) { gatewayURLs = environment.getApiGatewayEndpoint(); // This might have http,https // pick correct endpoint return APIUtil.extractHTTPSEndpoint(gatewayURLs, transports); } } for (Environment environment : gatewayEnvironments.values()) { if (APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType())) { gatewayURLs = environment.getApiGatewayEndpoint(); // This might have http,https // pick correct endpoint return APIUtil.extractHTTPSEndpoint(gatewayURLs, transports); } } for (Environment environment : gatewayEnvironments.values()) { if (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType())) { gatewayURLs = environment.getApiGatewayEndpoint(); // This might have http,https // pick correct endpoint return APIUtil.extractHTTPSEndpoint(gatewayURLs, transports); } } } else { gatewayURLs = ((Environment) gatewayEnvironments.values().toArray()[0]).getApiGatewayEndpoint(); return extractHTTPSEndpoint(gatewayURLs, transports); } return null; } /** * Gateway endpoint has HTTP and HTTPS endpoints. * If both are defined pick HTTPS only. Else, pick whatever available. * eg: <GatewayEndpoint>http://${carbon.local.ip}:${http.nio.port}, * https://${carbon.local.ip}:${https.nio.port}</GatewayEndpoint> * * @param gatewayURLs - String contains comma separated gateway urls. 
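     * @param transports  - Comma separated transports configured for the API (e.g. "http,https");
     *                    the HTTPS endpoint is only preferred when an https transport is present.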
* @return {@link String} - Returns HTTPS gateway endpoint */ private static String extractHTTPSEndpoint(String gatewayURLs, String transports) { String gatewayURL; String gatewayHTTPURL = null; String gatewayHTTPSURL = null; boolean httpsEnabled = false; String[] gatewayURLsArray = gatewayURLs.split(","); String[] transportsArray = transports.split(","); for (String transport : transportsArray) { if (transport.startsWith(APIConstants.HTTPS_PROTOCOL)) { httpsEnabled = true; } } if (gatewayURLsArray.length > 1) { for (String url : gatewayURLsArray) { if (url.startsWith("https:")) { gatewayHTTPSURL = url; } else { if (!url.startsWith("ws:")) { gatewayHTTPURL = url; } } } if (httpsEnabled) { gatewayURL = gatewayHTTPSURL; } else { gatewayURL = gatewayHTTPURL; } } else { gatewayURL = gatewayURLs; } return gatewayURL; } /** * Create an Endpoint * * @param endpointUrl Endpoint url * @param registry Registry space to save the endpoint * @return Path of the created resource * @throws APIManagementException If an error occurs while adding the endpoint */ public static String createEndpoint(String endpointUrl, Registry registry) throws APIManagementException { try { EndpointManager endpointManager = new EndpointManager(registry); Endpoint endpoint = endpointManager.newEndpoint(endpointUrl); endpointManager.addEndpoint(endpoint); return GovernanceUtils.getArtifactPath(registry, endpoint.getId()); } catch (RegistryException e) { String msg = "Failed to import endpoint " + endpointUrl + " to registry "; log.error(msg, e); throw new APIManagementException(msg, e); } } /** * Sorts the list of tiers according to the number of requests allowed per minute in each tier in descending order. * * @param tiers - The list of tiers to be sorted * @return - The sorted list. */ public static List<Tier> sortTiers(Set<Tier> tiers) { List<Tier> tierList = new ArrayList<Tier>(); tierList.addAll(tiers); Collections.sort(tierList); return tierList; } /** * Returns a set of External API Stores as defined in the underlying governance * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Set<APIStore> getExternalStores(int tenantId) throws APIManagementException { // First checking if ExternalStores are defined in api-manager.xml Set<APIStore> externalAPIStores = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration().getExternalAPIStores(); // If defined, return Store Config provided there. if (externalAPIStores != null && !externalAPIStores.isEmpty()) { return externalAPIStores; } // Else Read the config from Tenant's Registry. 
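        // The registry resource at APIConstants.EXTERNAL_API_STORES_LOCATION is expected to be an XML
        // document roughly of the following shape. The concrete element and attribute names come from
        // the APIConstants.EXTERNAL_API_STORE_* values; the names and values shown here are only an
        // illustrative sketch, and className must point to an APIPublisher implementation. Username and
        // Password are only read for stores of the WSO2 type.
        //
        //   <ExternalAPIStores>
        //       <ExternalAPIStore id="Store1" type="wso2" className="org.example.SomePublisher">
        //           <DisplayName>Partner Store</DisplayName>
        //           <Endpoint>https://store.example.com/store</Endpoint>
        //           <Username>admin</Username>
        //           <Password>secret</Password>
        //       </ExternalAPIStore>
        //   </ExternalAPIStores>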
externalAPIStores = new HashSet<APIStore>(); try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.EXTERNAL_API_STORES_LOCATION)) { Resource resource = registry.get(APIConstants.EXTERNAL_API_STORES_LOCATION); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); OMElement element = AXIOMUtil.stringToOM(content); Iterator apistoreIterator = element.getChildrenWithLocalName("ExternalAPIStore"); while (apistoreIterator.hasNext()) { APIStore store = new APIStore(); OMElement storeElem = (OMElement) apistoreIterator.next(); String type = storeElem.getAttributeValue(new QName(APIConstants.EXTERNAL_API_STORE_TYPE)); String className = storeElem.getAttributeValue(new QName(APIConstants.EXTERNAL_API_STORE_CLASS_NAME)); store.setPublisher((APIPublisher) getClassForName(className).newInstance()); store.setType(type); //Set Store type [eg:wso2] String name = storeElem.getAttributeValue(new QName(APIConstants.EXTERNAL_API_STORE_ID)); if (name == null) { log.error("The ExternalAPIStore name attribute is not defined in api-manager.xml."); } store.setName(name); //Set store name OMElement configDisplayName = storeElem.getFirstChildWithName (new QName(APIConstants.EXTERNAL_API_STORE_DISPLAY_NAME)); String displayName = (configDisplayName != null) ? replaceSystemProperty( configDisplayName.getText()) : name; store.setDisplayName(displayName);//Set store display name store.setEndpoint(replaceSystemProperty(storeElem.getFirstChildWithName( new QName(APIConstants.EXTERNAL_API_STORE_ENDPOINT)).getText())); //Set store endpoint, which is used to publish APIs store.setPublished(false); if (APIConstants.WSO2_API_STORE_TYPE.equals(type)) { OMElement password = storeElem.getFirstChildWithName(new QName( APIConstants.EXTERNAL_API_STORE_PASSWORD)); if (password != null) { String value = password.getText(); store.setPassword(replaceSystemProperty(value)); store.setUsername(replaceSystemProperty(storeElem.getFirstChildWithName( new QName(APIConstants.EXTERNAL_API_STORE_USERNAME)).getText())); //Set store login username } else { log.error("The user-credentials of API Publisher is not defined in the <ExternalAPIStore> " + "config of api-manager.xml."); } } externalAPIStores.add(store); } } } catch (RegistryException e) { String msg = "Error while retrieving External Stores Configuration from registry"; log.error(msg, e); throw new APIManagementException(msg, e); } catch (XMLStreamException e) { String msg = "Malformed XML found in the External Stores Configuration resource"; log.error(msg, e); throw new APIManagementException(msg, e); } catch (ClassNotFoundException e) { String msg = "One or more classes defined in APIConstants.EXTERNAL_API_STORE_CLASS_NAME cannot be found"; log.error(msg, e); throw new APIManagementException(msg, e); } catch (InstantiationException e) { String msg = "One or more classes defined in APIConstants.EXTERNAL_API_STORE_CLASS_NAME cannot be load"; log.error(msg, e); throw new APIManagementException(msg, e); } catch (IllegalAccessException e) { String msg = "One or more classes defined in APIConstants.EXTERNAL_API_STORE_CLASS_NAME cannot be access"; log.error(msg, e); throw new APIManagementException(msg, e); } return externalAPIStores; } /** * Returns the External API Store Configuration with the given Store Name * * @param apiStoreName * @return * @throws APIManagementException */ public static APIStore getExternalAPIStore(String apiStoreName, int 
tenantId) throws APIManagementException { Set<APIStore> externalAPIStoresConfig = APIUtil.getExternalStores(tenantId); for (APIStore apiStoreConfig : externalAPIStoresConfig) { if (apiStoreConfig.getName().equals(apiStoreName)) { return apiStoreConfig; } } return null; } /** * Returns an unfiltered map of API availability tiers as defined in the underlying governance * registry. * * @return Map<String, Tier> an unfiltered Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getAllTiers() throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(); return getAllTiers(registry, APIConstants.API_TIER_LOCATION, MultitenantConstants.SUPER_TENANT_ID); } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } catch (XMLStreamException e) { log.error(APIConstants.MSG_MALFORMED_XML_ERROR, e); throw new APIManagementException(APIConstants.MSG_MALFORMED_XML_ERROR, e); } } else { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, MultitenantConstants.SUPER_TENANT_ID); } } /** * Returns an unfiltered map of API availability tiers of the tenant as defined in the underlying governance * registry. * * @return Map<String, Tier> an unfiltered Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getAllTiers(int tenantId) throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(tenantId); return getAllTiers(registry, APIConstants.API_TIER_LOCATION, tenantId); } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } catch (XMLStreamException e) { log.error(APIConstants.MSG_MALFORMED_XML_ERROR, e); throw new APIManagementException(APIConstants.MSG_MALFORMED_XML_ERROR, e); } } else { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantId); } } /** * Returns a map of API availability tiers as defined in the underlying governance * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getTiers() throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(); return getTiers(registry, APIConstants.API_TIER_LOCATION, MultitenantConstants.SUPER_TENANT_ID); } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } } else { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, MultitenantConstants.SUPER_TENANT_ID); } } /** * Returns a map of API availability tiers as defined in the underlying governance * registry. 
* * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getAdvancedSubsriptionTiers() throws APIManagementException { return getAdvancedSubsriptionTiers(MultitenantConstants.SUPER_TENANT_ID); } /** * Returns a map of API subscription tiers of the tenant as defined in database * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getAdvancedSubsriptionTiers(int tenantId) throws APIManagementException { return APIUtil.getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantId); } /** * Returns a map of API availability tiers of the tenant as defined in the underlying governance * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getTiers(int tenantId) throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(tenantId); return getTiers(registry, APIConstants.API_TIER_LOCATION, tenantId); } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } } else { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantId); } } /** * Returns a map of API availability tiers of the tenant as defined in the underlying governance * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getTiers(int tierType, String tenantDomain) throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { boolean isTenantFlowStarted = false; try { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). 
getGovernanceSystemRegistry(tenantId); if (tierType == APIConstants.TIER_API_TYPE) { return getTiers(registry, APIConstants.API_TIER_LOCATION, tenantId); } else if (tierType == APIConstants.TIER_RESOURCE_TYPE) { return getTiers(registry, APIConstants.RES_TIER_LOCATION, tenantId); } else if (tierType == APIConstants.TIER_APPLICATION_TYPE) { return getTiers(registry, APIConstants.APP_TIER_LOCATION, tenantId); } else { throw new APIManagementException("No such a tier type : " + tierType); } } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } else { boolean isTenantFlowStarted = false; try { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (tierType == APIConstants.TIER_API_TYPE) { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantId); } else if (tierType == APIConstants.TIER_RESOURCE_TYPE) { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_API, tenantId); } else if (tierType == APIConstants.TIER_APPLICATION_TYPE) { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_APP, tenantId); } else { throw new APIManagementException("No such a tier type : " + tierType); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } } /** * Retrieves unfiltered list of all available tiers from registry. * Result will contains all the tiers including unauthenticated tier which is * filtered out in getTiers} * * @param registry registry to access tiers config * @param tierLocation registry location of tiers config * @return Map<String, Tier> containing all available tiers * @throws RegistryException when registry action fails * @throws XMLStreamException when xml parsing fails * @throws APIManagementException when fails to retrieve tier attributes */ private static Map<String, Tier> getAllTiers(Registry registry, String tierLocation, int tenantId) throws RegistryException, XMLStreamException, APIManagementException { // We use a treeMap here to keep the order Map<String, Tier> tiers = new TreeMap<String, Tier>(); if (registry.resourceExists(tierLocation)) { Resource resource = registry.get(tierLocation); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); OMElement element = AXIOMUtil.stringToOM(content); OMElement assertion = element.getFirstChildWithName(APIConstants.ASSERTION_ELEMENT); Iterator policies = assertion.getChildrenWithName(APIConstants.POLICY_ELEMENT); while (policies.hasNext()) { OMElement policy = (OMElement) policies.next(); OMElement id = policy.getFirstChildWithName(APIConstants.THROTTLE_ID_ELEMENT); String tierName = id.getText(); // Constructing the tier object Tier tier = new Tier(tierName); tier.setPolicyContent(policy.toString().getBytes(Charset.defaultCharset())); if (id.getAttribute(APIConstants.THROTTLE_ID_DISPLAY_NAME_ELEMENT) != null) { tier.setDisplayName(id.getAttributeValue(APIConstants.THROTTLE_ID_DISPLAY_NAME_ELEMENT)); } else { tier.setDisplayName(tierName); } String desc; try { long requestPerMin = APIDescriptionGenUtil.getAllowedCountPerMinute(policy); tier.setRequestsPerMin(requestPerMin); long requestCount = APIDescriptionGenUtil.getAllowedRequestCount(policy); tier.setRequestCount(requestCount); long 
unitTime = APIDescriptionGenUtil.getTimeDuration(policy); tier.setUnitTime(unitTime); if (requestPerMin >= 1) { desc = DESCRIPTION.replaceAll("\\[1\\]", Long.toString(requestPerMin)); } else { desc = DESCRIPTION; } tier.setDescription(desc); } catch (APIManagementException ex) { // If there is any issue in getting the request counts or the time duration, that means this tier // information can not be used for throttling. Hence we log this exception and continue the flow // to the next tier. log.warn("Unable to get the request count/time duration information for : " + tier.getName() + ". " + ex.getMessage()); continue; } // Get all the attributes of the tier. Map<String, Object> tierAttributes = APIDescriptionGenUtil.getTierAttributes(policy); if (!tierAttributes.isEmpty()) { // The description, billing plan and the stop on quota reach properties are also stored as attributes // of the tier attributes. Hence we extract them from the above attributes map. Iterator<Entry<String, Object>> attributeIterator = tierAttributes.entrySet().iterator(); while (attributeIterator.hasNext()) { Entry<String, Object> entry = attributeIterator.next(); if (APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE.equals(entry.getKey()) && entry.getValue() instanceof String) { tier.setDescription((String) entry.getValue()); // We remove the attribute from the map attributeIterator.remove(); continue; } if (APIConstants.THROTTLE_TIER_PLAN_ATTRIBUTE.equals(entry.getKey()) && entry.getValue() instanceof String) { tier.setTierPlan((String) entry.getValue()); // We remove the attribute from the map attributeIterator.remove(); continue; } if (APIConstants.THROTTLE_TIER_QUOTA_ACTION_ATTRIBUTE.equals(entry.getKey()) && entry.getValue() instanceof String) { tier.setStopOnQuotaReached(Boolean.parseBoolean((String) entry.getValue())); // We remove the attribute from the map attributeIterator.remove(); // We do not need a continue since this is the last statement. } } tier.setTierAttributes(tierAttributes); } tiers.put(tierName, tier); } } if (isEnabledUnlimitedTier()) { Tier tier = new Tier(APIConstants.UNLIMITED_TIER); tier.setDescription(APIConstants.UNLIMITED_TIER_DESC); tier.setDisplayName(APIConstants.UNLIMITED_TIER); tier.setRequestsPerMin(Long.MAX_VALUE); if (isUnlimitedTierPaid(getTenantDomainFromTenantId(tenantId))) { tier.setTierPlan(APIConstants.COMMERCIAL_TIER_PLAN); } else { tier.setTierPlan(APIConstants.BILLING_PLAN_FREE); } tiers.put(tier.getName(), tier); } return tiers; } /** * Retrieves filtered list of available tiers from registry. This method will not return Unauthenticated * tier in the list. Use to retrieve all tiers without * any filtering. 
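     * ({@link #getAllTiers(Registry, String, int)} returns the unfiltered list, including the Unauthenticated tier.)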
* * @param registry registry to access tiers config * @param tierLocation registry location of tiers config * @return map containing available tiers * @throws APIManagementException when fails to retrieve tier attributes */ private static Map<String, Tier> getTiers(Registry registry, String tierLocation, int tenantId) throws APIManagementException { Map<String, Tier> tiers = null; try { tiers = getAllTiers(registry, tierLocation, tenantId); tiers.remove(APIConstants.UNAUTHENTICATED_TIER); } catch (RegistryException e) { handleException(APIConstants.MSG_TIER_RET_ERROR, e); } catch (XMLStreamException e) { handleException(APIConstants.MSG_MALFORMED_XML_ERROR, e); } catch (APIManagementException e) { handleException("Unable to get tier attributes", e); } catch (Exception e) { // generic exception is caught to catch exceptions thrown from map remove method handleException("Unable to remove Unauthenticated tier from tiers list", e); } return tiers; } /** * This method deletes a given tier from tier xml file, for a given tenant * * @param tier tier to be deleted * @param tenantId id of the tenant * @throws APIManagementException if error occurs while getting registry resource or processing XML */ public static void deleteTier(Tier tier, int tenantId) throws APIManagementException { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.API_TIER_LOCATION)) { Resource resource = registry.get(APIConstants.API_TIER_LOCATION); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); OMElement element = AXIOMUtil.stringToOM(content); OMElement assertion = element.getFirstChildWithName(APIConstants.ASSERTION_ELEMENT); Iterator policies = assertion.getChildrenWithName(APIConstants.POLICY_ELEMENT); boolean foundTier = false; String tierName = null; while (policies.hasNext()) { OMElement policy = (OMElement) policies.next(); OMElement id = policy.getFirstChildWithName(APIConstants.THROTTLE_ID_ELEMENT); tierName = tier.getName(); if (tierName != null && tierName.equalsIgnoreCase(id.getText())) { foundTier = true; policies.remove(); break; } } if (!foundTier) { log.error("Tier doesn't exist : " + tierName); throw new APIManagementException("Tier doesn't exist : " + tierName); } resource.setContent(element.toString()); registry.put(APIConstants.API_TIER_LOCATION, resource); } } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(e.getMessage()); } catch (XMLStreamException e) { log.error(APIConstants.MSG_MALFORMED_XML_ERROR, e); throw new APIManagementException(e.getMessage()); } } /** * Returns the tier display name for a particular tier * * @return the relevant tier display name * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static String getTierDisplayName(int tenantId, String tierName) throws APIManagementException { String displayName = null; if (APIConstants.UNLIMITED_TIER.equals(tierName)) { return APIConstants.UNLIMITED_TIER; } try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). 
getGovernanceSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.API_TIER_LOCATION)) { Resource resource = registry.get(APIConstants.API_TIER_LOCATION); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); OMElement element = AXIOMUtil.stringToOM(content); OMElement assertion = element.getFirstChildWithName(APIConstants.ASSERTION_ELEMENT); Iterator policies = assertion.getChildrenWithName(APIConstants.POLICY_ELEMENT); while (policies.hasNext()) { OMElement policy = (OMElement) policies.next(); OMElement id = policy.getFirstChildWithName(APIConstants.THROTTLE_ID_ELEMENT); if (id.getText().equals(tierName)) { if (id.getAttribute(APIConstants.THROTTLE_ID_DISPLAY_NAME_ELEMENT) != null) { displayName = id.getAttributeValue(APIConstants.THROTTLE_ID_DISPLAY_NAME_ELEMENT); } else if (displayName == null) { displayName = id.getText(); } } } } } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } catch (XMLStreamException e) { log.error(APIConstants.MSG_MALFORMED_XML_ERROR, e); throw new APIManagementException(APIConstants.MSG_MALFORMED_XML_ERROR, e); } return displayName; } /** * Checks whether the specified user has the specified permission. * * @param username A username * @param permission A valid Carbon permission * @throws APIManagementException If the user does not have the specified permission or if an error occurs */ public static void checkPermission(String username, String permission) throws APIManagementException { if (username == null) { throw new APIManagementException("Attempt to execute privileged operation as" + " the anonymous user"); } if (isPermissionCheckDisabled()) { log.debug("Permission verification is disabled by APIStore configuration"); return; } String tenantDomain = MultitenantUtils.getTenantDomain(username); PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); boolean authorized; try { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager(). getTenantId(tenantDomain); if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { org.wso2.carbon.user.api.AuthorizationManager manager = ServiceReferenceHolder.getInstance() .getRealmService() .getTenantUserRealm(tenantId) .getAuthorizationManager(); authorized = manager.isUserAuthorized(MultitenantUtils.getTenantAwareUsername(username), permission, CarbonConstants.UI_PERMISSION_ACTION); } else { // On the first login attempt to publisher (without browsing the // store), the user realm will be null. if (ServiceReferenceHolder.getUserRealm() == null) { ServiceReferenceHolder.setUserRealm((UserRealm) ServiceReferenceHolder.getInstance() .getRealmService() .getTenantUserRealm(tenantId)); } authorized = AuthorizationManager.getInstance() .isUserAuthorized(MultitenantUtils.getTenantAwareUsername(username), permission); } if (!authorized) { throw new APIManagementException("User '" + username + "' does not have the " + "required permission: " + permission); } } catch (UserStoreException e) { throw new APIManagementException("Error while checking the user:" + username + " authorized or not", e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } /** * Checks whether the specified user has the specified permission. 
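     * Delegates to {@link #hasPermission(String, String, boolean)} with {@code isFromPublisher} set to {@code false}.
     *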
* * @param userNameWithoutChange A username * @param permission A valid Carbon permission * @throws APIManagementException If the user does not have the specified permission or if an error occurs */ public static boolean hasPermission(String userNameWithoutChange, String permission) throws APIManagementException { return hasPermission(userNameWithoutChange, permission, false); } /** * Checks whether the specified user has the specified permission. * * @param userNameWithoutChange A username * @param permission A valid Carbon permission * @throws APIManagementException If the user does not have the specified permission or if an error occurs */ public static boolean hasPermission(String userNameWithoutChange, String permission, boolean isFromPublisher) throws APIManagementException { boolean authorized = false; if (userNameWithoutChange == null) { throw new APIManagementException("Attempt to execute privileged operation as" + " the anonymous user"); } if (isPermissionCheckDisabled()) { log.debug("Permission verification is disabled by APIStore configuration"); authorized = true; return authorized; } if (isFromPublisher && APIConstants.Permissions.APIM_ADMIN.equals(permission)) { userNameWithoutChange = getUserNameWithTenantSuffix(userNameWithoutChange); Integer value = getValueFromCache(APIConstants.API_PUBLISHER_ADMIN_PERMISSION_CACHE, userNameWithoutChange); if (value != null) { return value == 1; } } String tenantDomain = MultitenantUtils.getTenantDomain(userNameWithoutChange); PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); try { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager(). getTenantId(tenantDomain); if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { org.wso2.carbon.user.api.AuthorizationManager manager = ServiceReferenceHolder.getInstance() .getRealmService() .getTenantUserRealm(tenantId) .getAuthorizationManager(); authorized = manager.isUserAuthorized(MultitenantUtils.getTenantAwareUsername(userNameWithoutChange), permission, CarbonConstants.UI_PERMISSION_ACTION); } else { // On the first login attempt to publisher (without browsing the // store), the user realm will be null. if (ServiceReferenceHolder.getUserRealm() == null) { ServiceReferenceHolder.setUserRealm((UserRealm) ServiceReferenceHolder.getInstance() .getRealmService() .getTenantUserRealm(tenantId)); } authorized = AuthorizationManager.getInstance() .isUserAuthorized(MultitenantUtils.getTenantAwareUsername(userNameWithoutChange), permission); } if (isFromPublisher && APIConstants.Permissions.APIM_ADMIN.equals(permission)) { addToRolesCache(APIConstants.API_PUBLISHER_ADMIN_PERMISSION_CACHE, userNameWithoutChange, authorized ? 1 : 2); } } catch (UserStoreException e) { throw new APIManagementException("Error while checking the user:" + userNameWithoutChange + " authorized or not", e); } finally { PrivilegedCarbonContext.endTenantFlow(); } return authorized; } /** * Checks whether the disablePermissionCheck parameter enabled * * @return boolean */ public static boolean isPermissionCheckDisabled() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). 
getAPIManagerConfigurationService().getAPIManagerConfiguration(); String disablePermissionCheck = config.getFirstProperty(APIConstants.API_STORE_DISABLE_PERMISSION_CHECK); if (disablePermissionCheck == null) { return false; } return Boolean.parseBoolean(disablePermissionCheck); } /** * Checks whether the specified user has the specified permission without throwing * any exceptions. * * @param username A username * @param permission A valid Carbon permission * @return true if the user has the specified permission and false otherwise */ public static boolean checkPermissionQuietly(String username, String permission) { try { checkPermission(username, permission); return true; } catch (APIManagementException ignore) { // Ignore the exception. // Logging it on debug mode so if needed we can see the exception stacktrace. if (log.isDebugEnabled()) { log.debug("User does not have permission", ignore); } return false; } } /** * Gets the information of the logged in User. * * @param cookie Cookie of the previously logged in session. * @param serviceUrl Url of the authentication service. * @return LoggedUserInfo object containing details of the logged in user. * @throws ExceptionException * @throws RemoteException */ public static LoggedUserInfo getLoggedInUserInfo(String cookie, String serviceUrl) throws RemoteException, ExceptionException { LoggedUserInfoAdminStub stub = new LoggedUserInfoAdminStub(null, serviceUrl + "LoggedUserInfoAdmin"); ServiceClient client = stub._getServiceClient(); Options options = client.getOptions(); options.setManageSession(true); options.setProperty(HTTPConstants.COOKIE_STRING, cookie); return stub.getUserInfo(); } /** * Get user profiles of user * * @param username username * @return default user profile of user * @throws APIManagementException */ public static UserProfileDTO getUserDefaultProfile(String username) throws APIManagementException { APIManagerConfiguration apiManagerConfiguration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration(); String url = apiManagerConfiguration.getFirstProperty(APIConstants.API_KEY_VALIDATOR_URL); String errorMsg = "Error while getting profile of user "; try { UserProfileMgtServiceStub stub = new UserProfileMgtServiceStub( ServiceReferenceHolder.getContextService().getClientConfigContext(), url + APIConstants.USER_PROFILE_MGT_SERVICE); ServiceClient gatewayServiceClient = stub._getServiceClient(); CarbonUtils.setBasicAccessSecurityHeaders( apiManagerConfiguration.getFirstProperty(APIConstants.API_KEY_VALIDATOR_USERNAME), apiManagerConfiguration.getFirstProperty(APIConstants.API_KEY_VALIDATOR_PASSWORD), gatewayServiceClient); UserProfileDTO[] profiles = stub.getUserProfiles(username); for (UserProfileDTO dto : profiles) { if (APIConstants.USER_DEFAULT_PROFILE.equals(dto.getProfileName())) { return dto; } } } catch (AxisFault axisFault) { //here we are going to log the error message and return because in this case, current user cannot fetch //profile of another user (due to cross tenant isolation, not allowed to access user details etc.) 
log.error("Cannot access user profile of : " + username); return null; } catch (RemoteException e) { handleException(errorMsg + username, e); } catch (UserProfileMgtServiceUserProfileExceptionException e) { handleException(errorMsg + username, e); } return null; } /** * Retrieves the role list of a user * * @param username Name of the username * @throws APIManagementException If an error occurs */ public static String[] getListOfRoles(String username) throws APIManagementException { return getListOfRoles(username, false); } /** * Retrieves the role list of a user * * @param username A username * @param isFromPublisher To specify whether this call is from publisher * @throws APIManagementException If an error occurs */ public static String[] getListOfRoles(String username, boolean isFromPublisher) throws APIManagementException { if (username == null) { throw new APIManagementException("Attempt to execute privileged operation as" + " the anonymous user"); } String[] roles = null; if (isFromPublisher) { username = getUserNameWithTenantSuffix(username); roles = getValueFromCache(APIConstants.API_PUBLISHER_USER_ROLE_CACHE, username); } if (roles != null) { return roles; } String tenantDomain = MultitenantUtils.getTenantDomain(username); try { if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME .equals(tenantDomain)) { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); UserStoreManager manager = ServiceReferenceHolder.getInstance().getRealmService() .getTenantUserRealm(tenantId).getUserStoreManager(); roles = manager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername(username)); } else { roles = AuthorizationManager.getInstance() .getRolesOfUser(MultitenantUtils.getTenantAwareUsername(username)); } if (isFromPublisher) { addToRolesCache(APIConstants.API_PUBLISHER_USER_ROLE_CACHE, username, roles); } return roles; } catch (UserStoreException e) { throw new APIManagementException("UserStoreException while trying the role list of the user " + username, e); } } /** * To add the value to a cache. * * @param cacheName - Name of the Cache * @param key - Key of the entry that need to be added. * @param value - Value of the entry that need to be added. */ protected static <T> void addToRolesCache(String cacheName, String key, T value) { if (isPublisherRoleCacheEnabled) { if (log.isDebugEnabled()) { log.debug("Publisher role cache is enabled, adding the roles for the " + key + " to the cache " + cacheName + "'"); } Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER).getCache(cacheName).put(key, value); } } /** * To get the value from the cache. * * @param cacheName Name of the cache. * @param key Key of the cache entry. * @return Role list from the cache, if a values exists, otherwise null. */ protected static <T> T getValueFromCache(String cacheName, String key) { if (isPublisherRoleCacheEnabled) { if (log.isDebugEnabled()) { log.debug("Publisher role cache is enabled, retrieving the roles for " + key + " from the cache " + cacheName + "'"); } Cache<String, T> rolesCache = Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER) .getCache(cacheName); return rolesCache.get(key); } return null; } /** * Retrieves the list of user roles without throwing any exceptions. * * @param username A username * @return the list of roles to which the user belongs to. 
*/ public static String[] getListOfRolesQuietly(String username) { try { return getListOfRoles(username); } catch (APIManagementException e) { return new String[0]; } } /** * Sets permission for uploaded file resource. * * @param filePath Registry path for the uploaded file * @throws APIManagementException */ public static void setFilePermission(String filePath) throws APIManagementException { try { String filePathString = filePath.replaceFirst("/registry/resource/", ""); org.wso2.carbon.user.api.AuthorizationManager accessControlAdmin = ServiceReferenceHolder.getInstance(). getRealmService().getTenantUserRealm(MultitenantConstants.SUPER_TENANT_ID). getAuthorizationManager(); if (!accessControlAdmin.isRoleAuthorized(CarbonConstants.REGISTRY_ANONNYMOUS_ROLE_NAME, filePathString, ActionConstants.GET)) { accessControlAdmin.authorizeRole(CarbonConstants.REGISTRY_ANONNYMOUS_ROLE_NAME, filePathString, ActionConstants.GET); } } catch (UserStoreException e) { throw new APIManagementException("Error while setting up permissions for file location", e); } } /** * This method used to get API from governance artifact specific to copyAPI * * @param artifact API artifact * @param registry Registry * @return API * @throws APIManagementException if failed to get API from artifact */ public static API getAPI(GovernanceArtifact artifact, Registry registry, APIIdentifier oldId, String oldContext) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); api = new API(new APIIdentifier(providerName, apiName, apiVersion)); int apiId = ApiMgtDAO.getInstance().getAPIID(oldId, null); if (apiId == -1) { return null; } // set rating String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); BigDecimal bigDecimal = BigDecimal.valueOf(registry.getAverageRating(artifactPath)); BigDecimal res = bigDecimal.setScale(1, RoundingMode.HALF_UP); api.setRating(res.floatValue()); //set description api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); //set last access time api.setLastUpdated(registry.get(artifactPath).getLastModified()); //set uuid api.setUUID(artifact.getId()); // set url api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setWsdlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WSDL)); api.setWadlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WADL)); api.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); api.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); api.setEndpointSecured(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_SECURED))); api.setEndpointAuthDigest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_AUTH_DIGEST))); api.setEndpointUTUsername(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_USERNAME)); if (!((APIConstants.DEFAULT_MODIFIED_ENDPOINT_PASSWORD) .equals(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)))) { 
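                // The artifact attribute holds the actual endpoint password only when it is not the masked
                // placeholder value, so in this branch it can be copied to the API object as-is.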
api.setEndpointUTPassword(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)); } else { //If APIEndpointPasswordRegistryHandler is enabled take password from the registry hidden property api.setEndpointUTPassword(getActualEpPswdFromHiddenProperty(api, registry)); } api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); api.setSubscriptionAvailability(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); api.setSubscriptionAvailableTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); api.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); boolean isGlobalThrottlingEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (isGlobalThrottlingEnabled) { String apiLevelTier = ApiMgtDAO.getInstance().getAPILevelTier(apiId); api.setApiLevelPolicy(apiLevelTier); } String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); Map<String, Tier> definedTiers = getTiers(tenantId); Set<Tier> availableTier = getAvailableTiers(definedTiers, tiers, apiName); api.addAvailableTiers(availableTier); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); api.setContextTemplate(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE)); api.setLatest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_LATEST))); ArrayList<URITemplate> urlPatternsList; Set<Scope> scopes = ApiMgtDAO.getInstance().getAPIScopes(oldId); api.setScopes(scopes); HashMap<String, String> resourceScopes; resourceScopes = ApiMgtDAO.getInstance().getResourceToScopeMapping(oldId); urlPatternsList = ApiMgtDAO.getInstance().getAllURITemplates(oldContext, oldId.getVersion()); Set<URITemplate> uriTemplates = new HashSet<URITemplate>(urlPatternsList); for (URITemplate uriTemplate : uriTemplates) { uriTemplate.setResourceURI(api.getUrl()); uriTemplate.setResourceSandboxURI(api.getSandboxUrl()); String resourceScopeKey = APIUtil.getResourceKey(oldContext, oldId.getVersion(), uriTemplate.getUriTemplate(), uriTemplate.getHTTPVerb()); uriTemplate.setScope(findScopeByKey(scopes, resourceScopes.get(resourceScopeKey))); } api.setUriTemplates(uriTemplates); Set<String> tags = new HashSet<String>(); Tag[] tag = registry.getTags(artifactPath); for (Tag tag1 : tag) { tags.add(tag1.getTagName()); } api.addTags(tags); api.setLastUpdated(registry.get(artifactPath).getLastModified()); api.setAsDefaultVersion(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION))); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); } catch (GovernanceException e) { String 
msg = "Failed to get API fro artifact "; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } catch (UserStoreException e) { String msg = "Failed to get User Realm of API Provider"; throw new APIManagementException(msg, e); } return api; } public static boolean checkAccessTokenPartitioningEnabled() { return OAuthServerConfiguration.getInstance().isAccessTokenPartitioningEnabled(); } public static boolean checkUserNameAssertionEnabled() { return OAuthServerConfiguration.getInstance().isUserNameAssertionEnabled(); } public static String[] getAvailableKeyStoreTables() throws APIManagementException { String[] keyStoreTables = new String[0]; Map<String, String> domainMappings = getAvailableUserStoreDomainMappings(); if (domainMappings != null) { keyStoreTables = new String[domainMappings.size()]; int i = 0; for (Entry<String, String> e : domainMappings.entrySet()) { String value = e.getValue(); keyStoreTables[i] = APIConstants.ACCESS_TOKEN_STORE_TABLE + "_" + value.trim(); i++; } } return keyStoreTables; } public static Map<String, String> getAvailableUserStoreDomainMappings() throws APIManagementException { Map<String, String> userStoreDomainMap = new HashMap<String, String>(); String domainsStr = OAuthServerConfiguration.getInstance().getAccessTokenPartitioningDomains(); if (domainsStr != null) { String[] userStoreDomainsArr = domainsStr.split(","); for (String anUserStoreDomainsArr : userStoreDomainsArr) { String[] mapping = anUserStoreDomainsArr.trim().split(":"); //A:foo.com , B:bar.com if (mapping.length < 2) { throw new APIManagementException("Domain mapping has not defined"); } userStoreDomainMap.put(mapping[1].trim(), mapping[0].trim()); //key=domain & value=mapping } } return userStoreDomainMap; } public static String getAccessTokenStoreTableFromUserId(String userId) throws APIManagementException { String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; String userStore; if (userId != null) { String[] strArr = userId.split("/"); if (strArr.length > 1) { userStore = strArr[0]; Map<String, String> availableDomainMappings = getAvailableUserStoreDomainMappings(); if (availableDomainMappings != null && availableDomainMappings.containsKey(userStore)) { accessTokenStoreTable = accessTokenStoreTable + "_" + availableDomainMappings.get(userStore); } } } return accessTokenStoreTable; } public static String getAccessTokenStoreTableFromAccessToken(String apiKey) throws APIManagementException { String userId = getUserIdFromAccessToken(apiKey); //i.e: 'foo.com/admin' or 'admin' return getAccessTokenStoreTableFromUserId(userId); } public static String getUserIdFromAccessToken(String apiKey) { String userId = null; String decodedKey = new String(Base64.decodeBase64(apiKey.getBytes(Charset.defaultCharset())), Charset.defaultCharset()); String[] tmpArr = decodedKey.split(":"); if (tmpArr.length == 2) { //tmpArr[0]= userStoreDomain & tmpArr[1] = userId userId = tmpArr[1]; } return userId; } /** * validates if an accessToken has expired or not * * @param accessTokenDO * @return true if token has expired else false */ public static boolean isAccessTokenExpired(APIKeyValidationInfoDTO accessTokenDO) { long validityPeriod = accessTokenDO.getValidityPeriod(); long issuedTime = accessTokenDO.getIssuedTime(); long timestampSkew = OAuthServerConfiguration.getInstance().getTimeStampSkewInSeconds() * 1000; long currentTime = System.currentTimeMillis(); //If the validity period is 
not an never expiring value if (validityPeriod != Long.MAX_VALUE && // For cases where validityPeriod is closer to Long.MAX_VALUE (then issuedTime + validityPeriod would spill // over and would produce a negative value) (currentTime - timestampSkew) > validityPeriod) { //check the validity of cached OAuth2AccessToken Response if ((currentTime - timestampSkew) > (issuedTime + validityPeriod)) { accessTokenDO.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_INVALID_CREDENTIALS); return true; } } return false; } /** * When an input is having '@',replace it with '-AT-' [This is required to persist API data in registry,as registry * paths don't allow '@' sign.] * * @param input inputString * @return String modifiedString */ public static String replaceEmailDomain(String input) { if (input != null && input.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR)) { input = input.replace(APIConstants.EMAIL_DOMAIN_SEPARATOR, APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT); } return input; } /** * When an input is having '-AT-',replace it with @ [This is required to persist API data between registry and database] * * @param input inputString * @return String modifiedString */ public static String replaceEmailDomainBack(String input) { if (input != null && input.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT)) { input = input.replace(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT, APIConstants.EMAIL_DOMAIN_SEPARATOR); } return input; } public static void copyResourcePermissions(String username, String sourceArtifactPath, String targetArtifactPath) throws APIManagementException { String sourceResourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + sourceArtifactPath); String targetResourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + targetArtifactPath); String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(username)); try { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain); org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). 
getTenantUserRealm(tenantId).getAuthorizationManager(); String[] allowedRoles = authManager.getAllowedRolesForResource(sourceResourcePath, ActionConstants.GET); if (allowedRoles != null) { for (String allowedRole : allowedRoles) { authManager.authorizeRole(allowedRole, targetResourcePath, ActionConstants.GET); } } } catch (UserStoreException e) { throw new APIManagementException("Error while adding role permissions to API", e); } } /** * This function is to set resource permissions based on its visibility * * @param visibility API visibility * @param roles Authorized roles * @param artifactPath API resource path * @throws APIManagementException Throwing exception */ public static void setResourcePermissions(String username, String visibility, String[] roles, String artifactPath) throws APIManagementException { try { String resourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + artifactPath); String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(username)); if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int tenantId = ServiceReferenceHolder.getInstance().getRealmService(). getTenantManager().getTenantId(tenantDomain); // calculate resource path RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager (ServiceReferenceHolder.getUserRealm()); resourcePath = authorizationManager.computePathOnMount(resourcePath); org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). getTenantUserRealm(tenantId).getAuthorizationManager(); if (visibility != null && APIConstants.API_RESTRICTED_VISIBILITY.equalsIgnoreCase(visibility)) { boolean isRoleEveryOne = false; /*If no roles have defined, authorize for everyone role */ if (roles != null) { if (roles.length == 1 && "".equals(roles[0])) { authManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); isRoleEveryOne = true; } else { for (String role : roles) { if (APIConstants.EVERYONE_ROLE.equalsIgnoreCase(role)) { isRoleEveryOne = true; } authManager.authorizeRole(role, resourcePath, ActionConstants.GET); } } } if (!isRoleEveryOne) { authManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); } authManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else if (visibility != null && APIConstants.API_PRIVATE_VISIBILITY.equalsIgnoreCase(visibility)) { authManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else if (visibility != null && APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(visibility)) { /*If no roles have defined, deny access for everyone & anonymous role */ if (roles == null) { authManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else { for (String role : roles) { authManager.denyRole(role, resourcePath, ActionConstants.GET); } } } else { authManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } } else { RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager 
(ServiceReferenceHolder.getUserRealm()); if (visibility != null && APIConstants.API_RESTRICTED_VISIBILITY.equalsIgnoreCase(visibility)) { boolean isRoleEveryOne = false; if (roles != null) { for (String role : roles) { if (APIConstants.EVERYONE_ROLE.equalsIgnoreCase(role)) { isRoleEveryOne = true; } authorizationManager.authorizeRole(role, resourcePath, ActionConstants.GET); } } if (!isRoleEveryOne) { authorizationManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); } authorizationManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else if (visibility != null && APIConstants.API_PRIVATE_VISIBILITY.equalsIgnoreCase(visibility)) { authorizationManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authorizationManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else if (visibility != null && APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(visibility)) { /*If no roles have defined, deny access for everyone & anonymous role */ if (roles == null) { authorizationManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authorizationManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else { for (String role : roles) { authorizationManager.denyRole(role, resourcePath, ActionConstants.GET); } } } else { authorizationManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authorizationManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } } } catch (UserStoreException e) { throw new APIManagementException("Error while adding role permissions to API", e); } } public static void loadTenantAPIPolicy(String tenant, int tenantID) throws APIManagementException { String tierBasePath = CarbonUtils.getCarbonHome() + File.separator + "repository" + File.separator + "resources" + File.separator + "default-tiers" + File.separator; String apiTierFilePath = tierBasePath + APIConstants.DEFAULT_API_TIER_FILE_NAME; String appTierFilePath = tierBasePath + APIConstants.DEFAULT_APP_TIER_FILE_NAME; String resTierFilePath = tierBasePath + APIConstants.DEFAULT_RES_TIER_FILE_NAME; loadTenantAPIPolicy(tenantID, APIConstants.API_TIER_LOCATION, apiTierFilePath); loadTenantAPIPolicy(tenantID, APIConstants.APP_TIER_LOCATION, appTierFilePath); loadTenantAPIPolicy(tenantID, APIConstants.RES_TIER_LOCATION, resTierFilePath); } /** * Load the throttling policy to the registry for tenants * * @param tenantID * @param location * @param fileName * @throws APIManagementException */ private static void loadTenantAPIPolicy(int tenantID, String location, String fileName) throws APIManagementException { InputStream inputStream = null; try { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantID); if (govRegistry.resourceExists(location)) { if (log.isDebugEnabled()) { log.debug("Tier policies already uploaded to the tenant's registry space"); } return; } if (log.isDebugEnabled()) { log.debug("Adding API tier policies to the tenant's registry"); } File defaultTiers = new File(fileName); if (!defaultTiers.exists()) { log.info("Default tier policies not found in : " + fileName); return; } inputStream = FileUtils.openInputStream(defaultTiers); byte[] data = IOUtils.toByteArray(inputStream); Resource resource = govRegistry.newResource(); resource.setContent(data); govRegistry.put(location, 
resource); } catch (RegistryException e) { throw new APIManagementException("Error while saving policy information to the registry", e); } catch (IOException e) { throw new APIManagementException("Error while reading policy file content", e); } finally { if (inputStream != null) { try { inputStream.close(); } catch (IOException e) { log.error("Error when closing input stream", e); } } } } /** * Load the External API Store Configuration to the registry * * @param tenantID * @throws org.wso2.carbon.apimgt.api.APIManagementException */ public static void loadTenantExternalStoreConfig(int tenantID) throws APIManagementException { try { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantID); if (govRegistry.resourceExists(APIConstants.EXTERNAL_API_STORES_LOCATION)) { log.debug("External Stores configuration already uploaded to the registry"); return; } if (log.isDebugEnabled()) { log.debug("Adding External Stores configuration to the tenant's registry"); } InputStream inputStream = APIManagerComponent.class.getResourceAsStream("/externalstores/default-external-api-stores.xml"); byte[] data = IOUtils.toByteArray(inputStream); Resource resource = govRegistry.newResource(); resource.setContent(data); govRegistry.put(APIConstants.EXTERNAL_API_STORES_LOCATION, resource); /*set resource permission*/ org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService().getTenantUserRealm(tenantID). getAuthorizationManager(); String resourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + APIConstants.EXTERNAL_API_STORES_LOCATION); authManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); } catch (RegistryException e) { throw new APIManagementException("Error while saving External Stores configuration information to the " + "registry", e); } catch (IOException e) { throw new APIManagementException("Error while reading External Stores configuration file content", e); } catch (UserStoreException e) { throw new APIManagementException("Error while setting permission to External Stores configuration file", e); } } /** * Load the Google Analytics Configuration to the registry * * @param tenantID * @throws APIManagementException */ public static void loadTenantGAConfig(int tenantID) throws APIManagementException { InputStream inputStream = null; try { RegistryService registryService = ServiceReferenceHolder.getInstance() .getRegistryService(); UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantID); if (govRegistry.resourceExists(APIConstants.GA_CONFIGURATION_LOCATION)) { log.debug("Google Analytics configuration already uploaded to the registry"); return; } if (log.isDebugEnabled()) { log.debug("Adding Google Analytics configuration to the tenant's registry"); } inputStream = APIManagerComponent.class.getResourceAsStream("/statistics/default-ga-config.xml"); byte[] data = IOUtils.toByteArray(inputStream); Resource resource = govRegistry.newResource(); resource.setContent(data); govRegistry.put(APIConstants.GA_CONFIGURATION_LOCATION, resource); /*set resource permission*/ org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). 
getTenantUserRealm(tenantID).getAuthorizationManager();
            String resourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
                    APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
                            RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + APIConstants.GA_CONFIGURATION_LOCATION);
            authManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET);
        } catch (RegistryException e) {
            throw new APIManagementException("Error while saving Google Analytics configuration information to the registry", e);
        } catch (IOException e) {
            throw new APIManagementException("Error while reading Google Analytics configuration file content", e);
        } catch (UserStoreException e) {
            throw new APIManagementException("Error while setting permission to Google Analytics configuration file", e);
        } finally {
            try {
                if (inputStream != null) {
                    inputStream.close();
                }
            } catch (IOException e) {
                if (log.isWarnEnabled()) {
                    log.warn("Error while closing the input stream", e);
                }
            }
        }
    }

    public static void loadTenantWorkFlowExtensions(int tenantID) throws APIManagementException {
        // TODO: Merge different resource loading methods and create a single method.
        try {
            RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
            UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantID);
            if (govRegistry.resourceExists(APIConstants.WORKFLOW_EXECUTOR_LOCATION)) {
                log.debug("Workflow extensions configuration already uploaded to the registry");
                return;
            }
            if (log.isDebugEnabled()) {
                log.debug("Adding workflow extensions configuration to the tenant's registry");
            }
            InputStream inputStream =
                    APIManagerComponent.class.getResourceAsStream("/workflowextensions/default-workflow-extensions.xml");
            byte[] data = IOUtils.toByteArray(inputStream);
            Resource resource = govRegistry.newResource();
            resource.setContent(data);
            resource.setMediaType(APIConstants.WORKFLOW_MEDIA_TYPE);
            govRegistry.put(APIConstants.WORKFLOW_EXECUTOR_LOCATION, resource);
        } catch (RegistryException e) {
            throw new APIManagementException("Error while saving workflow extensions configuration information to the registry", e);
        } catch (IOException e) {
            throw new APIManagementException("Error while reading workflow extensions configuration file content", e);
        }
    }

    /**
     * @param tenantId
     * @throws APIManagementException
     */
    public static void loadTenantSelfSignUpConfigurations(int tenantId) throws APIManagementException {
        try {
            RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
            UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantId);
            if (govRegistry.resourceExists(APIConstants.SELF_SIGN_UP_CONFIG_LOCATION)) {
                log.debug("Self signup configuration already uploaded to the registry");
                return;
            }
            if (log.isDebugEnabled()) {
                log.debug("Adding Self signup configuration to the tenant's registry");
            }
            InputStream inputStream;
            if (tenantId == org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_ID) {
                inputStream = APIManagerComponent.class.getResourceAsStream("/signupconfigurations/default-sign-up-config.xml");
            } else {
                inputStream = APIManagerComponent.class.getResourceAsStream("/signupconfigurations/tenant-sign-up-config.xml");
            }
            byte[] data = IOUtils.toByteArray(inputStream);
            Resource resource = govRegistry.newResource();
            resource.setContent(data);
            resource.setMediaType(APIConstants.SELF_SIGN_UP_CONFIG_MEDIA_TYPE);
            govRegistry.put(APIConstants.SELF_SIGN_UP_CONFIG_LOCATION, resource);
        } catch (RegistryException e) {
            throw new APIManagementException("Error while
saving Self signup configuration information to the registry", e); } catch (IOException e) { throw new APIManagementException("Error while reading Self signup configuration file content", e); } } public static void loadTenantConf(int tenantID) throws APIManagementException { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); try { UserRegistry registry = registryService.getConfigSystemRegistry(tenantID); if (registry.resourceExists(APIConstants.API_TENANT_CONF_LOCATION)) { log.debug("Tenant conf already uploaded to the registry"); return; } String tenantConfLocation = CarbonUtils.getCarbonHome() + File.separator + APIConstants.RESOURCE_FOLDER_LOCATION + File.separator + APIConstants.API_TENANT_CONF; File tenantConfFile = new File(tenantConfLocation); byte[] data; if (tenantConfFile.exists()) { // Load conf from resources directory in pack if it exists FileInputStream fileInputStream = new FileInputStream(tenantConfFile); data = IOUtils.toByteArray(fileInputStream); } else { // Fallback to loading the conf that is stored at jar level if file does not exist in pack InputStream inputStream = APIManagerComponent.class.getResourceAsStream("/tenant/" + APIConstants.API_TENANT_CONF); data = IOUtils.toByteArray(inputStream); } log.debug("Adding tenant config to the registry"); Resource resource = registry.newResource(); resource.setMediaType(APIConstants.APPLICATION_JSON_MEDIA_TYPE); resource.setContent(data); registry.put(APIConstants.API_TENANT_CONF_LOCATION, resource); } catch (RegistryException e) { throw new APIManagementException("Error while saving tenant conf to the registry", e); } catch (IOException e) { throw new APIManagementException("Error while reading tenant conf file content", e); } } /** * @param tenantId * @throws APIManagementException */ public static void createSelfSignUpRoles(int tenantId) throws APIManagementException { try { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantId); if (govRegistry.resourceExists(APIConstants.SELF_SIGN_UP_CONFIG_LOCATION)) { Resource resource = govRegistry.get(APIConstants.SELF_SIGN_UP_CONFIG_LOCATION); InputStream content = resource.getContentStream(); DocumentBuilderFactory factory = getSecuredDocumentBuilder(); factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); DocumentBuilder parser = factory.newDocumentBuilder(); Document dc = parser.parse(content); boolean enableSubscriberRoleCreation = isSubscriberRoleCreationEnabled(tenantId); String signUpDomain = dc.getElementsByTagName(APIConstants.SELF_SIGN_UP_REG_DOMAIN_ELEM).item(0) .getFirstChild().getNodeValue(); if (enableSubscriberRoleCreation) { int roleLength = dc.getElementsByTagName(APIConstants.SELF_SIGN_UP_REG_ROLE_NAME_ELEMENT) .getLength(); for (int i = 0; i < roleLength; i++) { String roleName = dc.getElementsByTagName(APIConstants.SELF_SIGN_UP_REG_ROLE_NAME_ELEMENT) .item(i).getFirstChild().getNodeValue(); boolean isExternalRole = Boolean.parseBoolean(dc .getElementsByTagName(APIConstants.SELF_SIGN_UP_REG_ROLE_IS_EXTERNAL).item(i) .getFirstChild().getNodeValue()); if (roleName != null) { // If isExternalRole==false ;create the subscriber role as an internal role if (isExternalRole && signUpDomain != null) { roleName = signUpDomain.toUpperCase() + CarbonConstants.DOMAIN_SEPARATOR + roleName; } else { roleName = UserCoreConstants.INTERNAL_DOMAIN + CarbonConstants.DOMAIN_SEPARATOR + roleName; } 
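                        // roleName is now either qualified with the external sign-up domain or with the
                        // Internal domain; createSubscriberRole adds it only if it does not already exist.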
createSubscriberRole(roleName, tenantId); } } } } if (log.isDebugEnabled()) { log.debug("Adding Self signup configuration to the tenant's registry"); } } catch (RegistryException e) { throw new APIManagementException("Error while getting Self signup role information from the registry", e); } catch (ParserConfigurationException e) { throw new APIManagementException("Error while getting Self signup role information from the registry", e); } catch (SAXException e) { throw new APIManagementException("Error while getting Self signup role information from the registry", e); } catch (IOException e) { throw new APIManagementException("Error while getting Self signup role information from the registry", e); } } /** * Returns whether subscriber role creation enabled for the given tenant in tenant-conf.json * * @param tenantId id of the tenant * @return true if subscriber role creation enabled in tenant-conf.json */ public static boolean isSubscriberRoleCreationEnabled(int tenantId) throws APIManagementException { String tenantDomain = getTenantDomainFromTenantId(tenantId); JSONObject defaultRoles = getTenantDefaultRoles(tenantDomain); boolean isSubscriberRoleCreationEnabled = false; if (defaultRoles != null) { JSONObject subscriberRoleConfig = (JSONObject) defaultRoles .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_SUBSCRIBER_ROLE); isSubscriberRoleCreationEnabled = isRoleCreationEnabled(subscriberRoleConfig); } return isSubscriberRoleCreationEnabled; } /** * Create default roles specified in APIM per-tenant configuration file * * @param tenantId id of the tenant * @throws APIManagementException */ public static void createDefaultRoles(int tenantId) throws APIManagementException { String tenantDomain = getTenantDomainFromTenantId(tenantId); JSONObject defaultRoles = getTenantDefaultRoles(tenantDomain); if (defaultRoles != null) { // create publisher role if it's creation is enabled in tenant-conf.json JSONObject publisherRoleConfig = (JSONObject) defaultRoles .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_PUBLISHER_ROLE); if (isRoleCreationEnabled(publisherRoleConfig)) { String publisherRoleName = String.valueOf(publisherRoleConfig .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_ROLENAME)); if (!StringUtils.isBlank(publisherRoleName)) { createPublisherRole(publisherRoleName, tenantId); } } // create creator role if it's creation is enabled in tenant-conf.json JSONObject creatorRoleConfig = (JSONObject) defaultRoles .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_CREATOR_ROLE); if (isRoleCreationEnabled(creatorRoleConfig)) { String creatorRoleName = String.valueOf(creatorRoleConfig .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_ROLENAME)); if (!StringUtils.isBlank(creatorRoleName)) { createCreatorRole(creatorRoleName, tenantId); } } createSelfSignUpRoles(tenantId); } } /** * Returns whether role creation enabled for the provided role config * * @param roleConfig role config in tenat-conf.json * @return true if role creation enabled for the provided role config */ private static boolean isRoleCreationEnabled (JSONObject roleConfig) { boolean roleCreationEnabled = false; if (roleConfig != null && roleConfig.get( APIConstants.API_TENANT_CONF_DEFAULT_ROLES_CREATE_ON_TENANT_LOAD) != null && (Boolean) (roleConfig.get( APIConstants.API_TENANT_CONF_DEFAULT_ROLES_CREATE_ON_TENANT_LOAD))) { roleCreationEnabled = true; } return roleCreationEnabled; } public static boolean isAnalyticsEnabled() { return APIManagerAnalyticsConfiguration.getInstance().isAnalyticsEnabled(); } /** * Add all the custom sequences of 
given type to registry * * @param registry Registry instance * @param customSequenceType Custom sequence type which is in/out or fault * @throws APIManagementException */ public static void addDefinedAllSequencesToRegistry(UserRegistry registry, String customSequenceType) throws APIManagementException { InputStream inSeqStream = null; String seqFolderLocation = APIConstants.API_CUSTOM_SEQUENCES_FOLDER_LOCATION + File.separator + customSequenceType; try { File inSequenceDir = new File(seqFolderLocation); File[] sequences; sequences = inSequenceDir.listFiles(); if (sequences != null) { //Tracks whether new sequences are there to deploy boolean availableNewSequences = false; //Tracks whether json_fault.xml is in the registry boolean jsonFaultSeqInRegistry = false; for (File sequenceFile : sequences) { String sequenceFileName = sequenceFile.getName(); String regResourcePath = APIConstants.API_CUSTOM_SEQUENCE_LOCATION + '/' + customSequenceType + '/' + sequenceFileName; if (registry.resourceExists(regResourcePath)) { if (APIConstants.API_CUSTOM_SEQ_JSON_FAULT.equals(sequenceFileName)) { jsonFaultSeqInRegistry = true; } if (log.isDebugEnabled()) { log.debug("The sequence file with the name " + sequenceFileName + " already exists in the registry path " + regResourcePath); } } else { availableNewSequences = true; if (log.isDebugEnabled()) { log.debug( "Adding sequence file with the name " + sequenceFileName + " to the registry path " + regResourcePath); } inSeqStream = new FileInputStream(sequenceFile); byte[] inSeqData = IOUtils.toByteArray(inSeqStream); Resource inSeqResource = registry.newResource(); inSeqResource.setContent(inSeqData); registry.put(regResourcePath, inSeqResource); } } //On the fly migration of json_fault.xml for 2.0.0 to 2.1.0 if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT.equals(customSequenceType) && availableNewSequences && jsonFaultSeqInRegistry) { String oldFaultStatHandler = "org.wso2.carbon.apimgt.usage.publisher.APIMgtFaultHandler"; String newFaultStatHandler = "org.wso2.carbon.apimgt.gateway.handlers.analytics.APIMgtFaultHandler"; String regResourcePath = APIConstants.API_CUSTOM_SEQUENCE_LOCATION + '/' + customSequenceType + '/' + APIConstants.API_CUSTOM_SEQ_JSON_FAULT; Resource jsonFaultSeqResource = registry.get(regResourcePath); String oldJsonFaultSeqContent = new String((byte[]) jsonFaultSeqResource.getContent(), Charset.defaultCharset()); if (oldJsonFaultSeqContent != null && oldJsonFaultSeqContent.contains(oldFaultStatHandler)) { String newJsonFaultContent = oldJsonFaultSeqContent.replace(oldFaultStatHandler, newFaultStatHandler); jsonFaultSeqResource.setContent(newJsonFaultContent); registry.put(regResourcePath, jsonFaultSeqResource); } } } else { log.error( "Custom sequence template location unavailable for custom sequence type " + customSequenceType + " : " + seqFolderLocation ); } } catch (RegistryException e) { throw new APIManagementException( "Error while saving defined sequences to the registry ", e); } catch (IOException e) { throw new APIManagementException("Error while reading defined sequence ", e); } finally { IOUtils.closeQuietly(inSeqStream); } } /** * Adds the sequences defined in repository/resources/customsequences folder to tenant registry * * @param tenantID tenant Id * @throws APIManagementException */ public static void writeDefinedSequencesToTenantRegistry(int tenantID) throws APIManagementException { try { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry govRegistry = 
registryService.getGovernanceSystemRegistry(tenantID); //Add all custom in,out and fault sequences to tenant registry APIUtil.addDefinedAllSequencesToRegistry(govRegistry, APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN); APIUtil.addDefinedAllSequencesToRegistry(govRegistry, APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT); APIUtil.addDefinedAllSequencesToRegistry(govRegistry, APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT); } catch (RegistryException e) { throw new APIManagementException( "Error while saving defined sequences to the registry of tenant with id " + tenantID, e); } } /** * Load the API RXT to the registry for tenants * * @param tenant * @param tenantID * @throws APIManagementException */ public static void loadloadTenantAPIRXT(String tenant, int tenantID) throws APIManagementException { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry registry = null; try { registry = registryService.getGovernanceSystemRegistry(tenantID); } catch (RegistryException e) { throw new APIManagementException("Error when create registry instance ", e); } String rxtDir = CarbonUtils.getCarbonHome() + File.separator + "repository" + File.separator + "resources" + File.separator + "rxts"; File file = new File(rxtDir); FilenameFilter filenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { // if the file extension is .rxt return true, else false return name.endsWith(".rxt"); } }; String[] rxtFilePaths = file.list(filenameFilter); if (rxtFilePaths == null) { throw new APIManagementException("rxt files not found in directory " + rxtDir); } for (String rxtPath : rxtFilePaths) { String resourcePath = GovernanceConstants.RXT_CONFIGS_PATH + RegistryConstants.PATH_SEPARATOR + rxtPath; //This is "registry" is a governance registry instance, therefore calculate the relative path to governance. String govRelativePath = RegistryUtils.getRelativePathToOriginal(resourcePath, APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH)); try { // calculate resource path RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager (ServiceReferenceHolder.getUserRealm()); resourcePath = authorizationManager.computePathOnMount(resourcePath); org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). getTenantUserRealm(tenantID).getAuthorizationManager(); if (registry.resourceExists(govRelativePath)) { // set anonymous user permission to RXTs authManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); continue; } String rxt = FileUtil.readFileToString(rxtDir + File.separator + rxtPath); Resource resource = registry.newResource(); resource.setContent(rxt.getBytes(Charset.defaultCharset())); resource.setMediaType(APIConstants.RXT_MEDIA_TYPE); registry.put(govRelativePath, resource); authManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } catch (UserStoreException e) { throw new APIManagementException("Error while adding role permissions to API", e); } catch (IOException e) { String msg = "Failed to read rxt files"; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to add rxt to registry "; throw new APIManagementException(msg, e); } } } /** * Converting the user store domain name to uppercase. * * @param username Username to be modified * @return Username with domain name set to uppercase. 
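     * For example (assuming the default {@code "/"} domain separator), {@code "foo.com/john"} becomes
     * {@code "FOO.COM/john"}, while a plain username without a domain is returned unchanged.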
*/ public static String setDomainNameToUppercase(String username) { String modifiedName = username; if (username != null) { String[] nameParts = username.split(CarbonConstants.DOMAIN_SEPARATOR); if (nameParts.length > 1) { modifiedName = nameParts[0].toUpperCase() + CarbonConstants.DOMAIN_SEPARATOR + nameParts[1]; } } return modifiedName; } /** * Create APIM Subscriber role with the given name in specified tenant * * @param roleName role name * @param tenantId id of the tenant * @throws APIManagementException */ public static void createSubscriberRole(String roleName, int tenantId) throws APIManagementException { Permission[] subscriberPermissions = new Permission[] { new Permission(APIConstants.Permissions.LOGIN, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.API_SUBSCRIBE, UserMgtConstants.EXECUTE_ACTION) }; createRole (roleName, subscriberPermissions, tenantId); } /** * Create APIM Publisher roles with the given name in specified tenant * * @param roleName role name * @param tenantId id of the tenant * @throws APIManagementException */ public static void createPublisherRole(String roleName, int tenantId) throws APIManagementException { Permission[] publisherPermissions = new Permission[] { new Permission(APIConstants.Permissions.LOGIN, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.API_PUBLISH, UserMgtConstants.EXECUTE_ACTION) }; createRole (roleName, publisherPermissions, tenantId); } /** * Create APIM Creator roles with the given name in specified tenant * * @param roleName role name * @param tenantId id of the tenant * @throws APIManagementException */ public static void createCreatorRole(String roleName, int tenantId) throws APIManagementException { Permission[] creatorPermissions = new Permission[] { new Permission(APIConstants.Permissions.LOGIN, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.API_CREATE, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.CONFIGURE_GOVERNANCE, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.RESOURCE_GOVERN, UserMgtConstants.EXECUTE_ACTION)}; createRole (roleName, creatorPermissions, tenantId); } /** * Creates a role with a given set of permissions for the specified tenant * * @param roleName role name * @param permissions a set of permissions to be associated with the role * @param tenantId id of the tenant * @throws APIManagementException */ public static void createRole(String roleName, Permission[] permissions, int tenantId) throws APIManagementException { try { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); UserRealm realm; org.wso2.carbon.user.api.UserRealm tenantRealm; UserStoreManager manager; if (tenantId < 0) { realm = realmService.getBootstrapRealm(); manager = realm.getUserStoreManager(); } else { tenantRealm = realmService.getTenantUserRealm(tenantId); manager = tenantRealm.getUserStoreManager(); } if (!manager.isExistingRole(roleName)) { if (log.isDebugEnabled()) { log.debug("Creating role: " + roleName); } String tenantAdminName = ServiceReferenceHolder.getInstance().getRealmService() .getTenantUserRealm(tenantId).getRealmConfiguration().getAdminUserName(); String[] userList = new String[] { tenantAdminName }; manager.addRole(roleName, userList, permissions); } } catch (UserStoreException e) { throw new APIManagementException("Error while creating role: " + roleName, e); } } public void setupSelfRegistration(APIManagerConfiguration config, int tenantId) throws 
APIManagementException {
        boolean enabled = Boolean.parseBoolean(config.getFirstProperty(APIConstants.SELF_SIGN_UP_ENABLED));
        if (!enabled) {
            return;
        }
        // Create the subscriber role as an internal role
        String role = UserCoreConstants.INTERNAL_DOMAIN + CarbonConstants.DOMAIN_SEPARATOR +
                config.getFirstProperty(APIConstants.SELF_SIGN_UP_ROLE);
        if ((UserCoreConstants.INTERNAL_DOMAIN + CarbonConstants.DOMAIN_SEPARATOR).equals(role)) {
            // Required parameter missing - Throw an exception and interrupt startup
            throw new APIManagementException("Required subscriber role parameter missing " +
                    "in the self sign up configuration");
        }
        try {
            RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
            UserRealm realm;
            org.wso2.carbon.user.api.UserRealm tenantRealm;
            UserStoreManager manager;
            if (tenantId < 0) {
                realm = realmService.getBootstrapRealm();
                manager = realm.getUserStoreManager();
            } else {
                tenantRealm = realmService.getTenantUserRealm(tenantId);
                manager = tenantRealm.getUserStoreManager();
            }
            if (!manager.isExistingRole(role)) {
                if (log.isDebugEnabled()) {
                    log.debug("Creating subscriber role: " + role);
                }
                Permission[] subscriberPermissions = new Permission[]{
                        new Permission("/permission/admin/login", UserMgtConstants.EXECUTE_ACTION),
                        new Permission(APIConstants.Permissions.API_SUBSCRIBE, UserMgtConstants.EXECUTE_ACTION)};
                String tenantAdminName = ServiceReferenceHolder.getInstance().getRealmService()
                        .getTenantUserRealm(tenantId).getRealmConfiguration().getAdminUserName();
                String[] userList = new String[]{tenantAdminName};
                manager.addRole(role, userList, subscriberPermissions);
            }
        } catch (UserStoreException e) {
            throw new APIManagementException("Error while creating subscriber role: " + role + " - " +
                    "Self registration might not function properly.", e);
        }
    }

    public static String removeAnySymbolFromUriTempate(String uriTemplate) {
        if (uriTemplate != null) {
            int anySymbolIndex = uriTemplate.indexOf("/*");
            if (anySymbolIndex != -1) {
                return uriTemplate.substring(0, anySymbolIndex);
            }
        }
        return uriTemplate;
    }

    public static float getAverageRating(APIIdentifier apiId) throws APIManagementException {
        return ApiMgtDAO.getInstance().getAverageRating(apiId);
    }

    public static float getAverageRating(int apiId) throws APIManagementException {
        return ApiMgtDAO.getInstance().getAverageRating(apiId);
    }

    public static List<Tenant> getAllTenantsWithSuperTenant() throws UserStoreException {
        Tenant[] tenants = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getAllTenants();
        ArrayList<Tenant> tenantArrayList = new ArrayList<Tenant>();
        Collections.addAll(tenantArrayList, tenants);
        Tenant superAdminTenant = new Tenant();
        superAdminTenant.setDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME);
        superAdminTenant.setId(org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_ID);
        superAdminTenant.setAdminName(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
        tenantArrayList.add(superAdminTenant);
        return tenantArrayList;
    }

    /**
     * In a multi-tenant environment, publishers should only be allowed to revoke tokens generated within their own
     * tenant domain. The super tenant should not see tenant-created tokens and vice versa. This method checks
     * whether the logged in user has permission to revoke a given user's tokens.
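     * <p>
     * Illustrative example (hypothetical usernames): both users below resolve to the same tenant domain, so
     * revocation is permitted; the cross-tenant combination is not.
     * <pre>{@code
     * APIUtil.isLoggedInUserAuthorizedToRevokeToken("admin", "bob");         // true  (both in the super tenant)
     * APIUtil.isLoggedInUserAuthorizedToRevokeToken("admin", "bob@abc.com"); // false (different tenant domains)
     * }</pre>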
* * @param loggedInUser current logged in user to publisher * @param authorizedUser access token owner * @return */ public static boolean isLoggedInUserAuthorizedToRevokeToken(String loggedInUser, String authorizedUser) { String loggedUserTenantDomain = MultitenantUtils.getTenantDomain(loggedInUser); String authorizedUserTenantDomain = MultitenantUtils.getTenantDomain(authorizedUser); if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(loggedUserTenantDomain) && MultitenantConstants .SUPER_TENANT_DOMAIN_NAME.equals(authorizedUserTenantDomain)) { return true; } else if (authorizedUserTenantDomain.equals(loggedUserTenantDomain)) { return true; } return false; } public static int getApplicationId(String appName, String userId) throws APIManagementException { return ApiMgtDAO.getInstance().getApplicationId(appName, userId); } public static boolean isAPIManagementEnabled() { return Boolean.parseBoolean(CarbonUtils.getServerConfiguration().getFirstProperty("APIManagement.Enabled")); } public static boolean isLoadAPIContextsAtStartup() { return Boolean.parseBoolean(CarbonUtils.getServerConfiguration().getFirstProperty( "APIManagement.LoadAPIContextsInServerStartup")); } public static Set<APIStore> getExternalAPIStores(int tenantId) throws APIManagementException { SortedSet<APIStore> apistoreSet = new TreeSet<APIStore>(new APIStoreNameComparator()); apistoreSet.addAll(getExternalStores(tenantId)); return apistoreSet; } public static boolean isAllowDisplayAPIsWithMultipleStatus() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). getAPIManagerConfigurationService().getAPIManagerConfiguration(); String displayAllAPIs = config.getFirstProperty(APIConstants.API_STORE_DISPLAY_ALL_APIS); if (displayAllAPIs == null) { log.warn("The configurations related to show deprecated APIs in APIStore " + "are missing in api-manager.xml."); return false; } return Boolean.parseBoolean(displayAllAPIs); } public static boolean isAllowDisplayMultipleVersions() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). 
                getAPIManagerConfigurationService().getAPIManagerConfiguration();
        String displayMultiVersions = config.getFirstProperty(APIConstants.API_STORE_DISPLAY_MULTIPLE_VERSIONS);
        if (displayMultiVersions == null) {
            log.warn("The configurations related to show multiple versions of API in APIStore " +
                    "are missing in api-manager.xml.");
            return false;
        }
        return Boolean.parseBoolean(displayMultiVersions);
    }

    public static Set<APIStore> getExternalAPIStores(Set<APIStore> inputStores, int tenantId)
            throws APIManagementException {
        SortedSet<APIStore> apiStores = new TreeSet<APIStore>(new APIStoreNameComparator());
        apiStores.addAll(getExternalStores(tenantId));
        // Retain only the stores that are contained in the configuration
        inputStores.retainAll(apiStores);
        boolean exists = false;
        if (!apiStores.isEmpty()) {
            for (APIStore store : apiStores) {
                for (APIStore inputStore : inputStores) {
                    if (inputStore.getName().equals(store.getName())) {
                        // If the configured API store is already stored in the DB, ignore adding it again
                        exists = true;
                    }
                }
                if (!exists) {
                    inputStores.add(store);
                }
                exists = false;
            }
        }
        return inputStores;
    }

    public static boolean isAPIsPublishToExternalAPIStores(int tenantId) throws APIManagementException {
        return !getExternalStores(tenantId).isEmpty();
    }

    public static boolean isAPIGatewayKeyCacheEnabled() {
        try {
            APIManagerConfiguration config = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService()
                    .getAPIManagerConfiguration();
            String serviceURL = config.getFirstProperty(APIConstants.GATEWAY_TOKEN_CACHE_ENABLED);
            return Boolean.parseBoolean(serviceURL);
        } catch (Exception e) {
            log.error("Did not find a valid API Validation Information cache configuration. Using the default configuration.", e);
        }
        return true;
    }

    public static Cache getAPIContextCache() {
        CacheManager contextCacheManager = Caching.getCacheManager(APIConstants.API_CONTEXT_CACHE_MANAGER).
                getCache(APIConstants.API_CONTEXT_CACHE).getCacheManager();
        if (!isContextCacheInitialized) {
            isContextCacheInitialized = true;
            return contextCacheManager.<String, Boolean>createCacheBuilder(APIConstants.API_CONTEXT_CACHE_MANAGER).
setExpiry(CacheConfiguration.ExpiryType.MODIFIED, new CacheConfiguration.Duration(TimeUnit.DAYS, APIConstants.API_CONTEXT_CACHE_EXPIRY_TIME_IN_DAYS)).setStoreByValue(false).build(); } else { return Caching.getCacheManager(APIConstants.API_CONTEXT_CACHE_MANAGER).getCache(APIConstants.API_CONTEXT_CACHE); } } /** * Get active tenant domains * * @return * @throws UserStoreException */ public static Set<String> getActiveTenantDomains() throws UserStoreException { Set<String> tenantDomains; Tenant[] tenants = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getAllTenants(); if (tenants == null || tenants.length == 0) { tenantDomains = Collections.<String>emptySet(); } else { tenantDomains = new HashSet<String>(); for (Tenant tenant : tenants) { if (tenant.isActive()) { tenantDomains.add(tenant.getDomain()); } } if (!tenantDomains.isEmpty()) { tenantDomains.add(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } return tenantDomains; } /** * Retrieves the role list of system * * @throws APIManagementException If an error occurs */ public static String[] getRoleNames(String username) throws APIManagementException { String tenantDomain = MultitenantUtils.getTenantDomain(username); try { if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); UserStoreManager manager = ServiceReferenceHolder.getInstance().getRealmService() .getTenantUserRealm(tenantId).getUserStoreManager(); return manager.getRoleNames(); } else { return AuthorizationManager.getInstance().getRoleNames(); } } catch (UserStoreException e) { log.error("Error while getting all the roles", e); return new String[0]; } } /** * check whether given role is exist * * @param userName logged user * @param roleName role name need to check * @return true if exist and false if not * @throws APIManagementException If an error occurs */ public static boolean isRoleNameExist(String userName, String roleName) throws APIManagementException { if (roleName == null || StringUtils.isEmpty(roleName.trim())) { return true; } //disable role validation if "disableRoleValidationAtScopeCreation" system property is set String disableRoleValidation = System.getProperty(DISABLE_ROLE_VALIDATION_AT_SCOPE_CREATION); if (Boolean.parseBoolean(disableRoleValidation)) { return true; } org.wso2.carbon.user.api.UserStoreManager userStoreManager; try { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(MultitenantUtils.getTenantDomain(userName)); userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager(); String[] roles = roleName.split(","); for (String role : roles) { if (!userStoreManager.isExistingRole(role)) { return false; } } } catch (org.wso2.carbon.user.api.UserStoreException e) { log.error("Error when getting the list of roles", e); } return true; } /** * Create API Definition in JSON * * @param api API * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to generate the content and save * @deprecated */ @Deprecated public static String createSwaggerJSONContent(API api) throws APIManagementException { APIIdentifier identifier = api.getId(); APIManagerConfiguration config = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration(); Environment environment = 
(Environment) config.getApiGatewayEnvironments().values().toArray()[0]; String endpoints = environment.getApiGatewayEndpoint(); String[] endpointsSet = endpoints.split(","); String apiContext = api.getContext(); String version = identifier.getVersion(); Set<URITemplate> uriTemplates = api.getUriTemplates(); String description = api.getDescription(); // With the new context version strategy, the URL prefix is the apiContext. the verison will be embedded in // the apiContext. String urlPrefix = apiContext; if (endpointsSet.length < 1) { throw new APIManagementException("Error in creating JSON representation of the API" + identifier.getApiName()); } if (description == null || "".equals(description)) { description = ""; } else { description = description.trim(); } Map<String, List<Operation>> uriTemplateDefinitions = new HashMap<String, List<Operation>>(); List<APIResource> apis = new ArrayList<APIResource>(); for (URITemplate template : uriTemplates) { List<Operation> ops; List<Parameter> parameters; String path = urlPrefix + APIUtil.removeAnySymbolFromUriTempate(template.getUriTemplate()); /* path exists in uriTemplateDefinitions */ if (uriTemplateDefinitions.get(path) != null) { ops = uriTemplateDefinitions.get(path); parameters = new ArrayList<Parameter>(); String httpVerb = template.getHTTPVerb(); /* For GET and DELETE Parameter name - Query Parameters */ if (Constants.Configuration.HTTP_METHOD_GET.equals(httpVerb) || Constants.Configuration.HTTP_METHOD_DELETE.equals(httpVerb)) { Parameter queryParam = new Parameter(APIConstants.OperationParameter.QUERY_PARAM_NAME, APIConstants.OperationParameter.QUERY_PARAM_DESCRIPTION, APIConstants.OperationParameter.PAYLOAD_PARAM_TYPE, false, false, "String"); parameters.add(queryParam); } else {/* For POST, PUT and PATCH Parameter name - Payload */ Parameter payLoadParam = new Parameter(APIConstants.OperationParameter.PAYLOAD_PARAM_NAME, APIConstants.OperationParameter.PAYLOAD_PARAM_DESCRIPTION, APIConstants.OperationParameter.PAYLOAD_PARAM_TYPE, false, false, "String"); parameters.add(payLoadParam); } Parameter authParam = new Parameter(APIConstants.OperationParameter.AUTH_PARAM_NAME, APIConstants.OperationParameter.AUTH_PARAM_DESCRIPTION, APIConstants.OperationParameter.AUTH_PARAM_TYPE, false, false, "String"); parameters.add(authParam); if (!"OPTIONS".equals(httpVerb)) { Operation op = new Operation(httpVerb, description, description, parameters); ops.add(op); } } else {/* path not exists in uriTemplateDefinitions */ ops = new ArrayList<Operation>(); parameters = new ArrayList<Parameter>(); String httpVerb = template.getHTTPVerb(); /* For GET and DELETE Parameter name - Query Parameters */ if (Constants.Configuration.HTTP_METHOD_GET.equals(httpVerb) || Constants.Configuration.HTTP_METHOD_DELETE.equals(httpVerb)) { Parameter queryParam = new Parameter(APIConstants.OperationParameter.QUERY_PARAM_NAME, APIConstants.OperationParameter.QUERY_PARAM_DESCRIPTION, APIConstants.OperationParameter.PAYLOAD_PARAM_TYPE, false, false, "String"); parameters.add(queryParam); } else {/* For POST,PUT and PATCH Parameter name - Payload */ Parameter payLoadParam = new Parameter(APIConstants.OperationParameter.PAYLOAD_PARAM_NAME, APIConstants.OperationParameter.PAYLOAD_PARAM_DESCRIPTION, APIConstants.OperationParameter.PAYLOAD_PARAM_TYPE, false, false, "String"); parameters.add(payLoadParam); } Parameter authParam = new Parameter(APIConstants.OperationParameter.AUTH_PARAM_NAME, APIConstants.OperationParameter.AUTH_PARAM_DESCRIPTION, 
APIConstants.OperationParameter.AUTH_PARAM_TYPE, false, false, "String"); parameters.add(authParam); if (!"OPTIONS".equals(httpVerb)) { Operation op = new Operation(httpVerb, description, description, parameters); ops.add(op); } uriTemplateDefinitions.put(path, ops); } } final Set<Entry<String, List<Operation>>> entries = uriTemplateDefinitions.entrySet(); for (Entry entry : entries) { APIResource apiResource = new APIResource((String) entry.getKey(), description, (List<Operation>) entry.getValue()); apis.add(apiResource); } APIDefinition apidefinition = new APIDefinition(version, APIConstants.SWAGGER_VERSION, endpointsSet[0], apiContext, apis); Gson gson = new Gson(); return gson.toJson(apidefinition); } /** * Helper method to get tenantId from userName * * @param userName user name * @return tenantId */ public static int getTenantId(String userName) { //get tenant domain from user name String tenantDomain = MultitenantUtils.getTenantDomain(userName); return getTenantIdFromTenantDomain(tenantDomain); } /** * Helper method to get tenantId from tenantDomain * * @param tenantDomain tenant Domain * @return tenantId */ public static int getTenantIdFromTenantDomain(String tenantDomain) { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); if (realmService == null) { return MultitenantConstants.SUPER_TENANT_ID; } try { return realmService.getTenantManager().getTenantId(tenantDomain); } catch (UserStoreException e) { log.error(e.getMessage(), e); } return -1; } /** * Helper method to get tenantDomain from tenantId * * @param tenantId tenant Id * @return tenantId */ public static String getTenantDomainFromTenantId(int tenantId) { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); if (realmService == null) { return MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } try { return realmService.getTenantManager().getDomain(tenantId); } catch (UserStoreException e) { log.error(e.getMessage(), e); } return null; } public static int getSuperTenantId() { return MultitenantConstants.SUPER_TENANT_ID; } /** * Helper method to get username with tenant domain. * * @param userName * @return userName with tenant domain */ public static String getUserNameWithTenantSuffix(String userName) { String userNameWithTenantPrefix = userName; String tenantDomain = MultitenantUtils.getTenantDomain(userName); if (userName != null && !userName.endsWith("@" + MultitenantConstants.SUPER_TENANT_DOMAIN_NAME) && MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { userNameWithTenantPrefix = userName + "@" + tenantDomain; } return userNameWithTenantPrefix; } /** * Build OMElement from inputstream * * @param inputStream * @return * @throws Exception */ public static OMElement buildOMElement(InputStream inputStream) throws Exception { XMLStreamReader parser; StAXOMBuilder builder; try { XMLInputFactory factory = XMLInputFactory.newInstance(); factory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false); parser = factory.createXMLStreamReader(inputStream); builder = new StAXOMBuilder(parser); } catch (XMLStreamException e) { String msg = "Error in initializing the parser."; log.error(msg, e); throw new Exception(msg, e); } return builder.getDocumentElement(); } /** * Get stored in sequences, out sequences and fault sequences from the governanceSystem registry * * @param sequenceName -The sequence to be retrieved * @param tenantId * @param direction - Direction indicates which sequences to fetch. 
Values would be * "in", "out" or "fault" * @return * @throws APIManagementException */ public static OMElement getCustomSequence(String sequenceName, int tenantId, String direction, APIIdentifier identifier) throws APIManagementException { org.wso2.carbon.registry.api.Collection seqCollection = null; try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if ("in".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION); } else if ("out".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION); } else if ("fault".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION); } if (seqCollection == null) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(getSequencePath(identifier, direction)); } if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); if (sequenceName.equals(seqElment.getAttributeValue(new QName("name")))) { return seqElment; } } } // If the sequence not found the default sequences, check in custom sequences seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(getSequencePath(identifier, direction)); if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); if (sequenceName.equals(seqElment.getAttributeValue(new QName("name")))) { return seqElment; } } } } catch (Exception e) { String msg = "Issue is in accessing the Registry"; log.error(msg); throw new APIManagementException(msg, e); } return null; } /** * Returns true if the sequence is a per API one * * @param sequenceName * @param tenantId * @param identifier API identifier * @param sequenceType in/out/fault * @return true/false * @throws APIManagementException */ public static boolean isPerAPISequence(String sequenceName, int tenantId, APIIdentifier identifier, String sequenceType) throws APIManagementException { org.wso2.carbon.registry.api.Collection seqCollection = null; try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); // If the sequence not found the default sequences, check in custom sequences if (registry.resourceExists(getSequencePath(identifier, sequenceType))) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(getSequencePath(identifier, sequenceType)); if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); if (sequenceName.equals(seqElment.getAttributeValue(new QName("name")))) { return true; } } } } } catch (RegistryException e) { String msg = "Error while retrieving registry for tenant " + tenantId; log.error(msg); throw new APIManagementException(msg, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { String msg = "Error while processing the " + sequenceType + " sequences of " + identifier + " in the registry"; 
log.error(msg); throw new APIManagementException(msg, e); } catch (Exception e) { throw new APIManagementException(e.getMessage(), e); } return false; } /** * Returns uuid correspond to the given sequence name and direction * * @param sequenceName name of the sequence * @param tenantId logged in user's tenantId * @param direction in/out/fault * @param identifier API identifier * @return uuid of the given mediation sequence or null * @throws APIManagementException If failed to get the uuid of the mediation sequence */ public static String getMediationSequenceUuid(String sequenceName, int tenantId, String direction, APIIdentifier identifier) throws APIManagementException { org.wso2.carbon.registry.api.Collection seqCollection = null; String seqCollectionPath; try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if ("in".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_SEQUENCE_LOCATION+File.separator+ APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN); } else if ("out".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_SEQUENCE_LOCATION+File.separator+ APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT); } else if ("fault".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_SEQUENCE_LOCATION+File.separator+ APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT); } if (seqCollection == null) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get (getSequencePath(identifier, direction)); } if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); String seqElmentName= seqElment.getAttributeValue(new QName("name")); if (sequenceName.equals(seqElmentName)) { return sequence.getUUID(); } } } // If the sequence not found the default sequences, check in custom sequences seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get (getSequencePath(identifier,direction)); if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); if (sequenceName.equals(seqElment.getAttributeValue(new QName("name")))) { return sequence.getUUID(); } } } } catch (Exception e) { String msg = "Issue is in accessing the Registry"; log.error(msg); throw new APIManagementException(msg, e); } return null; } /** * Returns true if sequence is set * * @param sequence * @return */ public static boolean isSequenceDefined(String sequence) { return sequence != null && !"none".equals(sequence); } /** * Return the sequence extension name. * eg: admin--testAPi--v1.00 * * @param api * @return */ public static String getSequenceExtensionName(API api) { return api.getId().getProviderName() + "--" + api.getId().getApiName() + ":v" + api.getId().getVersion(); } /** * @param token * @return */ public static String decryptToken(String token) throws CryptoException { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). 
getAPIManagerConfigurationService().getAPIManagerConfiguration(); if (Boolean.parseBoolean(config.getFirstProperty(APIConstants.ENCRYPT_TOKENS_ON_PERSISTENCE))) { return new String(CryptoUtil.getDefaultCryptoUtil().base64DecodeAndDecrypt(token), Charset.defaultCharset()); } return token; } /** * @param token * @return */ public static String encryptToken(String token) throws CryptoException { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). getAPIManagerConfigurationService().getAPIManagerConfiguration(); if (Boolean.parseBoolean(config.getFirstProperty(APIConstants.ENCRYPT_TOKENS_ON_PERSISTENCE))) { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode(token.getBytes(Charset.defaultCharset())); } return token; } public static void loadTenantRegistry(int tenantId) throws RegistryException { TenantRegistryLoader tenantRegistryLoader = APIManagerComponent.getTenantRegistryLoader(); ServiceReferenceHolder.getInstance().getIndexLoaderService().loadTenantIndex(tenantId); tenantRegistryLoader.loadTenantRegistry(tenantId); } /** * This is to get the registry resource's HTTP permlink path. * Once this issue is fixed (https://wso2.org/jira/browse/REGISTRY-2110), * we can remove this method, and get permlink from the resource. * * @param path - Registry resource path * @return {@link String} -HTTP permlink */ public static String getRegistryResourceHTTPPermlink(String path) { String schemeHttp = APIConstants.HTTP_PROTOCOL; String schemeHttps = APIConstants.HTTPS_PROTOCOL; ConfigurationContextService contetxservice = ServiceReferenceHolder.getContextService(); //First we will try to generate http permalink and if its disabled then only we will consider https int port = CarbonUtils.getTransportProxyPort(contetxservice.getServerConfigContext(), schemeHttp); if (port == -1) { port = CarbonUtils.getTransportPort(contetxservice.getServerConfigContext(), schemeHttp); } //getting https parameters if http is disabled. If proxy port is not present we will go for default port if (port == -1) { port = CarbonUtils.getTransportProxyPort(contetxservice.getServerConfigContext(), schemeHttps); } if (port == -1) { port = CarbonUtils.getTransportPort(contetxservice.getServerConfigContext(), schemeHttps); } String webContext = ServerConfiguration.getInstance().getFirstProperty("WebContextRoot"); if (webContext == null || "/".equals(webContext)) { webContext = ""; } RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); String version = ""; if (registryService == null) { log.error("Registry Service has not been set."); } else if (path != null) { try { String[] versions = registryService.getRegistry( CarbonConstants.REGISTRY_SYSTEM_USERNAME, CarbonContext.getThreadLocalCarbonContext().getTenantId()).getVersions(path); if (versions != null && versions.length > 0) { version = versions[0].substring(versions[0].lastIndexOf(";version:")); } } catch (RegistryException e) { log.error("An error occurred while determining the latest version of the " + "resource at the given path: " + path, e); } } if (port != -1 && path != null) { String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(true); return webContext + ((tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) ? 
"/" + MultitenantConstants.TENANT_AWARE_URL_PREFIX + "/" + tenantDomain : "") + "/registry/resource" + org.wso2.carbon.registry.app.Utils.encodeRegistryPath(path) + version; } return null; } public static boolean isSandboxEndpointsExists(API api) { JSONParser parser = new JSONParser(); JSONObject config = null; try { config = (JSONObject) parser.parse(api.getEndpointConfig()); if (config.containsKey("sandbox_endpoints")) { return true; } } catch (ParseException e) { log.error(APIConstants.MSG_JSON_PARSE_ERROR, e); } catch (ClassCastException e) { log.error(APIConstants.MSG_JSON_PARSE_ERROR, e); } return false; } public static boolean isProductionEndpointsExists(API api) { JSONParser parser = new JSONParser(); JSONObject config = null; try { config = (JSONObject) parser.parse(api.getEndpointConfig()); if (config.containsKey("production_endpoints")) { return true; } } catch (ParseException e) { log.error(APIConstants.MSG_JSON_PARSE_ERROR, e); } catch (ClassCastException e) { log.error(APIConstants.MSG_JSON_PARSE_ERROR, e); } return false; } /** * This method used to get API minimum information from governance artifact * * @param artifact API artifact * @param registry Registry * @return API * @throws APIManagementException if failed to get API from artifact */ public static API getAPIInformation(GovernanceArtifact artifact, Registry registry) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); api = new API(new APIIdentifier(providerName, apiName, apiVersion)); //set uuid api.setUUID(artifact.getId()); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); api.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); api.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setInSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_INSEQUENCE)); api.setOutSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE)); api.setFaultSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE)); api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); api.setLastUpdated(registry.get(artifactPath).getLastModified()); api.setCreatedTime(String.valueOf(registry.get(artifactPath).getCreatedTime().getTime())); } catch (GovernanceException e) { String msg = "Failed to get API from artifact "; throw 
new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } return api; } /** * Get the cache key of the ResourceInfoDTO * * @param apiContext - Context of the API * @param apiVersion - API Version * @param resourceUri - The resource uri Ex: /name/version * @param httpMethod - The http method. Ex: GET, POST * @return - The cache key */ public static String getResourceInfoDTOCacheKey(String apiContext, String apiVersion, String resourceUri, String httpMethod) { return apiContext + "/" + apiVersion + resourceUri + ":" + httpMethod; } /** * Get the key of the Resource ( used in scopes) * * @param api - API * @param template - URI Template * @return - The resource key */ public static String getResourceKey(API api, URITemplate template) { return APIUtil.getResourceKey(api.getContext(), api.getId().getVersion(), template.getUriTemplate(), template.getHTTPVerb()); } /** * Get the key of the Resource ( used in scopes) * * @param apiContext - Context of the API * @param apiVersion - API Version * @param resourceUri - The resource uri Ex: /name/version * @param httpMethod - The http method. Ex: GET, POST * @return - The resource key */ public static String getResourceKey(String apiContext, String apiVersion, String resourceUri, String httpMethod) { return apiContext + "/" + apiVersion + resourceUri + ":" + httpMethod; } /** * Find scope object in a set based on the key * * @param scopes - Set of scopes * @param key - Key to search with * @return Scope - scope object */ public static Scope findScopeByKey(Set<Scope> scopes, String key) { for (Scope scope : scopes) { if (scope.getKey().equals(key)) { return scope; } } return null; } /** * Get the cache key of the APIInfoDTO * * @param apiContext - Context of the API * @param apiVersion - API Version * @return - The cache key of the APIInfoDTO */ public static String getAPIInfoDTOCacheKey(String apiContext, String apiVersion) { return apiContext + "/" + apiVersion; } /** * Get the cache key of the Access Token * * @param accessToken - The access token which is cached * @param apiContext - The context of the API * @param apiVersion - The version of the API * @param resourceUri - The value of the resource url * @param httpVerb - The http method. Ex: GET, POST * @param authLevel - Required Authentication level. Ex: Application/Application User * @return - The Key which will be used to cache the access token */ public static String getAccessTokenCacheKey(String accessToken, String apiContext, String apiVersion, String resourceUri, String httpVerb, String authLevel) { return accessToken + ':' + apiContext + '/' + apiVersion + resourceUri + ':' + httpVerb + ':' + authLevel; } /** * Resolves system properties and replaces in given in text * * @param text * @return System properties resolved text */ public static String replaceSystemProperty(String text) { int indexOfStartingChars = -1; int indexOfClosingBrace; // The following condition deals with properties. // Properties are specified as ${system.property}, // and are assumed to be System properties while (indexOfStartingChars < text.indexOf("${") && (indexOfStartingChars = text.indexOf("${")) != -1 && (indexOfClosingBrace = text.indexOf('}')) != -1) { // Is a // property // used? 
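            // Each "${...}" token is resolved against the JVM system properties first; a few well-known keys
            // (carbon.context, admin.username, admin.password) fall back to the server/realm configuration
            // below when no matching system property is set. For example (illustrative input only),
            // replaceSystemProperty("${carbon.home}/repository/conf") would expand "${carbon.home}" at this step.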
String sysProp = text.substring(indexOfStartingChars + 2, indexOfClosingBrace); String propValue = System.getProperty(sysProp); if (propValue == null) { if ("carbon.context".equals(sysProp)) { propValue = ServiceReferenceHolder.getContextService().getServerConfigContext().getContextRoot(); } else if ("admin.username".equals(sysProp) || "admin.password".equals(sysProp)) { try { RealmConfiguration realmConfig = new RealmConfigXMLProcessor().buildRealmConfigurationFromFile(); if ("admin.username".equals(sysProp)) { propValue = realmConfig.getAdminUserName(); } else { propValue = realmConfig.getAdminPassword(); } } catch (UserStoreException e) { // Can't throw an exception because the server is // starting and can't be halted. log.error("Unable to build the Realm Configuration", e); return null; } } } //Derive original text value with resolved system property value if (propValue != null) { text = text.substring(0, indexOfStartingChars) + propValue + text.substring(indexOfClosingBrace + 1); } if ("carbon.home".equals(sysProp) && propValue != null && ".".equals(propValue)) { text = new File(".").getAbsolutePath() + File.separator + text; } } return text; } public static String encryptPassword(String plainTextPassword) throws APIManagementException { try { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode(plainTextPassword.getBytes(Charset.defaultCharset())); } catch (CryptoException e) { String errorMsg = "Error while encrypting the password. " + e.getMessage(); throw new APIManagementException(errorMsg, e); } } /** * Search Apis by Doc Content * * @param registry - Registry which is searched * @param tenantID - Tenant id of logged in domain * @param username - Logged in username * @param searchTerm - Search value for doc * @param searchClient - Search client * @return - Documentation to APIs map * @throws APIManagementException - If failed to get ArtifactManager for given tenant */ public static Map<Documentation, API> searchAPIsByDoc(Registry registry, int tenantID, String username, String searchTerm, String searchClient) throws APIManagementException { Map<Documentation, API> apiDocMap = new HashMap<Documentation, API>(); try { PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(username); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); GenericArtifactManager docArtifactManager = APIUtil.getArtifactManager(registry, APIConstants.DOCUMENTATION_KEY); SolrClient client = SolrClient.getInstance(); Map<String, String> fields = new HashMap<String, String>(); fields.put(APIConstants.DOCUMENTATION_SEARCH_PATH_FIELD, "*" + APIConstants.API_ROOT_LOCATION + "*"); fields.put(APIConstants.DOCUMENTATION_SEARCH_MEDIA_TYPE_FIELD, "*"); if (tenantID == -1) { tenantID = MultitenantConstants.SUPER_TENANT_ID; } //PaginationContext.init(0, 10000, "ASC", APIConstants.DOCUMENTATION_SEARCH_PATH_FIELD, Integer.MAX_VALUE); SolrDocumentList documentList = client.query(searchTerm, tenantID, fields); org.wso2.carbon.user.api.AuthorizationManager manager = ServiceReferenceHolder.getInstance(). getRealmService().getTenantUserRealm(tenantID). 
getAuthorizationManager(); username = MultitenantUtils.getTenantAwareUsername(username); for (SolrDocument document : documentList) { String filePath = (String) document.getFieldValue("path_s"); int index = filePath.indexOf(APIConstants.APIMGT_REGISTRY_LOCATION); filePath = filePath.substring(index); Association[] associations = registry.getAllAssociations(filePath); API api = null; Documentation doc = null; for (Association association : associations) { boolean isAuthorized; String documentationPath = association.getSourcePath(); String path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + documentationPath); if (CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME.equalsIgnoreCase(username)) { isAuthorized = manager.isRoleAuthorized(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET); } else { isAuthorized = manager.isUserAuthorized(username, path, ActionConstants.GET); } if (isAuthorized) { Resource docResource = registry.get(documentationPath); String docArtifactId = docResource.getUUID(); if (docArtifactId != null) { GenericArtifact docArtifact = docArtifactManager.getGenericArtifact(docArtifactId); doc = APIUtil.getDocumentation(docArtifact); } Association[] docAssociations = registry.getAssociations(documentationPath, APIConstants.DOCUMENTATION_ASSOCIATION); /* There will be only one document association, for a document path which is by its owner API*/ if (docAssociations.length > 0) { String apiPath = docAssociations[0].getSourcePath(); path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + apiPath); if (CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME.equalsIgnoreCase(username)) { isAuthorized = manager.isRoleAuthorized(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET); } else { isAuthorized = manager.isUserAuthorized(username, path, ActionConstants.GET); } if (isAuthorized) { Resource resource = registry.get(apiPath); String apiArtifactId = resource.getUUID(); if (apiArtifactId != null) { GenericArtifact apiArtifact = artifactManager.getGenericArtifact(apiArtifactId); api = APIUtil.getAPI(apiArtifact, registry); } else { throw new GovernanceException("artifact id is null of " + apiPath); } } } } if (doc != null && api != null) { if (APIConstants.STORE_CLIENT.equals(searchClient)) { if (api.getStatus().equals(getApiStatus(APIConstants.PUBLISHED)) || api.getStatus().equals(getApiStatus(APIConstants.PROTOTYPED))) { apiDocMap.put(doc, api); } } else { apiDocMap.put(doc, api); } } } } } catch (IndexerException e) { handleException("Failed to search APIs with type Doc", e); } catch (RegistryException e) { handleException("Failed to search APIs with type Doc", e); } catch (UserStoreException e) { handleException("Failed to search APIs with type Doc", e); } return apiDocMap; } public static Map<String, Object> searchAPIsByURLPattern(Registry registry, String searchTerm, int start, int end) throws APIManagementException { SortedSet<API> apiSet = new TreeSet<API>(new APINameComparator()); List<API> apiList = new ArrayList<API>(); final String searchValue = searchTerm.trim(); Map<String, Object> result = new HashMap<String, Object>(); int totalLength = 0; String criteria; Map<String, List<String>> listMap = new HashMap<String, List<String>>(); GenericArtifact[] genericArtifacts = new GenericArtifact[0]; GenericArtifactManager artifactManager = 
null; try { artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); PaginationContext.init(0, 10000, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE); if (artifactManager != null) { for (int i = 0; i < 20; i++) { //This need to fix in future.We don't have a way to get max value of // "url_template" entry stores in registry,unless we search in each API criteria = APIConstants.API_URI_PATTERN + i; listMap.put(criteria, new ArrayList<String>() { { add(searchValue); } }); genericArtifacts = (GenericArtifact[]) ArrayUtils.addAll(genericArtifacts, artifactManager .findGenericArtifacts(listMap)); } if (genericArtifacts == null || genericArtifacts.length == 0) { result.put("apis", apiSet); result.put("length", 0); return result; } totalLength = genericArtifacts.length; StringBuilder apiNames = new StringBuilder(); for (GenericArtifact artifact : genericArtifacts) { if (apiNames.indexOf(artifact.getAttribute(APIConstants.API_OVERVIEW_NAME)) < 0) { String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS); if (isAllowDisplayAPIsWithMultipleStatus()) { if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) { API api = APIUtil.getAPI(artifact, registry); if (api != null) { apiList.add(api); apiNames.append(api.getId().getApiName()); } } } else { if (APIConstants.PUBLISHED.equals(status)) { API api = APIUtil.getAPI(artifact, registry); if (api != null) { apiList.add(api); apiNames.append(api.getId().getApiName()); } } } } totalLength = apiList.size(); } if (totalLength <= ((start + end) - 1)) { end = totalLength; } for (int i = start; i < end; i++) { apiSet.add(apiList.get(i)); } } } catch (APIManagementException e) { handleException("Failed to search APIs with input url-pattern", e); } catch (GovernanceException e) { handleException("Failed to search APIs with input url-pattern", e); } result.put("apis", apiSet); result.put("length", totalLength); return result; } /** * This method will check the validity of given url. WSDL url should be * contain http, https or file system patch * otherwise we will mark it as invalid wsdl url. How ever here we do not * validate wsdl content. * * @param wsdlURL wsdl url tobe tested * @return true if its valid url else fale */ public static boolean isValidWSDLURL(String wsdlURL, boolean required) { if (wsdlURL != null && !"".equals(wsdlURL)) { if (wsdlURL.startsWith("http:") || wsdlURL.startsWith("https:") || wsdlURL.startsWith("file:") || wsdlURL.startsWith("/registry")) { return true; } } else if (!required) { // If the WSDL in not required and URL is empty, then we don't need // to add debug log. // Hence returning. return false; } if (log.isDebugEnabled()) { log.debug("WSDL url validation failed. Provided wsdl url is not valid url: " + wsdlURL); } return false; } /** * load tenant axis configurations. * * @param tenantDomain */ public static void loadTenantConfig(String tenantDomain) { final String finalTenantDomain = tenantDomain; ConfigurationContext ctx = ServiceReferenceHolder.getContextService().getServerConfigContext(); //Cannot use the tenantDomain directly because it's getting locked in createTenantConfigurationContext() // method in TenantAxisUtils String accessFlag = tenantDomain + "@WSO2"; long lastAccessed = TenantAxisUtils.getLastAccessed(tenantDomain, ctx); //Only if the tenant is in unloaded state, we do the loading if (System.currentTimeMillis() - lastAccessed >= tenantIdleTimeMillis) { synchronized (accessFlag.intern()) { // Currently loading tenants are added to a set. 
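                // The interned "<tenantDomain>@WSO2" flag plus this set act as a guard: only the first caller
                // for an idle tenant schedules the asynchronous load on the server thread pool below; later
                // concurrent callers find the domain already in the set and skip scheduling a duplicate load.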
// If a tenant domain is in the set it implies that particular tenant is being loaded. // Therefore if and only if the set does not contain the tenant. if (!currentLoadingTenants.contains(tenantDomain)) { //Only one concurrent request is allowed to add to the currentLoadingTenants currentLoadingTenants.add(tenantDomain); ctx.getThreadPool().execute(new Runnable() { @Override public void run() { Thread.currentThread().setName("APIMHostObjectUtils-loadTenantConfig-thread"); try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext() .setTenantDomain(finalTenantDomain, true); ConfigurationContext ctx = ServiceReferenceHolder.getContextService() .getServerConfigContext(); TenantAxisUtils.getTenantAxisConfiguration(finalTenantDomain, ctx); } catch (Exception e) { log.error("Error while creating axis configuration for tenant " + finalTenantDomain, e); } finally { //only after the tenant is loaded completely, the tenant domain is removed from the set currentLoadingTenants.remove(finalTenantDomain); PrivilegedCarbonContext.endTenantFlow(); } } }); } } } } /** * load tenant axis configurations. * * @param tenantDomain */ public static void loadTenantConfigBlockingMode(String tenantDomain) { try { ConfigurationContext ctx = ServiceReferenceHolder.getContextService().getServerConfigContext(); TenantAxisUtils.getTenantAxisConfiguration(tenantDomain, ctx); } catch (Exception e) { log.error("Error while creating axis configuration for tenant " + tenantDomain, e); } } public static String extractCustomerKeyFromAuthHeader(Map headersMap) { //From 1.0.7 version of this component onwards remove the OAuth authorization header from // the message is configurable. So we dont need to remove headers at this point. String authHeader = (String) headersMap.get(HttpHeaders.AUTHORIZATION); if (authHeader == null) { return null; } if (authHeader.startsWith("OAuth ") || authHeader.startsWith("oauth ")) { authHeader = authHeader.substring(authHeader.indexOf("o")); } String[] headers = authHeader.split(APIConstants.OAUTH_HEADER_SPLITTER); for (String header : headers) { String[] elements = header.split(APIConstants.CONSUMER_KEY_SEGMENT_DELIMITER); if (elements.length > 1) { int j = 0; boolean isConsumerKeyHeaderAvailable = false; for (String element : elements) { if (!"".equals(element.trim())) { if (APIConstants.CONSUMER_KEY_SEGMENT.equals(elements[j].trim())) { isConsumerKeyHeaderAvailable = true; } else if (isConsumerKeyHeaderAvailable) { return removeLeadingAndTrailing(elements[j].trim()); } } j++; } } } return null; } private static String removeLeadingAndTrailing(String base) { String result = base; if (base.startsWith("\"") || base.endsWith("\"")) { result = base.replace("\"", ""); } return result.trim(); } /** * This method will return mounted path of the path if the path * is mounted. Else path will be returned. 
* * @param registryContext Registry Context instance which holds path mappings * @param path default path of the registry * @return mounted path or path */ public static String getMountedPath(RegistryContext registryContext, String path) { if (registryContext != null && path != null) { List<Mount> mounts = registryContext.getMounts(); if (mounts != null) { for (Mount mount : mounts) { if (path.equals(mount.getPath())) { return mount.getTargetPath(); } } } } return path; } /** * Returns a map of gateway / store domains for the tenant * * @return a Map of domain names for tenant * @throws org.wso2.carbon.apimgt.api.APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, String> getDomainMappings(String tenantDomain, String appType) throws APIManagementException { Map<String, String> domains = new HashMap<String, String>(); String resourcePath; try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(); resourcePath = APIConstants.API_DOMAIN_MAPPINGS.replace("<tenant-id>", tenantDomain); if (registry.resourceExists(resourcePath)) { Resource resource = registry.get(resourcePath); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); JSONParser parser = new JSONParser(); JSONObject mappings = (JSONObject) parser.parse(content); if (mappings.get(appType) != null) { mappings = (JSONObject) mappings.get(appType); for (Object o : mappings.entrySet()) { Entry thisEntry = (Entry) o; String key = (String) thisEntry.getKey(); //Instead strictly comparing customUrl, checking whether name is starting with customUrl //to allow users to add multiple URLs if needed if(!StringUtils.isEmpty(key) && key.startsWith(APIConstants.CUSTOM_URL)) { String value = (String) thisEntry.getValue(); domains.put(key, value); } } } } } catch (RegistryException e) { String msg = "Error while retrieving gateway domain mappings from registry"; log.error(msg, e); throw new APIManagementException(msg, e); } catch (ClassCastException e) { String msg = "Invalid JSON found in the gateway tenant domain mappings"; log.error(msg, e); throw new APIManagementException(msg, e); } catch (ParseException e) { String msg = "Malformed JSON found in the gateway tenant domain mappings"; log.error(msg, e); throw new APIManagementException(msg, e); } return domains; } /** * This method used to Downloaded Uploaded Documents from publisher * * @param userName logged in username * @param resourceUrl resource want to download * @param tenantDomain loggedUserTenantDomain * @return map that contains Data of the resource * @throws APIManagementException */ public static Map<String, Object> getDocument(String userName, String resourceUrl, String tenantDomain) throws APIManagementException { Map<String, Object> documentMap = new HashMap<String, Object>(); InputStream inStream = null; String[] resourceSplitPath = resourceUrl.split(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH); if (resourceSplitPath.length == 2) { resourceUrl = resourceSplitPath[1]; } else { handleException("Invalid resource Path " + resourceUrl); } Resource apiDocResource; Registry registryType = null; boolean isTenantFlowStarted = false; try { int tenantId; if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); tenantId = 
PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); } else { tenantId = MultitenantConstants.SUPER_TENANT_ID; } userName = MultitenantUtils.getTenantAwareUsername(userName); registryType = ServiceReferenceHolder .getInstance(). getRegistryService().getGovernanceUserRegistry(userName, tenantId); if (registryType.resourceExists(resourceUrl)) { apiDocResource = registryType.get(resourceUrl); inStream = apiDocResource.getContentStream(); documentMap.put("Data", inStream); documentMap.put("contentType", apiDocResource.getMediaType()); String[] content = apiDocResource.getPath().split("/"); documentMap.put("name", content[content.length - 1]); } } catch (RegistryException e) { String msg = "Couldn't retrieve registry for User " + userName + " Tenant " + tenantDomain; log.error(msg, e); handleException(msg, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return documentMap; } /** * this method used to set environments values to api object. * * @param environments environments values in json format * @return set of environments that Published */ public static Set<String> extractEnvironmentsForAPI(String environments) { Set<String> environmentStringSet = null; if (environments == null) { environmentStringSet = new HashSet<String>( ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration().getApiGatewayEnvironments().keySet()); } else { //handle not to publish to any of the gateways if (APIConstants.API_GATEWAY_NONE.equals(environments)) { environmentStringSet = new HashSet<String>(); } //handle to set published gateways nto api object else if (!"".equals(environments)) { String[] publishEnvironmentArray = environments.split(","); environmentStringSet = new HashSet<String>(Arrays.asList(publishEnvironmentArray)); environmentStringSet.remove(APIConstants.API_GATEWAY_NONE); } //handle to publish to any of the gateways when api creating stage else if ("".equals(environments)) { environmentStringSet = new HashSet<String>( ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration().getApiGatewayEnvironments().keySet()); } } return environmentStringSet; } /** * This method used to set environment values to governance artifact of API . * * @param api API object with the attributes value */ public static String writeEnvironmentsToArtifact(API api) { StringBuilder publishedEnvironments = new StringBuilder(); Set<String> apiEnvironments = api.getEnvironments(); if (apiEnvironments != null) { for (String environmentName : apiEnvironments) { publishedEnvironments.append(environmentName).append(','); } if (apiEnvironments.isEmpty()) { publishedEnvironments.append("none,"); } if (!publishedEnvironments.toString().isEmpty()) { publishedEnvironments.deleteCharAt(publishedEnvironments.length() - 1); } } return publishedEnvironments.toString(); } /** * This method used to get the currently published gateway environments of an API . 
* * @param api API object with the attributes value */ public static List<Environment> getEnvironmentsOfAPI(API api) { Map<String, Environment> gatewayEnvironments = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService() .getAPIManagerConfiguration() .getApiGatewayEnvironments(); Set<String> apiEnvironments = api.getEnvironments(); List<Environment> returnEnvironments = new ArrayList<Environment>(); for (Environment environment : gatewayEnvironments.values()) { for (String apiEnvironment : apiEnvironments) { if (environment.getName().equals(apiEnvironment)) { returnEnvironments.add(environment); break; } } } return returnEnvironments; } /** * Given the apps and the application name to check for, it will check if the application already exists. * * @param apps The collection of applications * @param name The application to be checked if exists * @return true - if an application of the name <name> already exists in the collection <apps> * false- if an application of the name <name> does not already exists in the collection <apps> */ public static boolean doesApplicationExist(Application[] apps, String name) { boolean doesApplicationExist = false; if (apps != null) { for (Application app : apps) { if (app.getName().equals(name)) { doesApplicationExist = true; } } } return doesApplicationExist; } /** * Read the group id extractor class reference from api-manager.xml. * * @return group id extractor class reference. */ public static String getGroupingExtractorImplementation() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration(); return config.getFirstProperty(APIConstants.API_STORE_GROUP_EXTRACTOR_IMPLEMENTATION); } /** * This method will update the permission cache of the tenant which is related to the given usename * * @param username User name to find the relevant tenant * @throws UserStoreException if the permission update failed */ public static void updatePermissionCache(String username) throws UserStoreException { String tenantDomain = MultitenantUtils.getTenantDomain(username); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain); PermissionUpdateUtil.updatePermissionTree(tenantId); } /** * Check whether given application name is available under current subscriber or group * * @param subscriber subscriber name * @param applicationName application name * @param groupId group of the subscriber * @return true if application is available for the subscriber * @throws APIManagementException if failed to get applications for given subscriber */ public static boolean isApplicationExist(String subscriber, String applicationName, String groupId) throws APIManagementException { return ApiMgtDAO.getInstance().isApplicationExist(applicationName, subscriber, groupId); } public static String getHostAddress() { if (hostAddress != null) { return hostAddress; } hostAddress = ServerConfiguration.getInstance().getFirstProperty(APIConstants.API_MANAGER_HOSTNAME); if (null == hostAddress) { if (getLocalAddress() != null) { hostAddress = getLocalAddress().getHostName(); } if (hostAddress == null) { hostAddress = APIConstants.API_MANAGER_HOSTNAME_UNKNOWN; } return hostAddress; } else { return hostAddress; } } private static InetAddress getLocalAddress() { Enumeration<NetworkInterface> ifaces = null; try { ifaces = NetworkInterface.getNetworkInterfaces(); } catch (SocketException e) { log.error("Failed to get host address", e); } if (ifaces != null) 
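        // Walk every network interface and return the first non-loopback IPv4 address found, if any.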
{ while (ifaces.hasMoreElements()) { NetworkInterface iface = ifaces.nextElement(); Enumeration<InetAddress> addresses = iface.getInetAddresses(); while (addresses.hasMoreElements()) { InetAddress addr = addresses.nextElement(); if (addr instanceof Inet4Address && !addr.isLoopbackAddress()) { return addr; } } } } return null; } public static boolean isStringArray(Object[] args) { for (Object arg : args) { if (!(arg instanceof String)) { return false; } } return true; } public static String appendDomainWithUser(String username, String domain) { if (username.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR) || username.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT) || MultitenantConstants.SUPER_TENANT_NAME.equalsIgnoreCase(username)) { return username; } return username + APIConstants.EMAIL_DOMAIN_SEPARATOR + domain; } /* * Util method to convert a java object to a json object * */ public static String convertToString(Object obj) { Gson gson = new Gson(); return gson.toJson(obj); } public static String getSequencePath(APIIdentifier identifier, String pathFlow) { String artifactPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR + identifier.getApiName() + RegistryConstants.PATH_SEPARATOR + identifier.getVersion(); return artifactPath + RegistryConstants.PATH_SEPARATOR + pathFlow + RegistryConstants.PATH_SEPARATOR; } private static String getAPIMonetizationCategory(Set<Tier> tiers, String tenantDomain) throws APIManagementException { boolean isPaidFound = false; boolean isFreeFound = false; for (Tier tier : tiers) { if (isTierPaid(tier.getName(), tenantDomain)) { isPaidFound = true; } else { isFreeFound = true; if (isPaidFound) { break; } } } if (!isPaidFound) { return APIConstants.API_CATEGORY_FREE; } else if (!isFreeFound) { return APIConstants.API_CATEGORY_PAID; } else { return APIConstants.API_CATEGORY_FREEMIUM; } } private static boolean isTierPaid(String tierName, String tenantDomainName) throws APIManagementException { String tenantDomain = tenantDomainName; if (tenantDomain == null) { tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } if (APIConstants.UNLIMITED_TIER.equalsIgnoreCase(tierName)) { return isUnlimitedTierPaid(tenantDomain); } boolean isPaid = false; Tier tier = getTierFromCache(tierName, tenantDomain); if (tier != null) { final Map<String, Object> tierAttributes = tier.getTierAttributes(); if (tierAttributes != null) { String isPaidValue = tier.getTierPlan(); if (isPaidValue != null && APIConstants.COMMERCIAL_TIER_PLAN.equals(isPaidValue)) { isPaid = true; } } } else { throw new APIManagementException("Tier " + tierName + "cannot be found"); } return isPaid; } private static boolean isUnlimitedTierPaid(String tenantDomain) throws APIManagementException { JSONObject apiTenantConfig = null; try { String content = null; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); Registry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getConfigSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.API_TENANT_CONF_LOCATION)) { Resource resource = registry.get(APIConstants.API_TENANT_CONF_LOCATION); content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); } if (content != null) { JSONParser parser = new JSONParser(); 
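                // Parse the tenant configuration content read from APIConstants.API_TENANT_CONF_LOCATION above;
                // the APIConstants.API_TENANT_CONF_IS_UNLIMITED_TIER_PAID flag is looked up from it further below.
                // Illustrative (assumed) shape of the relevant fragment: {"IsUnlimitedTierPaid": true}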
apiTenantConfig = (JSONObject) parser.parse(content); } } catch (UserStoreException e) { handleException("UserStoreException thrown when getting API tenant config from registry", e); } catch (RegistryException e) { handleException("RegistryException thrown when getting API tenant config from registry", e); } catch (ParseException e) { handleException("ParseException thrown when passing API tenant config from registry", e); } finally { PrivilegedCarbonContext.endTenantFlow(); } if (apiTenantConfig != null) { Object value = apiTenantConfig.get(APIConstants.API_TENANT_CONF_IS_UNLIMITED_TIER_PAID); if (value != null) { return Boolean.parseBoolean(value.toString()); } else { throw new APIManagementException(APIConstants.API_TENANT_CONF_IS_UNLIMITED_TIER_PAID + " config does not exist for tenant " + tenantDomain); } } return false; } public static Tier getTierFromCache(String tierName, String tenantDomain) throws APIManagementException { Map<String, Tier> tierMap = null; try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); if (getTiersCache().containsKey(tierName)) { tierMap = (Map<String, Tier>) getTiersCache().get(tierName); } else { int requestedTenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (!APIUtil.isAdvanceThrottlingEnabled()) { if (requestedTenantId == 0) { tierMap = APIUtil.getTiers(); } else { tierMap = APIUtil.getTiers(requestedTenantId); } } else { if (requestedTenantId == 0) { tierMap = APIUtil.getAdvancedSubsriptionTiers(); } else { tierMap = APIUtil.getAdvancedSubsriptionTiers(requestedTenantId); } } getTiersCache().put(tierName, tierMap); } } finally { PrivilegedCarbonContext.endTenantFlow(); } return tierMap.get(tierName); } public static void clearTiersCache(String tenantDomain) { try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); getTiersCache().removeAll(); } finally { PrivilegedCarbonContext.endTenantFlow(); } } private static Cache getTiersCache() { return Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER). 
getCache(APIConstants.TIERS_CACHE); } /** * Util method to return the artifact from a registry resource path * * @param apiIdentifier * @param registry * @return * @throws APIManagementException */ public static GenericArtifact getAPIArtifact(APIIdentifier apiIdentifier, Registry registry) throws APIManagementException { String apiPath = APIUtil.getAPIPath(apiIdentifier); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); try { Resource apiResource = registry.get(apiPath); String artifactId = apiResource.getUUID(); if (artifactId == null) { throw new APIManagementException("artifact id is null for : " + apiPath); } return artifactManager.getGenericArtifact(artifactId); } catch (RegistryException e) { handleException("Failed to get API artifact from : " + apiPath, e); return null; } } /** * Return a http client instance * * @param port - server port * @param protocol- service endpoint protocol http/https * @return */ public static HttpClient getHttpClient(int port, String protocol) { SchemeRegistry registry = new SchemeRegistry(); SSLSocketFactory socketFactory = SSLSocketFactory.getSocketFactory(); String hostnameVerifierOption = System.getProperty(HOST_NAME_VERIFIER); String sslValue = null; AxisConfiguration axis2Config = ServiceReferenceHolder.getContextService().getServerConfigContext() .getAxisConfiguration(); org.apache.axis2.description.Parameter sslVerifyClient = axis2Config.getTransportIn(APIConstants.HTTPS_PROTOCOL) .getParameter(APIConstants.SSL_VERIFY_CLIENT); if (sslVerifyClient != null) { sslValue = (String) sslVerifyClient.getValue(); } X509HostnameVerifier hostnameVerifier; if (ALLOW_ALL.equalsIgnoreCase(hostnameVerifierOption)) { hostnameVerifier = SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER; } else if (STRICT.equalsIgnoreCase(hostnameVerifierOption)) { hostnameVerifier = SSLSocketFactory.STRICT_HOSTNAME_VERIFIER; } else { hostnameVerifier = SSLSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER; } socketFactory.setHostnameVerifier(hostnameVerifier); if (APIConstants.HTTPS_PROTOCOL.equals(protocol)) { try { if (APIConstants.SSL_VERIFY_CLIENT_STATUS_REQUIRE.equals(sslValue)) { socketFactory = createSocketFactory(); socketFactory.setHostnameVerifier(hostnameVerifier); } if (port >= 0) { registry.register(new Scheme(APIConstants.HTTPS_PROTOCOL, port, socketFactory)); } else { registry.register(new Scheme(APIConstants.HTTPS_PROTOCOL, 443, socketFactory)); } } catch (APIManagementException e) { log.error(e); } } else if (APIConstants.HTTP_PROTOCOL.equals(protocol)) { if (port >= 0) { registry.register(new Scheme(APIConstants.HTTP_PROTOCOL, port, PlainSocketFactory.getSocketFactory())); } else { registry.register(new Scheme(APIConstants.HTTP_PROTOCOL, 80, PlainSocketFactory.getSocketFactory())); } } HttpParams params = new BasicHttpParams(); ThreadSafeClientConnManager tcm = new ThreadSafeClientConnManager(registry); return new DefaultHttpClient(tcm, params); } private static SSLSocketFactory createSocketFactory() throws APIManagementException { KeyStore keyStore; String keyStorePath = null; String keyStorePassword; try { keyStorePath = CarbonUtils.getServerConfiguration().getFirstProperty("Security.KeyStore.Location"); keyStorePassword = CarbonUtils.getServerConfiguration() .getFirstProperty("Security.KeyStore.Password"); keyStore = KeyStore.getInstance("JKS"); keyStore.load(new FileInputStream(keyStorePath), keyStorePassword.toCharArray()); SSLSocketFactory sslSocketFactory = new SSLSocketFactory(keyStore, keyStorePassword); 
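// Minimal usage sketch for the getHttpClient(...) utility defined above (illustrative only;
// the port value is an assumption, not read from configuration):
//
//     HttpClient client = APIUtil.getHttpClient(9443, APIConstants.HTTPS_PROTOCOL);
//
// Hostname verification follows the system property referenced by HOST_NAME_VERIFIER:
// values matching ALLOW_ALL or STRICT select those verifiers, anything else falls back to
// the browser-compatible verifier.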
return sslSocketFactory; } catch (KeyStoreException e) { handleException("Failed to read from Key Store", e); } catch (CertificateException e) { handleException("Failed to read Certificate", e); } catch (NoSuchAlgorithmException e) { handleException("Failed to load Key Store from " + keyStorePath, e); } catch (IOException e) { handleException("Key Store not found in " + keyStorePath, e); } catch (UnrecoverableKeyException e) { handleException("Failed to load key from" + keyStorePath, e); } catch (KeyManagementException e) { handleException("Failed to load key from" + keyStorePath, e); } return null; } /** * This method will return a relative URL for given registry resource which we can used to retrieve the resource * from the web UI. For example, URI for a thumbnail icon of a tag can be generated from this method. * * @param resourceType Type of the registry resource. Based on this value the way URL is generate can be changed. * @param tenantDomain tenant domain of the resource * @param resourcePath path of the resource * @return relative path of the registry resource from the web context level */ public static String getRegistryResourcePathForUI(APIConstants.RegistryResourceTypesForUI resourceType, String tenantDomain, String resourcePath) { StringBuilder resourcePathBuilder = new StringBuilder(); if (APIConstants.RegistryResourceTypesForUI.TAG_THUMBNAIL.equals(resourceType)) { if (tenantDomain != null && !"".equals(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { // The compiler will concatenate the 2 constants. If we use the builder to append the 2 constants, then // it will happen during the runtime. resourcePathBuilder.append(RegistryConstants.PATH_SEPARATOR + MultitenantConstants .TENANT_AWARE_URL_PREFIX + RegistryConstants.PATH_SEPARATOR).append(tenantDomain); } // The compiler will concatenate the 2 constants. If we use the builder to append the 2 constants, then // it will happen during the runtime. resourcePathBuilder.append(APIConstants.REGISTRY_RESOURCE_PREFIX + RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH); resourcePathBuilder.append(resourcePath); } return resourcePathBuilder.toString(); } /** * Gets the class given the class name. * * @param className the fully qualified name of the class. * @return an instance of the class with the given name * @throws ClassNotFoundException * @throws IllegalAccessException * @throws InstantiationException */ public static Class getClassForName(String className) throws ClassNotFoundException, IllegalAccessException, InstantiationException { return Class.forName(className); } /** * This method will check the validity of given url. * otherwise we will mark it as invalid url. * * @param url url tobe tested * @return true if its valid url else fale */ public static boolean isValidURL(String url) { if (url == null) { return false; } try { URL urlVal = new URL(url); // If there are no issues, then this is a valid URL. Hence returning true. 
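// Illustrative sketch of the relative path built by getRegistryResourcePathForUI(...) above
// for a tag thumbnail (the tenant domain and resource path below are placeholder values):
//
//     getRegistryResourcePathForUI(APIConstants.RegistryResourceTypesForUI.TAG_THUMBNAIL,
//             "wso2.com", "/apimgt/applicationdata/tags/finance/thumbnail.png")
//     // -> "/" + TENANT_AWARE_URL_PREFIX + "/wso2.com" + REGISTRY_RESOURCE_PREFIX
//     //        + GOVERNANCE_REGISTRY_BASE_PATH + "/apimgt/applicationdata/tags/finance/thumbnail.png"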
return true; } catch (MalformedURLException e) { return false; } } /** * @param tenantDomain Tenant domain to be used to get configurations for REST API scopes * @return JSON object which contains configuration for REST API scopes * @throws APIManagementException */ public static JSONObject getTenantRESTAPIScopesConfig(String tenantDomain) throws APIManagementException { JSONObject apiTenantConfig = null; JSONObject restAPIConfigJSON = null; try { String content = new APIMRegistryServiceImpl().getConfigRegistryResourceContent(tenantDomain, APIConstants.API_TENANT_CONF_LOCATION); if (content != null) { JSONParser parser = new JSONParser(); apiTenantConfig = (JSONObject) parser.parse(content); if (apiTenantConfig != null) { Object value = apiTenantConfig.get(APIConstants.REST_API_SCOPES_CONFIG); if (value != null) { restAPIConfigJSON = (JSONObject) value; } else { throw new APIManagementException("RESTAPIScopes" + " config does not exist for tenant " + tenantDomain); } } } } catch (UserStoreException e) { handleException("UserStoreException thrown when getting API tenant config from registry", e); } catch (RegistryException e) { handleException("RegistryException thrown when getting API tenant config from registry", e); } catch (ParseException e) { handleException("ParseException thrown when passing API tenant config from registry", e); } return restAPIConfigJSON; } /** * @param tenantDomain Tenant domain to be used to get default role configurations * @return JSON object which contains configuration for default roles * @throws APIManagementException */ public static JSONObject getTenantDefaultRoles(String tenantDomain) throws APIManagementException { JSONObject apiTenantConfig; JSONObject defaultRolesConfigJSON = null; try { String content = new APIMRegistryServiceImpl().getConfigRegistryResourceContent(tenantDomain, APIConstants.API_TENANT_CONF_LOCATION); if (content != null) { JSONParser parser = new JSONParser(); apiTenantConfig = (JSONObject) parser.parse(content); if (apiTenantConfig != null) { Object value = apiTenantConfig.get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES); if (value != null) { defaultRolesConfigJSON = (JSONObject) value; } else { //Config might not exist for migrated environments from previous release if (log.isDebugEnabled()) { log.debug(APIConstants.API_TENANT_CONF_DEFAULT_ROLES + " config does not exist for tenant " + tenantDomain); } } } } } catch (UserStoreException e) { handleException("Error while retrieving user realm for tenant " + tenantDomain, e); } catch (RegistryException e) { handleException("Error while retrieving tenant configuration file for tenant " + tenantDomain, e); } catch (ParseException e) { handleException( "Error while parsing tenant configuration file while retrieving default roles for tenant " + tenantDomain, e); } return defaultRolesConfigJSON; } /** * @param config JSON configuration object with scopes and associated roles * @return Map of scopes which contains scope names and associated role list */ public static Map<String, String> getRESTAPIScopesFromConfig(JSONObject config) { Map<String, String> scopes = new HashMap<String, String>(); JSONArray scopesArray = (JSONArray) config.get("Scope"); for (Object scopeObj : scopesArray) { JSONObject scope = (JSONObject) scopeObj; String scopeName = scope.get(APIConstants.REST_API_SCOPE_NAME).toString(); String scopeRoles = scope.get(APIConstants.REST_API_SCOPE_ROLE).toString(); scopes.put(scopeName, scopeRoles); } return scopes; } /** * Determines if the scope is specified in the whitelist. 
* * @param scope - The scope key to check * @return - 'true' if the scope is white listed. 'false' if not. */ public static boolean isWhiteListedScope(String scope) { if (whiteListedScopes == null) { APIManagerConfiguration configuration = ServiceReferenceHolder.getInstance(). getAPIManagerConfigurationService().getAPIManagerConfiguration(); // Read scope whitelist from Configuration. List<String> whitelist = configuration.getProperty(APIConstants.WHITELISTED_SCOPES); // If whitelist is null, default scopes will be put. if (whitelist == null) { whitelist = new ArrayList<String>(); whitelist.add(APIConstants.OPEN_ID_SCOPE_NAME); whitelist.add(APIConstants.DEVICE_SCOPE_PATTERN); } whiteListedScopes = new HashSet<String>(whitelist); } for (String scopeTobeSkipped : whiteListedScopes) { if (scope.matches(scopeTobeSkipped)) { return true; } } return false; } public static String getServerURL() throws APIManagementException { String hostName = ServerConfiguration.getInstance().getFirstProperty(APIConstants.HOST_NAME); try { if (hostName == null) { hostName = NetworkUtils.getLocalHostname(); } } catch (SocketException e) { throw new APIManagementException("Error while trying to read hostname.", e); } String mgtTransport = CarbonUtils.getManagementTransport(); AxisConfiguration axisConfiguration = ServiceReferenceHolder .getContextService().getServerConfigContext().getAxisConfiguration(); int mgtTransportPort = CarbonUtils.getTransportProxyPort(axisConfiguration, mgtTransport); if (mgtTransportPort <= 0) { mgtTransportPort = CarbonUtils.getTransportPort(axisConfiguration, mgtTransport); } String serverUrl = mgtTransport + "://" + hostName.toLowerCase(); // If it's well known HTTPS port, skip adding port if (mgtTransportPort != APIConstants.DEFAULT_HTTPS_PORT) { serverUrl += ":" + mgtTransportPort; } // If ProxyContextPath is defined then append it String proxyContextPath = ServerConfiguration.getInstance().getFirstProperty(APIConstants.PROXY_CONTEXT_PATH); if (proxyContextPath != null && !proxyContextPath.trim().isEmpty()) { if (proxyContextPath.charAt(0) == '/') { serverUrl += proxyContextPath; } else { serverUrl += "/" + proxyContextPath; } } return serverUrl; } /** * Extract the provider of the API from name * * @param apiVersion - API Name with version * @param tenantDomain - tenant domain of the API * @return API publisher name */ public static String getAPIProviderFromRESTAPI(String apiVersion, String tenantDomain) { int index = apiVersion.indexOf("--"); if (StringUtils.isEmpty(tenantDomain)) { tenantDomain = org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } String apiProvider; if (index != -1) { apiProvider = apiVersion.substring(0, index); if (apiProvider.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT)) { apiProvider = apiProvider.replace(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT, APIConstants.EMAIL_DOMAIN_SEPARATOR); } if (!apiProvider.endsWith(tenantDomain)) { apiProvider = apiProvider + '@' + tenantDomain; } return apiProvider; } return null; } /** * Used to generate CORS Configuration object from CORS Configuration Json * * @param jsonString json representation of CORS configuration * @return CORSConfiguration Object */ public static CORSConfiguration getCorsConfigurationDtoFromJson(String jsonString) { return new Gson().fromJson(jsonString, CORSConfiguration.class); } /** * Used to generate Json string from CORS Configuration object * * @param corsConfiguration CORSConfiguration Object * @return Json string according to 
CORSConfiguration Object */ public static String getCorsConfigurationJsonFromDto(CORSConfiguration corsConfiguration) { return new Gson().toJson(corsConfiguration); } /** * Used to get access control allowed headers according to the api-manager.xml * * @return access control allowed headers string */ public static String getAllowedHeaders() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ACCESS_CTL_ALLOW_HEADERS); } /** * Used to get access control allowed methods define in api-manager.xml * * @return access control allowed methods string */ public static String getAllowedMethods() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ACCESS_CTL_ALLOW_METHODS); } /** * Used to get access control allowed credential define in api-manager.xml * * @return true if access control allow credential enabled */ public static boolean isAllowCredentials() { String allowCredentials = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ACCESS_CTL_ALLOW_CREDENTIALS); return Boolean.parseBoolean(allowCredentials); } /** * Used to get CORS Configuration enabled from api-manager.xml * * @return true if CORS-Configuration is enabled in api-manager.xml */ public static boolean isCORSEnabled() { String corsEnabled = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ENABLED); return Boolean.parseBoolean(corsEnabled); } /** * Used to get access control allowed origins define in api-manager.xml * * @return allow origins list defined in api-manager.xml */ public static String getAllowedOrigins() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ACCESS_CTL_ALLOW_ORIGIN); } /** * Used to get CORSConfiguration according to the API artifact * * @param artifact registry artifact for the API * @return CORS Configuration object extract from the artifact * @throws GovernanceException if attribute couldn't fetch from the artifact. 
*/ public static CORSConfiguration getCorsConfigurationFromArtifact(GovernanceArtifact artifact) throws GovernanceException { CORSConfiguration corsConfiguration = APIUtil.getCorsConfigurationDtoFromJson( artifact.getAttribute(APIConstants.API_OVERVIEW_CORS_CONFIGURATION)); if (corsConfiguration == null) { corsConfiguration = getDefaultCorsConfiguration(); } return corsConfiguration; } /** * Used to get Default CORS Configuration object according to configuration define in api-manager.xml * * @return CORSConfiguration object accordine to the defined values in api-manager.xml */ public static CORSConfiguration getDefaultCorsConfiguration() { List<String> allowHeadersStringSet = Arrays.asList(getAllowedHeaders().split(",")); List<String> allowMethodsStringSet = Arrays.asList(getAllowedMethods().split(",")); List<String> allowOriginsStringSet = Arrays.asList(getAllowedOrigins().split(",")); return new CORSConfiguration(false, allowOriginsStringSet, false, allowHeadersStringSet, allowMethodsStringSet); } /** * Used to get API name from synapse API Name * * @param api_version API name from synapse configuration * @return api name according to the tenant */ public static String getAPINamefromRESTAPI(String api_version) { int index = api_version.indexOf("--"); String api; if (index != -1) { api_version = api_version.substring(index + 2); } api = api_version.split(":")[0]; index = api.indexOf("--"); if (index != -1) { api = api.substring(index + 2); } return api; } /** * @param stakeHolder value "publisher" for publisher value "subscriber" for subscriber value "admin-dashboard" for admin * Return all alert types. * @return Hashmap of alert types. * @throws APIManagementException */ public static HashMap<Integer, String> getAllAlertTypeByStakeHolder(String stakeHolder) throws APIManagementException { HashMap<Integer, String> map; map = ApiMgtDAO.getInstance().getAllAlertTypesByStakeHolder(stakeHolder); return map; } /** * @param userName user name with tenant domain ex: [email protected] * @param stakeHolder value "p" for publisher value "s" for subscriber value "a" for admin * @return map of saved values of alert types. * @throws APIManagementException */ public static List<Integer> getSavedAlertTypesIdsByUserNameAndStakeHolder(String userName, String stakeHolder) throws APIManagementException { List<Integer> list; list = ApiMgtDAO.getInstance().getSavedAlertTypesIdsByUserNameAndStakeHolder(userName, stakeHolder); return list; } /** * This util method retrieves saved email list by user and stakeHolder name * * @param userName user name with tenant ID. * @param stakeHolder if its publisher values should "p", if it is store value is "s" if admin dashboard value is "a" * @return List of eamil list. 
* @throws APIManagementException */ public static List<String> retrieveSavedEmailList(String userName, String stakeHolder) throws APIManagementException { List<String> list; list = ApiMgtDAO.getInstance().retrieveSavedEmailList(userName, stakeHolder); return list; } private static boolean isDefaultQuotaPolicyContentAware(Policy policy) { if (PolicyConstants.BANDWIDTH_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { return true; } return false; } public static void addDefaultSuperTenantAdvancedThrottlePolicies() throws APIManagementException { int tenantId = MultitenantConstants.SUPER_TENANT_ID; long[] requestCount = new long[]{50, 20, 10, Integer.MAX_VALUE}; //Adding application level throttle policies String[] appPolicies = new String[]{APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_TWENTY_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_TEN_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_UNLIMITED}; String[] appPolicyDecs = new String[]{APIConstants.DEFAULT_APP_POLICY_LARGE_DESC, APIConstants.DEFAULT_APP_POLICY_MEDIUM_DESC, APIConstants.DEFAULT_APP_POLICY_SMALL_DESC, APIConstants.DEFAULT_APP_POLICY_UNLIMITED_DESC}; ApiMgtDAO apiMgtDAO = ApiMgtDAO.getInstance(); String policyName; //Add application level throttle policies for (int i = 0; i < appPolicies.length; i++) { policyName = appPolicies[i]; if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_APP, tenantId, policyName)) { ApplicationPolicy applicationPolicy = new ApplicationPolicy(policyName); applicationPolicy.setDisplayName(policyName); applicationPolicy.setDescription(appPolicyDecs[i]); applicationPolicy.setTenantId(tenantId); applicationPolicy.setDeployed(true); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCount[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); applicationPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); apiMgtDAO.addApplicationPolicy(applicationPolicy); } } //Adding Subscription level policies long[] requestCountSubPolicies = new long[]{5000, 2000, 1000, 500, Integer.MAX_VALUE}; String[] subPolicies = new String[]{APIConstants.DEFAULT_SUB_POLICY_GOLD, APIConstants.DEFAULT_SUB_POLICY_SILVER, APIConstants.DEFAULT_SUB_POLICY_BRONZE, APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED, APIConstants.DEFAULT_SUB_POLICY_UNLIMITED}; String[] subPolicyDecs = new String[]{APIConstants.DEFAULT_SUB_POLICY_GOLD_DESC, APIConstants.DEFAULT_SUB_POLICY_SILVER_DESC, APIConstants.DEFAULT_SUB_POLICY_BRONZE_DESC, APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED_DESC, APIConstants.DEFAULT_SUB_POLICY_UNLIMITED_DESC}; for (int i = 0; i < subPolicies.length; i++) { policyName = subPolicies[i]; if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_SUB, tenantId, policyName)) { SubscriptionPolicy subscriptionPolicy = new SubscriptionPolicy(policyName); subscriptionPolicy.setDisplayName(policyName); subscriptionPolicy.setDescription(subPolicyDecs[i]); subscriptionPolicy.setTenantId(tenantId); subscriptionPolicy.setDeployed(true); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCountSubPolicies[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); 
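// At this point the request-count quota of the current default subscription tier has been
// populated. The super-tenant defaults created by this loop, per the arrays above, are
// (requests per minute): Gold 5000, Silver 2000, Bronze 1000, Unauthenticated 500,
// Unlimited Integer.MAX_VALUE.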
defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); subscriptionPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); subscriptionPolicy.setStopOnQuotaReach(true); subscriptionPolicy.setBillingPlan(APIConstants.BILLING_PLAN_FREE); apiMgtDAO.addSubscriptionPolicy(subscriptionPolicy); } } //Adding Resource level policies String[] apiPolicies = new String[]{APIConstants.DEFAULT_API_POLICY_FIFTY_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_TWENTY_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_TEN_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_UNLIMITED}; String[] apiPolicyDecs = new String[]{APIConstants.DEFAULT_API_POLICY_ULTIMATE_DESC, APIConstants.DEFAULT_API_POLICY_PLUS_DESC, APIConstants.DEFAULT_API_POLICY_BASIC_DESC, APIConstants.DEFAULT_API_POLICY_UNLIMITED_DESC}; long[] requestCountApiPolicies = new long[]{50000, 20000, 10000, Integer.MAX_VALUE}; for (int i = 0; i < apiPolicies.length; i++) { policyName = apiPolicies[i]; if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_API, tenantId, policyName)) { APIPolicy apiPolicy = new APIPolicy(policyName); apiPolicy.setDisplayName(policyName); apiPolicy.setDescription(apiPolicyDecs[i]); apiPolicy.setTenantId(tenantId); apiPolicy.setUserLevel(APIConstants.API_POLICY_API_LEVEL); apiPolicy.setDeployed(true); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCountApiPolicies[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); apiPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); apiMgtDAO.addAPIPolicy(apiPolicy); } } } public static void addDefaultTenantAdvancedThrottlePolicies(String tenantDomain, int tenantId) throws APIManagementException { ThrottlePolicyDeploymentManager deploymentManager = ThrottlePolicyDeploymentManager.getInstance(); ThrottlePolicyTemplateBuilder policyBuilder = new ThrottlePolicyTemplateBuilder(); Map<String, Long> defualtLimits = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration() .getThrottleProperties().getDefaultThrottleTierLimits(); long tenPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_APP_POLICY_TEN_REQ_PER_MIN) ? defualtLimits.get(APIConstants.DEFAULT_APP_POLICY_TEN_REQ_PER_MIN) : 10; long twentyPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_APP_POLICY_TWENTY_REQ_PER_MIN) ? defualtLimits.get(APIConstants.DEFAULT_APP_POLICY_TWENTY_REQ_PER_MIN) : 20; long fiftyPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN) ? 
defualtLimits.get(APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN) : 50; long[] requestCount = new long[]{fiftyPerMinTier, twentyPerMinTier, tenPerMinTier, Integer.MAX_VALUE}; //Adding application level throttle policies String[] appPolicies = new String[]{APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_TWENTY_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_TEN_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_UNLIMITED}; String[] appPolicyDecs = new String[]{APIConstants.DEFAULT_APP_POLICY_LARGE_DESC, APIConstants.DEFAULT_APP_POLICY_MEDIUM_DESC, APIConstants.DEFAULT_APP_POLICY_SMALL_DESC, APIConstants.DEFAULT_APP_POLICY_UNLIMITED_DESC}; ApiMgtDAO apiMgtDAO = ApiMgtDAO.getInstance(); String policyName; //Add application level throttle policies for (int i = 0; i < appPolicies.length; i++) { policyName = appPolicies[i]; boolean needDeployment = false; ApplicationPolicy applicationPolicy = new ApplicationPolicy(policyName); applicationPolicy.setDisplayName(policyName); applicationPolicy.setDescription(appPolicyDecs[i]); applicationPolicy.setTenantId(tenantId); applicationPolicy.setDeployed(false); applicationPolicy.setTenantDomain(tenantDomain); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCount[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); applicationPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_APP, tenantId, policyName)) { apiMgtDAO.addApplicationPolicy(applicationPolicy); needDeployment = true; } if (!apiMgtDAO.isPolicyDeployed(PolicyConstants.POLICY_LEVEL_APP, tenantId, policyName)) { needDeployment = true; } if (needDeployment) { String policyString; try { policyString = policyBuilder.getThrottlePolicyForAppLevel(applicationPolicy); String policyFile = applicationPolicy.getTenantDomain() + "_" + PolicyConstants.POLICY_LEVEL_APP + "_" + applicationPolicy.getPolicyName(); if(!APIConstants.DEFAULT_APP_POLICY_UNLIMITED.equalsIgnoreCase(policyName)) { deploymentManager.deployPolicyToGlobalCEP(policyString); } apiMgtDAO.setPolicyDeploymentStatus(PolicyConstants.POLICY_LEVEL_APP, applicationPolicy.getPolicyName(), applicationPolicy.getTenantId(), true); } catch (APITemplateException e) { throw new APIManagementException("Error while adding default application policy " + applicationPolicy.getPolicyName(), e); } } } long bronzeTierLimit = defualtLimits.containsKey(APIConstants.DEFAULT_SUB_POLICY_BRONZE) ? defualtLimits.get(APIConstants.DEFAULT_SUB_POLICY_BRONZE) : 1000; long silverTierLimit = defualtLimits.containsKey(APIConstants.DEFAULT_SUB_POLICY_SILVER) ? defualtLimits.get(APIConstants.DEFAULT_SUB_POLICY_SILVER) : 2000; long goldTierLimit = defualtLimits.containsKey(APIConstants.DEFAULT_SUB_POLICY_GOLD) ? defualtLimits.get(APIConstants.DEFAULT_SUB_POLICY_GOLD) : 5000; long unauthenticatedTierLimit = defualtLimits.containsKey(APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED) ?
defualtLimits.get(APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED) : 500; //Adding Subscription level policies long[] requestCountSubPolicies = new long[]{goldTierLimit, silverTierLimit, bronzeTierLimit, unauthenticatedTierLimit, Integer.MAX_VALUE}; String[] subPolicies = new String[]{APIConstants.DEFAULT_SUB_POLICY_GOLD, APIConstants.DEFAULT_SUB_POLICY_SILVER, APIConstants.DEFAULT_SUB_POLICY_BRONZE, APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED, APIConstants.DEFAULT_SUB_POLICY_UNLIMITED}; String[] subPolicyDecs = new String[]{APIConstants.DEFAULT_SUB_POLICY_GOLD_DESC, APIConstants.DEFAULT_SUB_POLICY_SILVER_DESC, APIConstants.DEFAULT_SUB_POLICY_BRONZE_DESC, APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED_DESC, APIConstants.DEFAULT_SUB_POLICY_UNLIMITED_DESC}; for (int i = 0; i < subPolicies.length; i++) { policyName = subPolicies[i]; boolean needDeployment = false; SubscriptionPolicy subscriptionPolicy = new SubscriptionPolicy(policyName); subscriptionPolicy.setDisplayName(policyName); subscriptionPolicy.setDescription(subPolicyDecs[i]); subscriptionPolicy.setTenantId(tenantId); subscriptionPolicy.setDeployed(false); subscriptionPolicy.setTenantDomain(tenantDomain); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCountSubPolicies[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); subscriptionPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); subscriptionPolicy.setStopOnQuotaReach(true); subscriptionPolicy.setBillingPlan(APIConstants.BILLING_PLAN_FREE); if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_SUB, tenantId, policyName)) { apiMgtDAO.addSubscriptionPolicy(subscriptionPolicy); needDeployment = true; } if (!apiMgtDAO.isPolicyDeployed(PolicyConstants.POLICY_LEVEL_SUB, tenantId, policyName)) { needDeployment = true; } if (needDeployment) { String policyString; try { policyString = policyBuilder.getThrottlePolicyForSubscriptionLevel(subscriptionPolicy); String policyFile = subscriptionPolicy.getTenantDomain() + "_" +PolicyConstants.POLICY_LEVEL_SUB + "_" + subscriptionPolicy.getPolicyName(); if(!APIConstants.DEFAULT_SUB_POLICY_UNLIMITED.equalsIgnoreCase(policyName)) { deploymentManager.deployPolicyToGlobalCEP(policyString); } apiMgtDAO.setPolicyDeploymentStatus(PolicyConstants.POLICY_LEVEL_SUB, subscriptionPolicy.getPolicyName(), subscriptionPolicy.getTenantId(), true); } catch (APITemplateException e) { throw new APIManagementException("Error while adding default application policy " + subscriptionPolicy.getPolicyName(), e); } } } long tenThousandPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_API_POLICY_TEN_THOUSAND_REQ_PER_MIN) ? defualtLimits.get(APIConstants.DEFAULT_API_POLICY_TEN_THOUSAND_REQ_PER_MIN) : 10000; long twentyThousandPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_API_POLICY_TWENTY_THOUSAND_REQ_PER_MIN) ? defualtLimits.get(APIConstants.DEFAULT_API_POLICY_TWENTY_THOUSAND_REQ_PER_MIN) : 20000; long fiftyThousandPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_API_POLICY_FIFTY_THOUSAND_REQ_PER_MIN) ? 
defualtLimits.get(APIConstants.DEFAULT_API_POLICY_FIFTY_THOUSAND_REQ_PER_MIN) : 50000; long[] requestCountAPIPolicies = new long[]{fiftyThousandPerMinTier, twentyThousandPerMinTier, tenThousandPerMinTier, Integer.MAX_VALUE}; //Adding Resource level policies String[] apiPolicies = new String[]{APIConstants.DEFAULT_API_POLICY_FIFTY_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_TWENTY_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_TEN_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_UNLIMITED}; String[] apiPolicyDecs = new String[]{APIConstants.DEFAULT_API_POLICY_ULTIMATE_DESC, APIConstants.DEFAULT_API_POLICY_PLUS_DESC, APIConstants.DEFAULT_API_POLICY_BASIC_DESC, APIConstants.DEFAULT_API_POLICY_UNLIMITED_DESC}; for (int i = 0; i < apiPolicies.length; i++) { boolean needDeployment = false; policyName = apiPolicies[i]; APIPolicy apiPolicy = new APIPolicy(policyName); apiPolicy.setDisplayName(policyName); apiPolicy.setDescription(apiPolicyDecs[i]); apiPolicy.setTenantId(tenantId); apiPolicy.setUserLevel(APIConstants.API_POLICY_API_LEVEL); apiPolicy.setDeployed(false); apiPolicy.setTenantDomain(tenantDomain); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCountAPIPolicies[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); apiPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_API, tenantId, policyName)) { apiMgtDAO.addAPIPolicy(apiPolicy); } if (!apiMgtDAO.isPolicyDeployed(PolicyConstants.POLICY_LEVEL_API, tenantId, policyName)) { needDeployment = true; } if (needDeployment) { String policyString; try { policyString = policyBuilder.getThrottlePolicyForAPILevelDefault(apiPolicy); String policyFile = apiPolicy.getTenantDomain() + "_" +PolicyConstants.POLICY_LEVEL_API + "_" + apiPolicy.getPolicyName() + "_default"; if(!APIConstants.DEFAULT_API_POLICY_UNLIMITED.equalsIgnoreCase(policyName)) { deploymentManager.deployPolicyToGlobalCEP(policyString); } apiMgtDAO.setPolicyDeploymentStatus(PolicyConstants.POLICY_LEVEL_API, apiPolicy.getPolicyName(), apiPolicy.getTenantId(), true); } catch (APITemplateException e) { throw new APIManagementException("Error while adding default api policy " + apiPolicy.getPolicyName(), e); } } } } /** * Used to get advence throttling is enable * * @return condition of advance throttling */ public static boolean isAdvanceThrottlingEnabled() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration() .getThrottleProperties().isEnabled(); } /** * Used to get unlimited throttling tier is enable * * @return condition of enable unlimited tier */ public static boolean isEnabledUnlimitedTier() { ThrottleProperties throttleProperties = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration() .getThrottleProperties(); if (throttleProperties.isEnabled()) { return throttleProperties.isEnableUnlimitedTier(); } else { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). 
getAPIManagerConfigurationService().getAPIManagerConfiguration(); return JavaUtils.isTrueExplicitly(config.getFirstProperty(APIConstants.ENABLE_UNLIMITED_TIER)); } } /** * Used to get subscription Spike arrest Enable * * @return condition of Subscription Spike arrest configuration */ public static boolean isEnabledSubscriptionSpikeArrest() { ThrottleProperties throttleProperties = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration() .getThrottleProperties(); return throttleProperties.isEnabledSubscriptionLevelSpikeArrest(); } public static Map<String, Tier> getTiersFromPolicies(String policyLevel, int tenantId) throws APIManagementException { Map<String, Tier> tierMap = new HashMap<String, Tier>(); ApiMgtDAO apiMgtDAO = ApiMgtDAO.getInstance(); Policy[] policies; if (PolicyConstants.POLICY_LEVEL_SUB.equalsIgnoreCase(policyLevel)) { policies = apiMgtDAO.getSubscriptionPolicies(tenantId); } else if (PolicyConstants.POLICY_LEVEL_API.equalsIgnoreCase(policyLevel)) { policies = apiMgtDAO.getAPIPolicies(tenantId); } else if (PolicyConstants.POLICY_LEVEL_APP.equalsIgnoreCase(policyLevel)) { policies = apiMgtDAO.getApplicationPolicies(tenantId); } else { throw new APIManagementException("No such a policy type : " + policyLevel); } for (Policy policy : policies) { if (!APIConstants.UNLIMITED_TIER.equalsIgnoreCase(policy.getPolicyName())) { Tier tier = new Tier(policy.getPolicyName()); tier.setDescription(policy.getDescription()); tier.setDisplayName(policy.getDisplayName()); Limit limit = policy.getDefaultQuotaPolicy().getLimit(); tier.setTimeUnit(limit.getTimeUnit()); tier.setUnitTime(limit.getUnitTime()); //If the policy is a subscription policy if(policy instanceof SubscriptionPolicy){ SubscriptionPolicy subscriptionPolicy = (SubscriptionPolicy)policy; setBillingPlanAndCustomAttributesToTier(subscriptionPolicy, tier); } if(limit instanceof RequestCountLimit) { RequestCountLimit countLimit = (RequestCountLimit) limit; tier.setRequestsPerMin(countLimit.getRequestCount()); tier.setRequestCount(countLimit.getRequestCount()); } else { BandwidthLimit bandwidthLimit = (BandwidthLimit) limit; tier.setRequestsPerMin(bandwidthLimit.getDataAmount()); tier.setRequestCount(bandwidthLimit.getDataAmount()); } tierMap.put(policy.getPolicyName(), tier); } else { if (APIUtil.isEnabledUnlimitedTier()) { Tier tier = new Tier(policy.getPolicyName()); tier.setDescription(policy.getDescription()); tier.setDisplayName(policy.getDisplayName()); tier.setRequestsPerMin(Integer.MAX_VALUE); tier.setRequestCount(Integer.MAX_VALUE); if (isUnlimitedTierPaid(getTenantDomainFromTenantId(tenantId))) { tier.setTierPlan(APIConstants.COMMERCIAL_TIER_PLAN); } else { tier.setTierPlan(APIConstants.BILLING_PLAN_FREE); } tierMap.put(policy.getPolicyName(), tier); } } } if (PolicyConstants.POLICY_LEVEL_SUB.equalsIgnoreCase(policyLevel)) { tierMap.remove(APIConstants.UNAUTHENTICATED_TIER); } return tierMap; } /** * Extract custom attributes and billing plan from subscription policy and set to tier. * @param subscriptionPolicy - The SubscriptionPolicy object to extract details from * @param tier - The Tier to set information into */ public static void setBillingPlanAndCustomAttributesToTier(SubscriptionPolicy subscriptionPolicy, Tier tier){ //set the billing plan. 
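// This method copies the billing plan and then unpacks custom attributes, which are stored
// as a UTF-8 encoded JSON array of name/value objects. Illustrative payload (the attribute
// names and values below are placeholders, not product defaults):
//
//     [{"name": "refundPolicy", "value": "14 days"}, {"name": "supportTier", "value": "gold"}]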
tier.setTierPlan(subscriptionPolicy.getBillingPlan()); //If the tier has custom attributes if(subscriptionPolicy.getCustomAttributes() != null && subscriptionPolicy.getCustomAttributes().length > 0){ Map<String, Object> tierAttributes = new HashMap<String, Object>(); try { String customAttr = new String(subscriptionPolicy.getCustomAttributes(), "UTF-8"); JSONParser parser = new JSONParser(); JSONArray jsonArr = (JSONArray) parser.parse(customAttr); Iterator jsonArrIterator = jsonArr.iterator(); while(jsonArrIterator.hasNext()){ JSONObject json = (JSONObject)jsonArrIterator.next(); tierAttributes.put(String.valueOf(json.get("name")), json.get("value")); } tier.setTierAttributes(tierAttributes); } catch (ParseException e) { log.error("Unable to convert String to Json", e); tier.setTierAttributes(null); } catch (UnsupportedEncodingException e) { log.error("Custom attribute byte array does not use UTF-8 character set", e); tier.setTierAttributes(null); } } } public static Set<Tier> getAvailableTiers(Map<String, Tier> definedTiers, String tiers, String apiName) { Set<Tier> availableTier = new HashSet<Tier>(); if (tiers != null && !"".equals(tiers)) { String[] tierNames = tiers.split("\\|\\|"); for (String tierName : tierNames) { Tier definedTier = definedTiers.get(tierName); if (definedTier != null) { availableTier.add(definedTier); } else { log.warn("Unknown tier: " + tierName + " found on API: " + apiName); } } } return availableTier; } public static byte[] toByteArray(InputStream is) throws IOException { return IOUtils.toByteArray(is); } public static long ipToLong(String ipAddress) { long result = 0; String[] ipAddressInArray = ipAddress.split("\\."); for (int i = 3; i >= 0; i--) { long ip = Long.parseLong(ipAddressInArray[3 - i]); //left shifting 24,16,8,0 and bitwise OR //1. 192 << 24 //1. 168 << 16 //1. 1 << 8 //1. 2 << 0 result |= ip << (i * 8); } return result; } public String getFullLifeCycleData(Registry registry) throws XMLStreamException, RegistryException { return CommonUtil.getLifecycleConfiguration(APIConstants.API_LIFE_CYCLE, registry); } /** * Composes OR based search criteria from provided array of values * * @param values * @return */ public static String getORBasedSearchCriteria(String[] values) { String criteria = "("; if (values != null) { for (int i = 0; i < values.length; i++) { criteria = criteria + values[i]; if (i != values.length - 1) { criteria = criteria + " OR "; } else { criteria = criteria + ")"; } } return criteria; } return null; } /** * Generates solr compatible search criteria synatax from user entered query criteria. 
* Ex: From version:1.0.0, this returns version=*1.0.0* * * @param criteria * @return solar compatible criteria * @throws APIManagementException */ public static String getSingleSearchCriteria(String criteria) throws APIManagementException { criteria = criteria.trim(); String searchValue = criteria; String searchKey = APIConstants.NAME_TYPE_PREFIX; if (criteria.contains(":")) { if (criteria.split(":").length > 1) { searchKey = criteria.split(":")[0].trim(); //if search key is 'tag' instead of 'tags', allow it as well since rest api document says query // param to use for tag search is 'tag' if (APIConstants.TAG_SEARCH_TYPE_PREFIX3.equals(searchKey)) { searchKey = APIConstants.TAG_SEARCH_TYPE_PREFIX; } searchValue = criteria.split(":")[1]; if (!APIConstants.DOCUMENTATION_SEARCH_TYPE_PREFIX.equalsIgnoreCase(searchKey) && !APIConstants.TAG_SEARCH_TYPE_PREFIX.equalsIgnoreCase(searchKey)) { if (!searchValue.endsWith("*")) { searchValue = searchValue + "*"; } if (!searchValue.startsWith("*")) { searchValue = "*" + searchValue; } } } else { throw new APIManagementException("Search term is missing. Try again with valid search query."); } } else { if (!searchValue.endsWith("*")) { searchValue = searchValue + "*"; } if (!searchValue.startsWith("*")) { searchValue = "*" + searchValue; } } if (APIConstants.API_PROVIDER.equalsIgnoreCase(searchKey)) { searchValue = searchValue.replaceAll("@", "-AT-"); } return searchKey + "=" + searchValue; } /** * return whether store forum feature is enabled * * @return true or false indicating enable or not */ public static boolean isStoreForumEnabled() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). getAPIManagerConfigurationService().getAPIManagerConfiguration(); String forumEnabled = config.getFirstProperty(APIConstants.API_STORE_FORUM_ENABLED); if (forumEnabled == null) { return true; } return Boolean.parseBoolean(forumEnabled); } /** * Returns a secured DocumentBuilderFactory instance * * @return DocumentBuilderFactory */ public static DocumentBuilderFactory getSecuredDocumentBuilder() { org.apache.xerces.impl.Constants Constants = null; DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); dbf.setXIncludeAware(false); dbf.setExpandEntityReferences(false); try { dbf.setFeature(Constants.SAX_FEATURE_PREFIX + Constants.EXTERNAL_GENERAL_ENTITIES_FEATURE, false); dbf.setFeature(Constants.SAX_FEATURE_PREFIX + Constants.EXTERNAL_PARAMETER_ENTITIES_FEATURE, false); dbf.setFeature(Constants.XERCES_FEATURE_PREFIX + Constants.LOAD_EXTERNAL_DTD_FEATURE, false); } catch (ParserConfigurationException e) { log.error( "Failed to load XML Processor Feature " + Constants.EXTERNAL_GENERAL_ENTITIES_FEATURE + " or " + Constants.EXTERNAL_PARAMETER_ENTITIES_FEATURE + " or " + Constants.LOAD_EXTERNAL_DTD_FEATURE); } SecurityManager securityManager = new SecurityManager(); securityManager.setEntityExpansionLimit(ENTITY_EXPANSION_LIMIT); dbf.setAttribute(Constants.XERCES_PROPERTY_PREFIX + Constants.SECURITY_MANAGER_PROPERTY, securityManager); return dbf; } /** * Logs an audit message on actions performed on entities (APIs, Applications, etc). The log is printed in the * following JSON format * { * "typ": "API", * "action": "update", * "performedBy": "[email protected]", * "info": { * "name": "Twitter", * "context": "/twitter", * "version": "1.0.0", * "provider": "nuwan" * } * } * * @param entityType - The entity type. Ex: API, Application * @param entityInfo - The details of the entity. 
Ex: API Name, Context * @param action - The type of action performed. Ex: Create, Update * @param performedBy - The user who performs the action. */ public static void logAuditMessage(String entityType, String entityInfo, String action, String performedBy) { JSONObject jsonObject = new JSONObject(); jsonObject.put("typ", entityType); jsonObject.put("action", action); jsonObject.put("performedBy", performedBy); jsonObject.put("info", entityInfo); audit.info(jsonObject.toString()); } public static int getPortOffset() { ServerConfiguration carbonConfig = ServerConfiguration.getInstance(); String portOffset = System.getProperty(APIConstants.PORT_OFFSET_SYSTEM_VAR, carbonConfig.getFirstProperty(APIConstants.PORT_OFFSET_CONFIG)); try { if ((portOffset != null)) { return Integer.parseInt(portOffset.trim()); } else { return 0; } } catch (NumberFormatException e) { log.error("Invalid Port Offset: " + portOffset + ". Default value 0 will be used.", e); return 0; } } public static boolean isQueryParamDataPublishingEnabled() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getThrottleProperties().isEnableQueryParamConditions(); } public static boolean isHeaderDataPublishingEnabled() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getThrottleProperties().isEnableHeaderConditions(); } public static boolean isJwtTokenPublishingEnabled() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getThrottleProperties().isEnableJwtConditions(); } public static String getAnalyticsServerURL() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIAnalyticsConfiguration(). getDasServerUrl(); } public static String getAnalyticsServerUserName() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIAnalyticsConfiguration(). getDasReceiverServerUser(); } public static String getAnalyticsServerPassword() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIAnalyticsConfiguration(). getDasReceiverServerPassword(); } /** * Create the Cache object from the given parameters * @param cacheManagerName - Name of the Cache Manager * @param cacheName - Name of the Cache * @param modifiedExp - Value of the MODIFIED Expiry Type * @param accessExp - Value of the ACCESSED Expiry Type * @return - The cache object */ public static Cache getCache(final String cacheManagerName, final String cacheName, final long modifiedExp, final long accessExp){ return Caching.getCacheManager( cacheManagerName).createCacheBuilder(cacheName). setExpiry(CacheConfiguration.ExpiryType.MODIFIED, new CacheConfiguration.Duration(TimeUnit.SECONDS, modifiedExp)). 
setExpiry(CacheConfiguration.ExpiryType.ACCESSED, new CacheConfiguration.Duration(TimeUnit.SECONDS, accessExp)).setStoreByValue(false).build(); } /** * This method is used to get the actual endpoint password of an API from the hidden property * in the case where the handler APIEndpointPasswordRegistryHandler is enabled in registry.xml * * @param api The API * @param registry The registry object * @return The actual password of the endpoint if exists * @throws RegistryException Throws if the api resource doesn't exist */ private static String getActualEpPswdFromHiddenProperty(API api, Registry registry) throws RegistryException { String apiPath = APIUtil.getAPIPath(api.getId()); Resource apiResource = registry.get(apiPath); return apiResource.getProperty(APIConstants.REGISTRY_HIDDEN_ENDPOINT_PROPERTY); } /** * To check whether given role exist in the array of roles. * * @param userRoleList Role list to check against. * @param accessControlRole Access Control Role. * @return true if the Array contains the role specified. */ public static boolean compareRoleList(String[] userRoleList, String accessControlRole) { if (userRoleList != null) { for (String userRole : userRoleList) { if (userRole.equalsIgnoreCase(accessControlRole)) { return true; } } } return false; } /** * To clear the publisherRoleCache for certain users. * * @param userName Names of the user. */ public static void clearRoleCache(String userName) { if (isPublisherRoleCacheEnabled) { Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER).getCache(APIConstants .API_PUBLISHER_ADMIN_PERMISSION_CACHE).remove(userName); Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER).getCache(APIConstants .API_PUBLISHER_USER_ROLE_CACHE).remove(userName); } } /** * Used to reconstruct the input search query as sub context and doc content doesn't support AND search * * @param query Input search query * @return Reconstructed new search query * @throws APIManagementException If there is an error in the search query */ public static String constructNewSearchQuery(String query) throws APIManagementException { String newSearchQuery = ""; String inputSearchQuery = query.trim(); // sub context and doc content doesn't support AND search if (inputSearchQuery != null && inputSearchQuery.contains(" ")) { if (inputSearchQuery.split(" ").length > 1) { String[] searchCriterias = inputSearchQuery.split(" "); for (int i = 0; i < searchCriterias.length; i++) { if (searchCriterias[i].contains(":") && searchCriterias[i].split(":").length > 1) { if (APIConstants.DOCUMENTATION_SEARCH_TYPE_PREFIX .equalsIgnoreCase(searchCriterias[i].split(":")[0]) || APIConstants.SUBCONTEXT_SEARCH_TYPE_PREFIX .equalsIgnoreCase(searchCriterias[i].split(":")[0])) { throw new APIManagementException("Invalid query. AND based search is not supported for " + "doc and subcontext prefixes"); } } if (i == 0) { newSearchQuery = APIUtil.getSingleSearchCriteria(searchCriterias[i]); } else { newSearchQuery = newSearchQuery + APIConstants.SEARCH_AND_TAG + APIUtil .getSingleSearchCriteria(searchCriterias[i]); } } } } else { newSearchQuery = APIUtil.getSingleSearchCriteria(inputSearchQuery); } return newSearchQuery; } }
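// Worked examples for some of the pure helper methods above (illustrative comments only):
//
//     APIUtil.ipToLong("192.168.1.2");
//     // -> 3232235778L, i.e. (192L << 24) | (168 << 16) | (1 << 8) | 2
//
//     APIUtil.getSingleSearchCriteria("version:1.0.0");
//     // -> "version=*1.0.0*" (doc and tag searches are returned without the added wildcards)
//
//     APIUtil.getORBasedSearchCriteria(new String[]{"PUBLISHED", "PROTOTYPED"});
//     // -> "(PUBLISHED OR PROTOTYPED)"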
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/utils/APIUtil.java
/* * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.impl.utils; import com.google.gson.Gson; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.impl.builder.StAXOMBuilder; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.AxisFault; import org.apache.axis2.Constants; import org.apache.axis2.client.Options; import org.apache.axis2.client.ServiceClient; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.engine.AxisConfiguration; import org.apache.axis2.transport.http.HTTPConstants; import org.apache.axis2.util.JavaUtils; import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpHeaders; import org.apache.http.client.HttpClient; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.ssl.SSLSocketFactory; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpParams; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.xerces.util.SecurityManager; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.w3c.dom.Document; import org.wso2.carbon.CarbonConstants; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.doc.model.APIDefinition; import org.wso2.carbon.apimgt.api.doc.model.APIResource; import org.wso2.carbon.apimgt.api.doc.model.Operation; import org.wso2.carbon.apimgt.api.doc.model.Parameter; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIPublisher; import org.wso2.carbon.apimgt.api.model.APIStatus; import org.wso2.carbon.apimgt.api.model.APIStore; import org.wso2.carbon.apimgt.api.model.Application; import org.wso2.carbon.apimgt.api.model.CORSConfiguration; import org.wso2.carbon.apimgt.api.model.Documentation; import org.wso2.carbon.apimgt.api.model.DocumentationType; import org.wso2.carbon.apimgt.api.model.KeyManagerConfiguration; import org.wso2.carbon.apimgt.api.model.Provider; import org.wso2.carbon.apimgt.api.model.Scope; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.URITemplate; import org.wso2.carbon.apimgt.api.model.policy.APIPolicy; import org.wso2.carbon.apimgt.api.model.policy.ApplicationPolicy; import 
org.wso2.carbon.apimgt.api.model.policy.BandwidthLimit; import org.wso2.carbon.apimgt.api.model.policy.Limit; import org.wso2.carbon.apimgt.api.model.policy.Policy; import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants; import org.wso2.carbon.apimgt.api.model.policy.QuotaPolicy; import org.wso2.carbon.apimgt.api.model.policy.RequestCountLimit; import org.wso2.carbon.apimgt.api.model.policy.SubscriptionPolicy; import org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.APIMRegistryServiceImpl; import org.wso2.carbon.apimgt.impl.APIManagerAnalyticsConfiguration; import org.wso2.carbon.apimgt.impl.APIManagerConfiguration; import org.wso2.carbon.apimgt.impl.ThrottlePolicyDeploymentManager; import org.wso2.carbon.apimgt.impl.clients.ApplicationManagementServiceClient; import org.wso2.carbon.apimgt.impl.clients.OAuthAdminClient; import org.wso2.carbon.apimgt.impl.clients.UserInformationRecoveryClient; import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO; import org.wso2.carbon.apimgt.impl.dto.APIKeyValidationInfoDTO; import org.wso2.carbon.apimgt.impl.dto.Environment; import org.wso2.carbon.apimgt.impl.dto.ThrottleProperties; import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder; import org.wso2.carbon.apimgt.impl.internal.APIManagerComponent; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.template.APITemplateException; import org.wso2.carbon.apimgt.impl.template.ThrottlePolicyTemplateBuilder; import org.wso2.carbon.apimgt.keymgt.client.SubscriberKeyMgtClient; import org.wso2.carbon.base.MultitenantConstants; import org.wso2.carbon.base.ServerConfiguration; import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.core.commons.stub.loggeduserinfo.ExceptionException; import org.wso2.carbon.core.commons.stub.loggeduserinfo.LoggedUserInfo; import org.wso2.carbon.core.commons.stub.loggeduserinfo.LoggedUserInfoAdminStub; import org.wso2.carbon.core.multitenancy.utils.TenantAxisUtils; import org.wso2.carbon.core.util.CryptoException; import org.wso2.carbon.core.util.CryptoUtil; import org.wso2.carbon.core.util.PermissionUpdateUtil; import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact; import org.wso2.carbon.governance.api.endpoints.EndpointManager; import org.wso2.carbon.governance.api.endpoints.dataobjects.Endpoint; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.GenericArtifactManager; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.governance.api.util.GovernanceConstants; import org.wso2.carbon.governance.api.util.GovernanceUtils; import org.wso2.carbon.governance.lcm.util.CommonUtil; import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration; import org.wso2.carbon.identity.user.profile.stub.UserProfileMgtServiceStub; import org.wso2.carbon.identity.user.profile.stub.UserProfileMgtServiceUserProfileExceptionException; import org.wso2.carbon.identity.user.profile.stub.types.UserProfileDTO; import org.wso2.carbon.registry.core.ActionConstants; import org.wso2.carbon.registry.core.Association; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.Tag; import org.wso2.carbon.registry.core.config.Mount; import org.wso2.carbon.registry.core.config.RegistryContext; 
import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.jdbc.realm.RegistryAuthorizationManager; import org.wso2.carbon.registry.core.pagination.PaginationContext; import org.wso2.carbon.registry.core.service.RegistryService; import org.wso2.carbon.registry.core.service.TenantRegistryLoader; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.registry.core.utils.RegistryUtils; import org.wso2.carbon.registry.indexing.indexer.IndexerException; import org.wso2.carbon.registry.indexing.solr.SolrClient; import org.wso2.carbon.user.api.Permission; import org.wso2.carbon.user.api.RealmConfiguration; import org.wso2.carbon.user.api.Tenant; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.user.api.UserStoreManager; import org.wso2.carbon.user.core.UserCoreConstants; import org.wso2.carbon.user.core.UserRealm; import org.wso2.carbon.user.core.config.RealmConfigXMLProcessor; import org.wso2.carbon.user.core.service.RealmService; import org.wso2.carbon.user.mgt.UserMgtConstants; import org.wso2.carbon.utils.CarbonUtils; import org.wso2.carbon.utils.ConfigurationContextService; import org.wso2.carbon.utils.FileUtil; import org.wso2.carbon.utils.NetworkUtils; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import org.xml.sax.SAXException; import java.io.File; import java.io.FileInputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.math.RoundingMode; import java.net.Inet4Address; import java.net.InetAddress; import java.net.MalformedURLException; import java.net.NetworkInterface; import java.net.SocketException; import java.net.URL; import java.nio.charset.Charset; import java.rmi.RemoteException; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import javax.cache.Cache; import javax.cache.CacheConfiguration; import javax.cache.CacheManager; import javax.cache.Caching; import javax.xml.XMLConstants; import javax.xml.namespace.QName; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; /** * This class contains the utility methods used by the implementations of APIManager, APIProvider * and APIConsumer interfaces. 
*/ public final class APIUtil { private static final Log log = LogFactory.getLog(APIUtil.class); private static final Log audit = CarbonConstants.AUDIT_LOG; private static boolean isContextCacheInitialized = false; public static final String DISABLE_ROLE_VALIDATION_AT_SCOPE_CREATION = "disableRoleValidationAtScopeCreation"; private static final int ENTITY_EXPANSION_LIMIT = 0; private static final String DESCRIPTION = "Allows [1] request(s) per minute."; private static final int DEFAULT_TENANT_IDLE_MINS = 30; private static long tenantIdleTimeMillis; private static Set<String> currentLoadingTenants = new HashSet<String>(); private static volatile Set<String> whiteListedScopes; private static boolean isPublisherRoleCacheEnabled = true; //Need tenantIdleTime to check whether the tenant is in idle state in loadTenantConfig method static { tenantIdleTimeMillis = Long.parseLong(System.getProperty( org.wso2.carbon.utils.multitenancy.MultitenantConstants.TENANT_IDLE_TIME, String.valueOf(DEFAULT_TENANT_IDLE_MINS))) * 60 * 1000; } private static String hostAddress = null; /** * To initialize the publisherRoleCache configurations, based on configurations. */ public static void init() { APIManagerConfiguration apiManagerConfiguration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration(); String isPublisherRoleCacheEnabledConfiguration = apiManagerConfiguration .getFirstProperty(APIConstants.PUBLISHER_ROLE_CACHE_ENABLED); isPublisherRoleCacheEnabled = isPublisherRoleCacheEnabledConfiguration == null || Boolean .parseBoolean(isPublisherRoleCacheEnabledConfiguration); } /** * This method used to get API from governance artifact * * @param artifact API artifact * @param registry Registry * @return API * @throws APIManagementException if failed to get API from artifact */ public static API getAPI(GovernanceArtifact artifact, Registry registry) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); APIIdentifier apiIdentifier = new APIIdentifier(providerName, apiName, apiVersion); int apiId = ApiMgtDAO.getInstance().getAPIID(apiIdentifier, null); if (apiId == -1) { return null; } api = new API(apiIdentifier); // set rating String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); api.setRating(getAverageRating(apiId)); //set description api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); //set last access time api.setLastUpdated(registry.get(artifactPath).getLastModified()); //set uuid api.setUUID(artifact.getId()); // set url api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setWsdlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WSDL)); api.setWadlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WADL)); api.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); api.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); 
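// Visibility and endpoint-security attributes follow. Note that the endpoint UT password is
// only read from the artifact when it differs from the DEFAULT_MODIFIED_ENDPOINT_PASSWORD
// placeholder; otherwise the real value is resolved from the registry hidden property via
// getActualEpPswdFromHiddenProperty(api, registry).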
api.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); api.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); api.setEndpointSecured(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_SECURED))); api.setEndpointAuthDigest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_AUTH_DIGEST))); api.setEndpointUTUsername(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_USERNAME)); if (!((APIConstants.DEFAULT_MODIFIED_ENDPOINT_PASSWORD) .equals(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)))) { api.setEndpointUTPassword(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)); } else { //If APIEndpointPasswordRegistryHandler is enabled take password from the registry hidden property api.setEndpointUTPassword(getActualEpPswdFromHiddenProperty(api, registry)); } api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setInSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_INSEQUENCE)); api.setOutSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE)); api.setFaultSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE)); api.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); api.setProductionMaxTps(artifact.getAttribute(APIConstants.API_PRODUCTION_THROTTLE_MAXTPS)); int cacheTimeout = APIConstants.API_RESPONSE_CACHE_TIMEOUT; try { cacheTimeout = Integer.parseInt(artifact.getAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT)); } catch (NumberFormatException e) { //ignore } api.setCacheTimeout(cacheTimeout); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); api.setSubscriptionAvailability(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); api.setSubscriptionAvailableTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); boolean isGlobalThrottlingEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (isGlobalThrottlingEnabled) { String apiLevelTier = ApiMgtDAO.getInstance().getAPILevelTier(apiId); api.setApiLevelPolicy(apiLevelTier); } String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); Map<String, Tier> definedTiers = getTiers(tenantId); Set<Tier> availableTier = getAvailableTiers(definedTiers, tiers, apiName); api.addAvailableTiers(availableTier); api.setMonetizationCategory(getAPIMonetizationCategory(availableTier, tenantDomainName)); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); // We set the context template here api.setContextTemplate(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE)); api.setLatest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_LATEST))); Set<URITemplate> uriTemplates = new LinkedHashSet<URITemplate>(); List<String> uriTemplateNames = new ArrayList<String>(); Set<Scope> scopes = 
ApiMgtDAO.getInstance().getAPIScopes(api.getId()); api.setScopes(scopes); HashMap<String, String> urlPatternsSet; urlPatternsSet = ApiMgtDAO.getInstance().getURITemplatesPerAPIAsString(api.getId()); HashMap<String, String> resourceScopesMap; resourceScopesMap = ApiMgtDAO.getInstance().getResourceToScopeMapping(api.getId()); Set<String> urlPatternsKeySet = urlPatternsSet.keySet(); String resourceScopeKey; for (String urlPattern : urlPatternsKeySet) { URITemplate uriTemplate = new URITemplate(); String[] urlPatternComponents = urlPattern.split("::"); String uTemplate = (urlPatternComponents.length >= 1) ? urlPatternComponents[0] : null; String method = (urlPatternComponents.length >= 2) ? urlPatternComponents[1] : null; String authType = (urlPatternComponents.length >= 3) ? urlPatternComponents[2] : null; String throttlingTier = (urlPatternComponents.length >= 4) ? urlPatternComponents[3] : null; String mediationScript = (urlPatternComponents.length >= 5) ? urlPatternComponents[4] : null; uriTemplate.setHTTPVerb(method); uriTemplate.setAuthType(authType); uriTemplate.setThrottlingTier(throttlingTier); uriTemplate.setHttpVerbs(method); uriTemplate.setAuthTypes(authType); uriTemplate.setUriTemplate(uTemplate); uriTemplate.setResourceURI(api.getUrl()); uriTemplate.setResourceSandboxURI(api.getSandboxUrl()); uriTemplate.setThrottlingTiers(throttlingTier); uriTemplate.setMediationScript(mediationScript); resourceScopeKey = APIUtil.getResourceKey(api.getContext(), apiVersion, uTemplate, method); uriTemplate.setScopes(findScopeByKey(scopes, resourceScopesMap.get(resourceScopeKey))); //Checking for duplicate uri template names if (uriTemplateNames.contains(uTemplate)) { for (URITemplate tmp : uriTemplates) { if (uTemplate.equals(tmp.getUriTemplate())) { tmp.setHttpVerbs(method); tmp.setAuthTypes(authType); tmp.setThrottlingTiers(throttlingTier); resourceScopeKey = APIUtil.getResourceKey(api.getContext(), apiVersion, uTemplate, method); tmp.setScopes(findScopeByKey(scopes, resourceScopesMap.get(resourceScopeKey))); break; } } } else { uriTemplates.add(uriTemplate); } uriTemplateNames.add(uTemplate); } api.setUriTemplates(uriTemplates); api.setAsDefaultVersion(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION))); Set<String> tags = new HashSet<String>(); Tag[] tag = registry.getTags(artifactPath); for (Tag tag1 : tag) { tags.add(tag1.getTagName()); } api.addTags(tags); api.setLastUpdated(registry.get(artifactPath).getLastModified()); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); } catch (GovernanceException e) { String msg = "Failed to get API for artifact "; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } catch (UserStoreException e) { String msg = "Failed to get User Realm of API Provider"; throw new APIManagementException(msg, e); } return api; } /** * This Method is different from getAPI method, as this one returns * URLTemplates without aggregating duplicates. This is to be used for building synapse config. 
* * @param artifact * @param registry * @return API * @throws org.wso2.carbon.apimgt.api.APIManagementException */ public static API getAPIForPublishing(GovernanceArtifact artifact, Registry registry) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); APIIdentifier apiIdentifier = new APIIdentifier(providerName, apiName, apiVersion); int apiId = ApiMgtDAO.getInstance().getAPIID(apiIdentifier, null); if (apiId == -1) { return null; } api = new API(apiIdentifier); //set uuid api.setUUID(artifact.getId()); // set rating String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); Resource apiResource = registry.get(artifactPath); api.setAccessControl(apiResource.getProperty(APIConstants.ACCESS_CONTROL)); api.setAccessControlRoles( APIConstants.NULL_USER_ROLE_LIST.equals(apiResource.getProperty(APIConstants.PUBLISHER_ROLES)) ? null : apiResource.getProperty(APIConstants.PUBLISHER_ROLES)); api.setRating(getAverageRating(apiId)); //set description api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); //set last access time api.setLastUpdated(registry.get(artifactPath).getLastModified()); // set url api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setWsdlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WSDL)); api.setWadlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WADL)); api.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); api.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); api.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); api.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); api.setEndpointSecured(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_SECURED))); api.setEndpointAuthDigest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_AUTH_DIGEST))); api.setEndpointUTUsername(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_USERNAME)); if (!((APIConstants.DEFAULT_MODIFIED_ENDPOINT_PASSWORD) .equals(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)))) { api.setEndpointUTPassword(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)); } else { //If APIEndpointPasswordRegistryHandler is enabled take password from the registry hidden property api.setEndpointUTPassword(getActualEpPswdFromHiddenProperty(api, registry)); } api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setInSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_INSEQUENCE)); api.setOutSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE)); api.setFaultSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE)); api.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); 
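// The block below copies the implementation/type/throttle attributes and then parses the
// response-cache timeout: an empty attribute keeps the APIConstants.API_RESPONSE_CACHE_TIMEOUT
// default, and a non-numeric value is caught, logged as a warning and ignored rather than
// failing the publishing flow.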
api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); api.setType(artifact.getAttribute(APIConstants.API_OVERVIEW_TYPE)); api.setProductionMaxTps(artifact.getAttribute(APIConstants.API_PRODUCTION_THROTTLE_MAXTPS)); api.setSandboxMaxTps(artifact.getAttribute(APIConstants.API_SANDBOX_THROTTLE_MAXTPS)); int cacheTimeout = APIConstants.API_RESPONSE_CACHE_TIMEOUT; try { String strCacheTimeout = artifact.getAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT); if (strCacheTimeout != null && !strCacheTimeout.isEmpty()) { cacheTimeout = Integer.parseInt(strCacheTimeout); } } catch (NumberFormatException e) { if (log.isWarnEnabled()) { log.warn("Error while retrieving cache timeout from the registry for " + apiIdentifier); } // ignore the exception and use default cache timeout value } api.setCacheTimeout(cacheTimeout); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); api.setType(artifact.getAttribute(APIConstants.API_OVERVIEW_TYPE)); api.setSubscriptionAvailability(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); api.setSubscriptionAvailableTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); APIManagerConfiguration config = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration(); boolean isGlobalThrottlingEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (isGlobalThrottlingEnabled) { String apiLevelTier = ApiMgtDAO.getInstance().getAPILevelTier(apiId); api.setApiLevelPolicy(apiLevelTier); } String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); Map<String, Tier> definedTiers = getTiers(tenantId); Set<Tier> availableTier = getAvailableTiers(definedTiers, tiers, apiName); api.addAvailableTiers(availableTier); // This contains the resolved context api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); // We set the context template here api.setContextTemplate(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE)); api.setLatest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_LATEST))); Set<URITemplate> uriTemplates = new LinkedHashSet<URITemplate>(); List<String> uriTemplateNames = new ArrayList<String>(); Set<Scope> scopes = ApiMgtDAO.getInstance().getAPIScopes(api.getId()); api.setScopes(scopes); HashMap<String, String> urlPatternsSet; urlPatternsSet = ApiMgtDAO.getInstance().getURITemplatesPerAPIAsString(api.getId()); HashMap<String, String> resourceScopes; resourceScopes = ApiMgtDAO.getInstance().getResourceToScopeMapping(api.getId()); Set<String> urlPatternsKeySet = urlPatternsSet.keySet(); String resourceScopeKey; for (String urlPattern : urlPatternsKeySet) { URITemplate uriTemplate = new URITemplate(); String[] urlPatternComponents = urlPattern.split("::"); String uTemplate = (urlPatternComponents.length >= 1) ? urlPatternComponents[0] : null; String method = (urlPatternComponents.length >= 2) ? 
urlPatternComponents[1] : null; String authType = (urlPatternComponents.length >= 3) ? urlPatternComponents[2] : null; String throttlingTier = (urlPatternComponents.length >= 4) ? urlPatternComponents[3] : null; String mediationScript = (urlPatternComponents.length >= 5) ? urlPatternComponents[4] : null; uriTemplate.setHTTPVerb(method); uriTemplate.setAuthType(authType); uriTemplate.setThrottlingTier(throttlingTier); uriTemplate.setHttpVerbs(method); uriTemplate.setAuthTypes(authType); uriTemplate.setUriTemplate(uTemplate); uriTemplate.setResourceURI(api.getUrl()); uriTemplate.setResourceSandboxURI(api.getSandboxUrl()); uriTemplate.setThrottlingTiers(throttlingTier); uriTemplate.setMediationScript(mediationScript); uriTemplate.setMediationScripts(method, mediationScript); resourceScopeKey = APIUtil.getResourceKey(api.getContext(), apiVersion, uTemplate, method); uriTemplate.setScopes(findScopeByKey(scopes, resourceScopes.get(resourceScopeKey))); //Checking for duplicate uri template names if (uriTemplateNames.contains(uTemplate)) { for (URITemplate tmp : uriTemplates) { if (uTemplate.equals(tmp.getUriTemplate())) { tmp.setHttpVerbs(method); tmp.setAuthTypes(authType); tmp.setThrottlingTiers(throttlingTier); tmp.setMediationScripts(method, mediationScript); resourceScopeKey = APIUtil.getResourceKey(api.getContext(), apiVersion, uTemplate, method); tmp.setScopes(findScopeByKey(scopes, resourceScopes.get(resourceScopeKey))); break; } } } else { uriTemplates.add(uriTemplate); } uriTemplateNames.add(uTemplate); } if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equalsIgnoreCase(api.getImplementation())) { for (URITemplate template : uriTemplates) { template.setMediationScript(template.getAggregatedMediationScript()); } } api.setUriTemplates(uriTemplates); api.setAsDefaultVersion(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION))); Set<String> tags = new HashSet<String>(); Tag[] tag = registry.getTags(artifactPath); for (Tag tag1 : tag) { tags.add(tag1.getTagName()); } api.addTags(tags); api.setLastUpdated(registry.get(artifactPath).getLastModified()); api.setCreatedTime(String.valueOf(registry.get(artifactPath).getCreatedTime().getTime())); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); } catch (GovernanceException e) { String msg = "Failed to get API for artifact "; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } catch (UserStoreException e) { String msg = "Failed to get User Realm of API Provider"; throw new APIManagementException(msg, e); } return api; } public static API getAPI(GovernanceArtifact artifact) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); APIIdentifier apiIdentifier = new APIIdentifier(providerName, apiName, apiVersion); api = new API(apiIdentifier); int apiId = ApiMgtDAO.getInstance().getAPIID(apiIdentifier, null); if (apiId == -1) { return null; } //set uuid api.setUUID(artifact.getId()); api.setRating(getAverageRating(apiId)); 
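// Tier resolution below branches on isAdvanceThrottlingEnabled(): with advanced throttling,
// the "||"-separated policy names from the artifact are validated against the subscription
// policies returned by ApiMgtDAO (unknown names are only logged as warnings); otherwise the
// legacy tier list is attached as-is.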
api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); api.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); api.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setInSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_INSEQUENCE)); api.setOutSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE)); api.setFaultSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE)); api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); api.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); api.setType(artifact.getAttribute(APIConstants.API_OVERVIEW_TYPE)); int cacheTimeout = APIConstants.API_RESPONSE_CACHE_TIMEOUT; try { cacheTimeout = Integer.parseInt(artifact.getAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT)); } catch (NumberFormatException e) { //ignore } api.setCacheTimeout(cacheTimeout); boolean isGlobalThrottlingEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (isGlobalThrottlingEnabled) { String apiLevelTier = ApiMgtDAO.getInstance().getAPILevelTier(apiId); api.setApiLevelPolicy(apiLevelTier); Set<Tier> availablePolicy = new HashSet<Tier>(); String[] subscriptionPolicy = ApiMgtDAO.getInstance().getPolicyNames(PolicyConstants.POLICY_LEVEL_SUB, replaceEmailDomainBack(providerName)); List<String> definedPolicyNames = Arrays.asList(subscriptionPolicy); String policies = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); if (policies != null && !"".equals(policies)) { String[] policyNames = policies.split("\\|\\|"); for (String policyName : policyNames) { if (definedPolicyNames.contains(policyName) || APIConstants.UNLIMITED_TIER.equals(policyName)) { Tier p = new Tier(policyName); availablePolicy.add(p); } else { log.warn("Unknown policy: " + policyName + " found on API: " + apiName); } } } api.addAvailableTiers(availablePolicy); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); api.setMonetizationCategory(getAPIMonetizationCategory(availablePolicy, tenantDomainName)); } else { //deprecated throttling method Set<Tier> availableTier = new HashSet<Tier>(); String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); if (tiers != null) { String[] tierNames = tiers.split("\\|\\|"); for (String tierName : tierNames) { Tier tier = new Tier(tierName); availableTier.add(tier); } api.addAvailableTiers(availableTier); api.setMonetizationCategory(getAPIMonetizationCategory(availableTier, tenantDomainName)); } else { api.setMonetizationCategory(getAPIMonetizationCategory(availableTier, tenantDomainName)); } } api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); 
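// The remaining attributes are plain artifact copies. URI templates for this overload are
// then loaded via ApiMgtDAO.getAllURITemplates(context, version) instead of the "::"-keyed
// per-API pattern map used by the registry-backed variants, and no duplicate-template
// aggregation is performed.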
api.setSubscriptionAvailability(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); api.setSubscriptionAvailableTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); api.setAsDefaultVersion(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION))); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); api.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); api.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); ArrayList<URITemplate> urlPatternsList; urlPatternsList = ApiMgtDAO.getInstance().getAllURITemplates(api.getContext(), api.getId().getVersion()); Set<URITemplate> uriTemplates = new HashSet<URITemplate>(urlPatternsList); for (URITemplate uriTemplate : uriTemplates) { uriTemplate.setResourceURI(api.getUrl()); uriTemplate.setResourceSandboxURI(api.getSandboxUrl()); } api.setUriTemplates(uriTemplates); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); } catch (GovernanceException e) { String msg = "Failed to get API from artifact "; throw new APIManagementException(msg, e); } return api; } /** * This method used to get Provider from provider artifact * * @param artifact provider artifact * @return Provider * @throws APIManagementException if failed to get Provider from provider artifact. */ public static Provider getProvider(GenericArtifact artifact) throws APIManagementException { Provider provider; try { provider = new Provider(artifact.getAttribute(APIConstants.PROVIDER_OVERVIEW_NAME)); provider.setDescription(artifact.getAttribute(APIConstants.PROVIDER_OVERVIEW_DESCRIPTION)); provider.setEmail(artifact.getAttribute(APIConstants.PROVIDER_OVERVIEW_EMAIL)); } catch (GovernanceException e) { String msg = "Failed to get provider "; log.error(msg, e); throw new APIManagementException(msg, e); } return provider; } /** * Returns a list of scopes when passed the Provider Name and Scope Key * * @param scopeKey * @param provider * @return * @throws APIManagementException */ public static Set<Scope> getScopeByScopeKey(String scopeKey, String provider) throws APIManagementException { Set<Scope> scopeSet = null; String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(provider)); try { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); scopeSet = ApiMgtDAO.getInstance().getAPIScopesByScopeKey(scopeKey, tenantId); } catch (UserStoreException e) { String msg = "Error while retrieving Scopes"; log.error(msg, e); handleException(msg); } return scopeSet; } /** * Create Governance artifact from given attributes * * @param artifact initial governance artifact * @param api API object with the attributes value * @return GenericArtifact * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to create API */ public static GenericArtifact createAPIArtifactContent(GenericArtifact artifact, API api) throws APIManagementException { try { String apiStatus = api.getStatus().getStatus(); artifact.setAttribute(APIConstants.API_OVERVIEW_NAME, api.getId().getApiName()); 
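// The remaining overview attributes are copied one-to-one from the API bean. The context
// template is rejected when it equals the bare VERSION_PLACEHOLDER path (tenant-prefixed for
// non-super tenants), available tiers are serialized as a "||"-delimited string, and any
// existing URITemplate attributes are cleared before being re-added from api.getUriTemplates().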
artifact.setAttribute(APIConstants.API_OVERVIEW_VERSION, api.getId().getVersion()); artifact.setAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION, String.valueOf(api.isDefaultVersion())); artifact.setAttribute(APIConstants.API_OVERVIEW_CONTEXT, api.getContext()); artifact.setAttribute(APIConstants.API_OVERVIEW_PROVIDER, api.getId().getProviderName()); artifact.setAttribute(APIConstants.API_OVERVIEW_DESCRIPTION, api.getDescription()); artifact.setAttribute(APIConstants.API_OVERVIEW_WSDL, api.getWsdlUrl()); artifact.setAttribute(APIConstants.API_OVERVIEW_WADL, api.getWadlUrl()); artifact.setAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL, api.getThumbnailUrl()); artifact.setAttribute(APIConstants.API_OVERVIEW_STATUS, apiStatus); artifact.setAttribute(APIConstants.API_OVERVIEW_TEC_OWNER, api.getTechnicalOwner()); artifact.setAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL, api.getTechnicalOwnerEmail()); artifact.setAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER, api.getBusinessOwner()); artifact.setAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL, api.getBusinessOwnerEmail()); artifact.setAttribute(APIConstants.API_OVERVIEW_VISIBILITY, api.getVisibility()); artifact.setAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES, api.getVisibleRoles()); artifact.setAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS, api.getVisibleTenants()); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_SECURED, Boolean.toString(api.isEndpointSecured())); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_AUTH_DIGEST, Boolean.toString(api.isEndpointAuthDigest())); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_USERNAME, api.getEndpointUTUsername()); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD, api.getEndpointUTPassword()); artifact.setAttribute(APIConstants.API_OVERVIEW_TRANSPORTS, api.getTransports()); artifact.setAttribute(APIConstants.API_OVERVIEW_INSEQUENCE, api.getInSequence()); artifact.setAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE, api.getOutSequence()); artifact.setAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE, api.getFaultSequence()); artifact.setAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING, api.getResponseCache()); artifact.setAttribute(APIConstants.API_OVERVIEW_CACHE_TIMEOUT, Integer.toString(api.getCacheTimeout())); artifact.setAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL, api.getRedirectURL()); artifact.setAttribute(APIConstants.API_OVERVIEW_OWNER, api.getApiOwner()); artifact.setAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY, Boolean.toString(api.isAdvertiseOnly())); artifact.setAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG, api.getEndpointConfig()); artifact.setAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY, api.getSubscriptionAvailability()); artifact.setAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS, api.getSubscriptionAvailableTenants()); artifact.setAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION, api.getImplementation()); artifact.setAttribute(APIConstants.API_PRODUCTION_THROTTLE_MAXTPS, api.getProductionMaxTps()); artifact.setAttribute(APIConstants.API_SANDBOX_THROTTLE_MAXTPS, api.getSandboxMaxTps()); //Validate if the API has an unsupported context before setting it in the artifact String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(); if (APIConstants.SUPER_TENANT_DOMAIN.equals(tenantDomain)) { String invalidContext = File.separator + APIConstants.VERSION_PLACEHOLDER; if (invalidContext.equals(api.getContextTemplate())) 
{ throw new APIManagementException( "API : " + api.getId() + " has an unsupported context : " + api.getContextTemplate()); } } else { String invalidContext = APIConstants.TENANT_PREFIX + tenantDomain + File.separator + APIConstants.VERSION_PLACEHOLDER; if (invalidContext.equals(api.getContextTemplate())) { throw new APIManagementException( "API : " + api.getId() + " has an unsupported context : " + api.getContextTemplate()); } } // This is to support the pluggable version strategy. artifact.setAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE, api.getContextTemplate()); artifact.setAttribute(APIConstants.API_OVERVIEW_VERSION_TYPE, "context"); artifact.setAttribute(APIConstants.API_OVERVIEW_TYPE, api.getType()); StringBuilder policyBuilder = new StringBuilder(); for (Tier tier : api.getAvailableTiers()) { policyBuilder.append(tier.getName()); policyBuilder.append("||"); } String policies = policyBuilder.toString(); if (!"".equals(policies)) { policies = policies.substring(0, policies.length() - 2); artifact.setAttribute(APIConstants.API_OVERVIEW_TIER, policies); } StringBuilder tiersBuilder = new StringBuilder(); for (Tier tier : api.getAvailableTiers()) { tiersBuilder.append(tier.getName()); tiersBuilder.append("||"); } String tiers = tiersBuilder.toString(); if (!"".equals(tiers)) { tiers = tiers.substring(0, tiers.length() - 2); artifact.setAttribute(APIConstants.API_OVERVIEW_TIER, tiers); } if (APIConstants.PUBLISHED.equals(apiStatus)) { artifact.setAttribute(APIConstants.API_OVERVIEW_IS_LATEST, "true"); } String[] keys = artifact.getAttributeKeys(); for (String key : keys) { if (key.contains("URITemplate")) { artifact.removeAttribute(key); } } Set<URITemplate> uriTemplateSet = api.getUriTemplates(); int i = 0; for (URITemplate uriTemplate : uriTemplateSet) { artifact.addAttribute(APIConstants.API_URI_PATTERN + i, uriTemplate.getUriTemplate()); artifact.addAttribute(APIConstants.API_URI_HTTP_METHOD + i, uriTemplate.getHTTPVerb()); artifact.addAttribute(APIConstants.API_URI_AUTH_TYPE + i, uriTemplate.getAuthType()); i++; } artifact.setAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS, writeEnvironmentsToArtifact(api)); artifact.setAttribute(APIConstants.API_OVERVIEW_CORS_CONFIGURATION, APIUtil.getCorsConfigurationJsonFromDto(api.getCorsConfiguration())); } catch (GovernanceException e) { String msg = "Failed to create API for : " + api.getId().getApiName(); log.error(msg, e); throw new APIManagementException(msg, e); } return artifact; } /** * Create the Documentation from artifact * * @param artifact Documentation artifact * @return Documentation * @throws APIManagementException if failed to create Documentation from artifact */ public static Documentation getDocumentation(GenericArtifact artifact) throws APIManagementException { Documentation documentation; try { DocumentationType type; String docType = artifact.getAttribute(APIConstants.DOC_TYPE); if (docType.equalsIgnoreCase(DocumentationType.HOWTO.getType())) { type = DocumentationType.HOWTO; } else if (docType.equalsIgnoreCase(DocumentationType.PUBLIC_FORUM.getType())) { type = DocumentationType.PUBLIC_FORUM; } else if (docType.equalsIgnoreCase(DocumentationType.SUPPORT_FORUM.getType())) { type = DocumentationType.SUPPORT_FORUM; } else if (docType.equalsIgnoreCase(DocumentationType.API_MESSAGE_FORMAT.getType())) { type = DocumentationType.API_MESSAGE_FORMAT; } else if (docType.equalsIgnoreCase(DocumentationType.SAMPLES.getType())) { type = DocumentationType.SAMPLES; } else { type = DocumentationType.OTHER; } documentation = new 
Documentation(type, artifact.getAttribute(APIConstants.DOC_NAME)); documentation.setId(artifact.getId()); documentation.setSummary(artifact.getAttribute(APIConstants.DOC_SUMMARY)); String visibilityAttr = artifact.getAttribute(APIConstants.DOC_VISIBILITY); Documentation.DocumentVisibility documentVisibility = Documentation.DocumentVisibility.API_LEVEL; if (visibilityAttr != null) { if (visibilityAttr.equals(Documentation.DocumentVisibility.API_LEVEL.name())) { documentVisibility = Documentation.DocumentVisibility.API_LEVEL; } else if (visibilityAttr.equals(Documentation.DocumentVisibility.PRIVATE.name())) { documentVisibility = Documentation.DocumentVisibility.PRIVATE; } else if (visibilityAttr.equals(Documentation.DocumentVisibility.OWNER_ONLY.name())) { documentVisibility = Documentation.DocumentVisibility.OWNER_ONLY; } } documentation.setVisibility(documentVisibility); Documentation.DocumentSourceType docSourceType = Documentation.DocumentSourceType.INLINE; String artifactAttribute = artifact.getAttribute(APIConstants.DOC_SOURCE_TYPE); if (Documentation.DocumentSourceType.URL.name().equals(artifactAttribute)) { docSourceType = Documentation.DocumentSourceType.URL; documentation.setSourceUrl(artifact.getAttribute(APIConstants.DOC_SOURCE_URL)); } else if (Documentation.DocumentSourceType.FILE.name().equals(artifactAttribute)) { docSourceType = Documentation.DocumentSourceType.FILE; documentation.setFilePath(prependWebContextRoot(artifact.getAttribute(APIConstants.DOC_FILE_PATH))); } documentation.setSourceType(docSourceType); if (documentation.getType() == DocumentationType.OTHER) { documentation.setOtherTypeName(artifact.getAttribute(APIConstants.DOC_OTHER_TYPE_NAME)); } } catch (GovernanceException e) { throw new APIManagementException("Failed to get documentation from artifact", e); } return documentation; } /** * Create the Documentation from artifact * * @param artifact Documentation artifact * @return Documentation * @throws APIManagementException if failed to create Documentation from artifact */ public static Documentation getDocumentation(GenericArtifact artifact, String docCreatorName) throws APIManagementException { Documentation documentation; try { DocumentationType type; String docType = artifact.getAttribute(APIConstants.DOC_TYPE); if (docType.equalsIgnoreCase(DocumentationType.HOWTO.getType())) { type = DocumentationType.HOWTO; } else if (docType.equalsIgnoreCase(DocumentationType.PUBLIC_FORUM.getType())) { type = DocumentationType.PUBLIC_FORUM; } else if (docType.equalsIgnoreCase(DocumentationType.SUPPORT_FORUM.getType())) { type = DocumentationType.SUPPORT_FORUM; } else if (docType.equalsIgnoreCase(DocumentationType.API_MESSAGE_FORMAT.getType())) { type = DocumentationType.API_MESSAGE_FORMAT; } else if (docType.equalsIgnoreCase(DocumentationType.SAMPLES.getType())) { type = DocumentationType.SAMPLES; } else { type = DocumentationType.OTHER; } documentation = new Documentation(type, artifact.getAttribute(APIConstants.DOC_NAME)); documentation.setId(artifact.getId()); documentation.setSummary(artifact.getAttribute(APIConstants.DOC_SUMMARY)); Documentation.DocumentSourceType docSourceType = Documentation.DocumentSourceType.INLINE; String artifactAttribute = artifact.getAttribute(APIConstants.DOC_SOURCE_TYPE); if (artifactAttribute.equals(Documentation.DocumentSourceType.URL.name())) { docSourceType = Documentation.DocumentSourceType.URL; } else if (artifactAttribute.equals(Documentation.DocumentSourceType.FILE.name())) { docSourceType = 
Documentation.DocumentSourceType.FILE; } documentation.setSourceType(docSourceType); if ("URL".equals(artifact.getAttribute(APIConstants.DOC_SOURCE_TYPE))) { documentation.setSourceUrl(artifact.getAttribute(APIConstants.DOC_SOURCE_URL)); } if (docSourceType == Documentation.DocumentSourceType.FILE) { String filePath = prependTenantPrefix(artifact.getAttribute(APIConstants.DOC_FILE_PATH), docCreatorName); documentation.setFilePath(prependWebContextRoot(filePath)); } if (documentation.getType() == DocumentationType.OTHER) { documentation.setOtherTypeName(artifact.getAttribute(APIConstants.DOC_OTHER_TYPE_NAME)); } } catch (GovernanceException e) { throw new APIManagementException("Failed to get documentation from artifact", e); } return documentation; } public static APIStatus getApiStatus(String status) throws APIManagementException { APIStatus apiStatus = null; for (APIStatus aStatus : APIStatus.values()) { if (aStatus.getStatus().equalsIgnoreCase(status)) { apiStatus = aStatus; } } return apiStatus; } /** * Prepends the Tenant Prefix to a registry path. ex: /t/test1.com * * @param postfixUrl path to be prepended. * @return Path prepended with he Tenant domain prefix. */ public static String prependTenantPrefix(String postfixUrl, String username) { String tenantDomain = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(username)); if (!(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain))) { String tenantPrefix = "/t/"; postfixUrl = tenantPrefix + tenantDomain + postfixUrl; } return postfixUrl; } /** * Prepends the webcontextroot to a registry path. * * @param postfixUrl path to be prepended. * @return Path prepended with he WebContext root. */ public static String prependWebContextRoot(String postfixUrl) { String webContext = CarbonUtils.getServerConfiguration().getFirstProperty("WebContextRoot"); if (webContext != null && !"/".equals(webContext)) { postfixUrl = webContext + postfixUrl; } return postfixUrl; } /** * Utility method for creating storage path for an icon. * * @param identifier APIIdentifier * @return Icon storage path. */ public static String getIconPath(APIIdentifier identifier) { String artifactPath = APIConstants.API_IMAGE_LOCATION + RegistryConstants.PATH_SEPARATOR + identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR + identifier.getApiName() + RegistryConstants.PATH_SEPARATOR + identifier.getVersion(); return artifactPath + RegistryConstants.PATH_SEPARATOR + APIConstants.API_ICON_IMAGE; } /** * Utility method to generate the path for a file. * * @param identifier APIIdentifier * @return Generated path. * @fileName File name. 
*/ public static String getDocumentationFilePath(APIIdentifier identifier, String fileName) { return APIUtil.getAPIDocPath(identifier) + APIConstants.DOCUMENT_FILE_DIR + RegistryConstants.PATH_SEPARATOR + fileName; } //remove getSwagger12DefinitionFilePath once getSwagger20DefinitionFilePath operates public static String getSwagger12DefinitionFilePath(String apiName, String apiVersion, String apiProvider) { return APIConstants.API_DOC_LOCATION + RegistryConstants.PATH_SEPARATOR + apiName + '-' + apiVersion + '-' + apiProvider + RegistryConstants.PATH_SEPARATOR + APIConstants.API_DOC_1_2_LOCATION; } public static String getSwagger20DefinitionFilePath(String apiName, String apiVersion, String apiProvider) { return APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + apiProvider + RegistryConstants.PATH_SEPARATOR + apiName + RegistryConstants.PATH_SEPARATOR + apiVersion + RegistryConstants.PATH_SEPARATOR; } public static String getWSDLDefinitionFilePath(String apiName, String apiVersion, String apiProvider) { return APIConstants.API_WSDL_RESOURCE_LOCATION + apiProvider + "--" + apiName + apiVersion + ".wsdl"; } /** * Utility method to get api path from APIIdentifier * * @param identifier APIIdentifier * @return API path */ public static String getAPIPath(APIIdentifier identifier) { return APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR + identifier.getApiName() + RegistryConstants.PATH_SEPARATOR + identifier.getVersion() + APIConstants.API_RESOURCE_NAME; } /** * Utility method to get api identifier from api path. * * @param apiPath Path of the API in registry * @return relevant API Identifier */ public static APIIdentifier getAPIIdentifier(String apiPath) { int length = (APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR).length(); if (!apiPath.contains(APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR)) { length = (APIConstants.API_IMAGE_LOCATION + RegistryConstants.PATH_SEPARATOR).length(); } if (length <= 0) { length = (APIConstants.API_DOC_LOCATION + RegistryConstants.PATH_SEPARATOR).length(); } String relativePath = apiPath.substring(length); String[] values = relativePath.split(RegistryConstants.PATH_SEPARATOR); if (values.length > 3) { return new APIIdentifier(values[0], values[1], values[2]); } return null; } /** * Utility method to get API provider path * * @param identifier APIIdentifier * @return API provider path */ public static String getAPIProviderPath(APIIdentifier identifier) { return APIConstants.API_LOCATION + RegistryConstants.PATH_SEPARATOR + identifier.getProviderName(); } /** * Utility method to get documentation path * * @param apiId APIIdentifier * @return Doc path */ public static String getAPIDocPath(APIIdentifier apiId) { return APIConstants.API_LOCATION + RegistryConstants.PATH_SEPARATOR + apiId.getProviderName() + RegistryConstants.PATH_SEPARATOR + apiId.getApiName() + RegistryConstants.PATH_SEPARATOR + apiId.getVersion() + RegistryConstants.PATH_SEPARATOR + APIConstants.DOC_DIR + RegistryConstants.PATH_SEPARATOR; } /** * Utility method to get documentation content file path * * @param apiId APIIdentifier * @param documentationName String * @return Doc content path */ public static String getAPIDocContentPath(APIIdentifier apiId, String documentationName) { return getAPIDocPath(apiId) + RegistryConstants.PATH_SEPARATOR + documentationName; } /** * This utility method used to create documentation artifact content * * @param artifact 
GovernanceArtifact * @param apiId APIIdentifier * @param documentation Documentation * @return GenericArtifact * @throws APIManagementException if failed to get GovernanceArtifact from Documentation */ public static GenericArtifact createDocArtifactContent(GenericArtifact artifact, APIIdentifier apiId, Documentation documentation) throws APIManagementException { try { artifact.setAttribute(APIConstants.DOC_NAME, documentation.getName()); artifact.setAttribute(APIConstants.DOC_SUMMARY, documentation.getSummary()); artifact.setAttribute(APIConstants.DOC_TYPE, documentation.getType().getType()); artifact.setAttribute(APIConstants.DOC_VISIBILITY, documentation.getVisibility().name()); Documentation.DocumentSourceType sourceType = documentation.getSourceType(); switch (sourceType) { case INLINE: sourceType = Documentation.DocumentSourceType.INLINE; break; case URL: sourceType = Documentation.DocumentSourceType.URL; break; case FILE: { sourceType = Documentation.DocumentSourceType.FILE; } break; default: throw new APIManagementException("Unknown sourceType " + sourceType + " provided for documentation"); } //Documentation Source URL is a required field in the documentation.rxt for migrated setups //Therefore setting a default value if it is not set. if (documentation.getSourceUrl() == null) { documentation.setSourceUrl(" "); } artifact.setAttribute(APIConstants.DOC_SOURCE_TYPE, sourceType.name()); artifact.setAttribute(APIConstants.DOC_SOURCE_URL, documentation.getSourceUrl()); artifact.setAttribute(APIConstants.DOC_FILE_PATH, documentation.getFilePath()); artifact.setAttribute(APIConstants.DOC_OTHER_TYPE_NAME, documentation.getOtherTypeName()); String basePath = apiId.getProviderName() + RegistryConstants.PATH_SEPARATOR + apiId.getApiName() + RegistryConstants.PATH_SEPARATOR + apiId.getVersion(); artifact.setAttribute(APIConstants.DOC_API_BASE_PATH, basePath); } catch (GovernanceException e) { String msg = "Failed to create doc artifact content from :" + documentation.getName(); log.error(msg, e); throw new APIManagementException(msg, e); } return artifact; } /** * this method used to initialized the ArtifactManager * * @param registry Registry * @param key , key name of the key * @return GenericArtifactManager * @throws APIManagementException if failed to initialized GenericArtifactManager */ public static GenericArtifactManager getArtifactManager(Registry registry, String key) throws APIManagementException { GenericArtifactManager artifactManager = null; try { GovernanceUtils.loadGovernanceArtifacts((UserRegistry) registry); if (GovernanceUtils.findGovernanceArtifactConfiguration(key, registry) != null) { artifactManager = new GenericArtifactManager(registry, key); } else { log.warn("Couldn't find GovernanceArtifactConfiguration of RXT: " + key + ". 
Tenant id set in registry : " + ((UserRegistry) registry).getTenantId() + ", Tenant domain set in PrivilegedCarbonContext: " + PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId()); } } catch (RegistryException e) { String msg = "Failed to initialize GenericArtifactManager"; log.error(msg, e); throw new APIManagementException(msg, e); } return artifactManager; } private static void handleException(String msg) throws APIManagementException { log.error(msg); throw new APIManagementException(msg); } public static void handleException(String msg, Throwable t) throws APIManagementException { log.error(msg, t); throw new APIManagementException(msg, t); } public static SubscriberKeyMgtClient getKeyManagementClient() throws APIManagementException { KeyManagerConfiguration configuration = KeyManagerHolder.getKeyManagerInstance().getKeyManagerConfiguration(); String serverURL = configuration.getParameter(APIConstants.AUTHSERVER_URL); String username = configuration.getParameter(APIConstants.KEY_MANAGER_USERNAME); String password = configuration.getParameter(APIConstants.KEY_MANAGER_PASSWORD); if (serverURL == null) { handleException("API key manager URL unspecified"); } if (username == null || password == null) { handleException("Authentication credentials for API key manager unspecified"); } try { return new SubscriberKeyMgtClient(serverURL, username, password); } catch (Exception e) { handleException("Error while initializing the subscriber key management client", e); return null; } } public static OAuthAdminClient getOauthAdminClient() throws APIManagementException { try { return new OAuthAdminClient(); } catch (Exception e) { handleException("Error while initializing the OAuth admin client", e); return null; } } public static UserInformationRecoveryClient getUserInformationRecoveryClient() throws APIManagementException { try { return new UserInformationRecoveryClient(); } catch (Exception e) { handleException("Error while initializing the User information recovery client", e); return null; } } public static ApplicationManagementServiceClient getApplicationManagementServiceClient() throws APIManagementException { try { return new ApplicationManagementServiceClient(); } catch (Exception e) { handleException("Error while initializing the Application Management Service client", e); return null; } } /** * Crate an WSDL from given wsdl url. 
Reset the endpoint details to gateway node * * * * @param registry - Governance Registry space to save the WSDL * @param api -API instance * @return Path of the created resource * @throws APIManagementException If an error occurs while adding the WSDL */ public static String createWSDL(Registry registry, API api) throws RegistryException, APIManagementException { try { String wsdlResourcePath = APIConstants.API_WSDL_RESOURCE_LOCATION + api.getId().getProviderName() + "--" + api.getId().getApiName() + api.getId().getVersion() + ".wsdl"; String absoluteWSDLResourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + wsdlResourcePath; APIMWSDLReader wsdlReader = new APIMWSDLReader(api.getWsdlUrl()); OMElement wsdlContentEle; String wsdRegistryPath; String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(); if (org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equalsIgnoreCase (tenantDomain)) { wsdRegistryPath = RegistryConstants.PATH_SEPARATOR + "registry" + RegistryConstants.PATH_SEPARATOR + "resource" + absoluteWSDLResourcePath; } else { wsdRegistryPath = "/t/" + tenantDomain + RegistryConstants.PATH_SEPARATOR + "registry" + RegistryConstants.PATH_SEPARATOR + "resource" + absoluteWSDLResourcePath; } Resource wsdlResource = registry.newResource(); // isWSDL2Document(api.getWsdlUrl()) method only understands http or file system urls. // Hence if this is a registry url, should not go in to the following if block if (!api.getWsdlUrl().matches(wsdRegistryPath) && (api.getWsdlUrl().startsWith("http:") || api.getWsdlUrl ().startsWith("https:") || api.getWsdlUrl().startsWith("file:"))) { if (isWSDL2Document(api.getWsdlUrl())) { wsdlContentEle = wsdlReader.readAndCleanWsdl2(api); wsdlResource.setContent(wsdlContentEle.toString()); } else { wsdlContentEle = wsdlReader.readAndCleanWsdl(api); wsdlResource.setContent(wsdlContentEle.toString()); } registry.put(wsdlResourcePath, wsdlResource); //set the anonymous role for wsld resource to avoid basicauth security. String[] visibleRoles = null; if (api.getVisibleRoles() != null) { visibleRoles = api.getVisibleRoles().split(","); } setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles, wsdlResourcePath); } else { byte[] wsdl = (byte[]) registry.get(wsdlResourcePath).getContent(); if (isWSDL2Resource(wsdl)) { wsdlContentEle = wsdlReader.updateWSDL2(wsdl, api); wsdlResource.setContent(wsdlContentEle.toString()); } else { wsdlContentEle = wsdlReader.updateWSDL(wsdl, api); wsdlResource.setContent(wsdlContentEle.toString()); } registry.put(wsdlResourcePath, wsdlResource); //set the anonymous role for wsld resource to avoid basicauth security. String[] visibleRoles = null; if (api.getVisibleRoles() != null) { visibleRoles = api.getVisibleRoles().split(","); } setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles, wsdlResourcePath); } //set the wsdl resource permlink as the wsdlURL. 
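// From this point api.getWsdlUrl() no longer points at the original remote/file URL but at
// the registry-hosted copy that was cleaned and stored under wsdlResourcePath above.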
api.setWsdlUrl(getRegistryResourceHTTPPermlink(absoluteWSDLResourcePath)); return wsdlResourcePath; } catch (RegistryException e) { String msg = "Failed to add WSDL " + api.getWsdlUrl() + " to the registry"; log.error(msg, e); throw new RegistryException(msg, e); } catch (APIManagementException e) { String msg = "Failed to process the WSDL : " + api.getWsdlUrl(); log.error(msg, e); throw new APIManagementException(msg, e); } } /** * Given a URL, this method checks if the underlying document is a WSDL2 * * @param url URL to check * @return true if the underlying document is a WSDL2 * @throws APIManagementException if error occurred while validating the URI */ public static boolean isWSDL2Document(String url) throws APIManagementException { APIMWSDLReader wsdlReader = new APIMWSDLReader(url); return wsdlReader.isWSDL2BaseURI(); } /** * Given a wsdl resource, this method checks if the underlying document is a WSDL2 * * @param wsdl byte array of wsdl definition saved in registry * @return true if wsdl2 definition * @throws APIManagementException */ private static boolean isWSDL2Resource(byte[] wsdl) throws APIManagementException { String wsdl2NameSpace = "http://www.w3.org/ns/wsdl"; String wsdlContent = new String(wsdl); return wsdlContent.indexOf(wsdl2NameSpace) > 0; } /** * Read the GateWay Endpoint from the APIConfiguration. If multiple Gateway * environments defined, * take only the production node's Endpoint. * Else, pick what is available as the gateway node. * * @return {@link String} - Gateway URL */ public static String getGatewayendpoint(String transports) { String gatewayURLs; Map<String, Environment> gatewayEnvironments = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService() .getAPIManagerConfiguration() .getApiGatewayEnvironments(); if (gatewayEnvironments.size() > 1) { for (Environment environment : gatewayEnvironments.values()) { if (APIConstants.GATEWAY_ENV_TYPE_HYBRID.equals(environment.getType())) { gatewayURLs = environment.getApiGatewayEndpoint(); // This might have http,https // pick correct endpoint return APIUtil.extractHTTPSEndpoint(gatewayURLs, transports); } } for (Environment environment : gatewayEnvironments.values()) { if (APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType())) { gatewayURLs = environment.getApiGatewayEndpoint(); // This might have http,https // pick correct endpoint return APIUtil.extractHTTPSEndpoint(gatewayURLs, transports); } } for (Environment environment : gatewayEnvironments.values()) { if (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType())) { gatewayURLs = environment.getApiGatewayEndpoint(); // This might have http,https // pick correct endpoint return APIUtil.extractHTTPSEndpoint(gatewayURLs, transports); } } } else { gatewayURLs = ((Environment) gatewayEnvironments.values().toArray()[0]).getApiGatewayEndpoint(); return extractHTTPSEndpoint(gatewayURLs, transports); } return null; } /** * Gateway endpoint has HTTP and HTTPS endpoints. * If both are defined pick HTTPS only. Else, pick whatever available. * eg: <GatewayEndpoint>http://${carbon.local.ip}:${http.nio.port}, * https://${carbon.local.ip}:${https.nio.port}</GatewayEndpoint> * * @param gatewayURLs - String contains comma separated gateway urls. 
* @return {@link String} - Returns HTTPS gateway endpoint */ private static String extractHTTPSEndpoint(String gatewayURLs, String transports) { String gatewayURL; String gatewayHTTPURL = null; String gatewayHTTPSURL = null; boolean httpsEnabled = false; String[] gatewayURLsArray = gatewayURLs.split(","); String[] transportsArray = transports.split(","); for (String transport : transportsArray) { if (transport.startsWith(APIConstants.HTTPS_PROTOCOL)) { httpsEnabled = true; } } if (gatewayURLsArray.length > 1) { for (String url : gatewayURLsArray) { if (url.startsWith("https:")) { gatewayHTTPSURL = url; } else { if (!url.startsWith("ws:")) { gatewayHTTPURL = url; } } } if (httpsEnabled) { gatewayURL = gatewayHTTPSURL; } else { gatewayURL = gatewayHTTPURL; } } else { gatewayURL = gatewayURLs; } return gatewayURL; } /** * Create an Endpoint * * @param endpointUrl Endpoint url * @param registry Registry space to save the endpoint * @return Path of the created resource * @throws APIManagementException If an error occurs while adding the endpoint */ public static String createEndpoint(String endpointUrl, Registry registry) throws APIManagementException { try { EndpointManager endpointManager = new EndpointManager(registry); Endpoint endpoint = endpointManager.newEndpoint(endpointUrl); endpointManager.addEndpoint(endpoint); return GovernanceUtils.getArtifactPath(registry, endpoint.getId()); } catch (RegistryException e) { String msg = "Failed to import endpoint " + endpointUrl + " to registry "; log.error(msg, e); throw new APIManagementException(msg, e); } } /** * Sorts the list of tiers according to the number of requests allowed per minute in each tier in descending order. * * @param tiers - The list of tiers to be sorted * @return - The sorted list. */ public static List<Tier> sortTiers(Set<Tier> tiers) { List<Tier> tierList = new ArrayList<Tier>(); tierList.addAll(tiers); Collections.sort(tierList); return tierList; } /** * Returns a set of External API Stores as defined in the underlying governance * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Set<APIStore> getExternalStores(int tenantId) throws APIManagementException { // First checking if ExternalStores are defined in api-manager.xml Set<APIStore> externalAPIStores = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration().getExternalAPIStores(); // If defined, return Store Config provided there. if (externalAPIStores != null && !externalAPIStores.isEmpty()) { return externalAPIStores; } // Else Read the config from Tenant's Registry. 
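        /*
         * Illustrative shape of the registry resource parsed below. The element and attribute names are mapped from
         * the APIConstants used in the parsing code and all values are placeholders, so treat this as a sketch rather
         * than the authoritative configuration format:
         *
         *   <ExternalAPIStores>
         *       <ExternalAPIStore id="Store1" type="wso2" className="org.example.SamplePublisher">
         *           <DisplayName>Sample Store</DisplayName>
         *           <Endpoint>http://localhost:9763/store</Endpoint>
         *           <Username>admin</Username>
         *           <Password>admin</Password>
         *       </ExternalAPIStore>
         *   </ExternalAPIStores>
         */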
        externalAPIStores = new HashSet<APIStore>();
        try {
            UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                    .getGovernanceSystemRegistry(tenantId);
            if (registry.resourceExists(APIConstants.EXTERNAL_API_STORES_LOCATION)) {
                Resource resource = registry.get(APIConstants.EXTERNAL_API_STORES_LOCATION);
                String content = new String((byte[]) resource.getContent(), Charset.defaultCharset());
                OMElement element = AXIOMUtil.stringToOM(content);
                Iterator apistoreIterator = element.getChildrenWithLocalName("ExternalAPIStore");
                while (apistoreIterator.hasNext()) {
                    APIStore store = new APIStore();
                    OMElement storeElem = (OMElement) apistoreIterator.next();
                    String type = storeElem.getAttributeValue(new QName(APIConstants.EXTERNAL_API_STORE_TYPE));
                    String className = storeElem.getAttributeValue(new QName(APIConstants.EXTERNAL_API_STORE_CLASS_NAME));
                    store.setPublisher((APIPublisher) getClassForName(className).newInstance());
                    store.setType(type); //Set Store type [eg:wso2]
                    String name = storeElem.getAttributeValue(new QName(APIConstants.EXTERNAL_API_STORE_ID));
                    if (name == null) {
                        log.error("The ExternalAPIStore name attribute is not defined in api-manager.xml.");
                    }
                    store.setName(name); //Set store name
                    OMElement configDisplayName = storeElem.getFirstChildWithName(
                            new QName(APIConstants.EXTERNAL_API_STORE_DISPLAY_NAME));
                    String displayName = (configDisplayName != null) ? replaceSystemProperty(
                            configDisplayName.getText()) : name;
                    store.setDisplayName(displayName); //Set store display name
                    store.setEndpoint(replaceSystemProperty(storeElem.getFirstChildWithName(
                            new QName(APIConstants.EXTERNAL_API_STORE_ENDPOINT)).getText()));
                    //Set store endpoint, which is used to publish APIs
                    store.setPublished(false);
                    if (APIConstants.WSO2_API_STORE_TYPE.equals(type)) {
                        OMElement password = storeElem.getFirstChildWithName(new QName(
                                APIConstants.EXTERNAL_API_STORE_PASSWORD));
                        if (password != null) {
                            String value = password.getText();
                            store.setPassword(replaceSystemProperty(value));
                            store.setUsername(replaceSystemProperty(storeElem.getFirstChildWithName(
                                    new QName(APIConstants.EXTERNAL_API_STORE_USERNAME)).getText()));
                            //Set store login username
                        } else {
                            log.error("The user credentials of the API Publisher are not defined in the " +
                                    "<ExternalAPIStore> config of api-manager.xml.");
                        }
                    }
                    externalAPIStores.add(store);
                }
            }
        } catch (RegistryException e) {
            String msg = "Error while retrieving External Stores Configuration from registry";
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        } catch (XMLStreamException e) {
            String msg = "Malformed XML found in the External Stores Configuration resource";
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        } catch (ClassNotFoundException e) {
            String msg = "One or more classes defined in APIConstants.EXTERNAL_API_STORE_CLASS_NAME cannot be found";
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        } catch (InstantiationException e) {
            String msg = "One or more classes defined in APIConstants.EXTERNAL_API_STORE_CLASS_NAME cannot be instantiated";
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        } catch (IllegalAccessException e) {
            String msg = "One or more classes defined in APIConstants.EXTERNAL_API_STORE_CLASS_NAME cannot be accessed";
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        }
        return externalAPIStores;
    }

    /**
     * Returns the External API Store Configuration with the given Store Name
     *
     * @param apiStoreName
     * @return
     * @throws APIManagementException
     */
    public static APIStore getExternalAPIStore(String apiStoreName, int
tenantId) throws APIManagementException { Set<APIStore> externalAPIStoresConfig = APIUtil.getExternalStores(tenantId); for (APIStore apiStoreConfig : externalAPIStoresConfig) { if (apiStoreConfig.getName().equals(apiStoreName)) { return apiStoreConfig; } } return null; } /** * Returns an unfiltered map of API availability tiers as defined in the underlying governance * registry. * * @return Map<String, Tier> an unfiltered Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getAllTiers() throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(); return getAllTiers(registry, APIConstants.API_TIER_LOCATION, MultitenantConstants.SUPER_TENANT_ID); } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } catch (XMLStreamException e) { log.error(APIConstants.MSG_MALFORMED_XML_ERROR, e); throw new APIManagementException(APIConstants.MSG_MALFORMED_XML_ERROR, e); } } else { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, MultitenantConstants.SUPER_TENANT_ID); } } /** * Returns an unfiltered map of API availability tiers of the tenant as defined in the underlying governance * registry. * * @return Map<String, Tier> an unfiltered Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getAllTiers(int tenantId) throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(tenantId); return getAllTiers(registry, APIConstants.API_TIER_LOCATION, tenantId); } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } catch (XMLStreamException e) { log.error(APIConstants.MSG_MALFORMED_XML_ERROR, e); throw new APIManagementException(APIConstants.MSG_MALFORMED_XML_ERROR, e); } } else { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantId); } } /** * Returns a map of API availability tiers as defined in the underlying governance * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getTiers() throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(); return getTiers(registry, APIConstants.API_TIER_LOCATION, MultitenantConstants.SUPER_TENANT_ID); } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } } else { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, MultitenantConstants.SUPER_TENANT_ID); } } /** * Returns a map of API availability tiers as defined in the underlying governance * registry. 
* * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getAdvancedSubsriptionTiers() throws APIManagementException { return getAdvancedSubsriptionTiers(MultitenantConstants.SUPER_TENANT_ID); } /** * Returns a map of API subscription tiers of the tenant as defined in database * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getAdvancedSubsriptionTiers(int tenantId) throws APIManagementException { return APIUtil.getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantId); } /** * Returns a map of API availability tiers of the tenant as defined in the underlying governance * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getTiers(int tenantId) throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(tenantId); return getTiers(registry, APIConstants.API_TIER_LOCATION, tenantId); } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } } else { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantId); } } /** * Returns a map of API availability tiers of the tenant as defined in the underlying governance * registry. * * @return a Map of tier names and Tier objects - possibly empty * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, Tier> getTiers(int tierType, String tenantDomain) throws APIManagementException { if (!APIUtil.isAdvanceThrottlingEnabled()) { boolean isTenantFlowStarted = false; try { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). 
getGovernanceSystemRegistry(tenantId); if (tierType == APIConstants.TIER_API_TYPE) { return getTiers(registry, APIConstants.API_TIER_LOCATION, tenantId); } else if (tierType == APIConstants.TIER_RESOURCE_TYPE) { return getTiers(registry, APIConstants.RES_TIER_LOCATION, tenantId); } else if (tierType == APIConstants.TIER_APPLICATION_TYPE) { return getTiers(registry, APIConstants.APP_TIER_LOCATION, tenantId); } else { throw new APIManagementException("No such a tier type : " + tierType); } } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } else { boolean isTenantFlowStarted = false; try { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (tierType == APIConstants.TIER_API_TYPE) { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantId); } else if (tierType == APIConstants.TIER_RESOURCE_TYPE) { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_API, tenantId); } else if (tierType == APIConstants.TIER_APPLICATION_TYPE) { return getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_APP, tenantId); } else { throw new APIManagementException("No such a tier type : " + tierType); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } } /** * Retrieves unfiltered list of all available tiers from registry. * Result will contains all the tiers including unauthenticated tier which is * filtered out in getTiers} * * @param registry registry to access tiers config * @param tierLocation registry location of tiers config * @return Map<String, Tier> containing all available tiers * @throws RegistryException when registry action fails * @throws XMLStreamException when xml parsing fails * @throws APIManagementException when fails to retrieve tier attributes */ private static Map<String, Tier> getAllTiers(Registry registry, String tierLocation, int tenantId) throws RegistryException, XMLStreamException, APIManagementException { // We use a treeMap here to keep the order Map<String, Tier> tiers = new TreeMap<String, Tier>(); if (registry.resourceExists(tierLocation)) { Resource resource = registry.get(tierLocation); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); OMElement element = AXIOMUtil.stringToOM(content); OMElement assertion = element.getFirstChildWithName(APIConstants.ASSERTION_ELEMENT); Iterator policies = assertion.getChildrenWithName(APIConstants.POLICY_ELEMENT); while (policies.hasNext()) { OMElement policy = (OMElement) policies.next(); OMElement id = policy.getFirstChildWithName(APIConstants.THROTTLE_ID_ELEMENT); String tierName = id.getText(); // Constructing the tier object Tier tier = new Tier(tierName); tier.setPolicyContent(policy.toString().getBytes(Charset.defaultCharset())); if (id.getAttribute(APIConstants.THROTTLE_ID_DISPLAY_NAME_ELEMENT) != null) { tier.setDisplayName(id.getAttributeValue(APIConstants.THROTTLE_ID_DISPLAY_NAME_ELEMENT)); } else { tier.setDisplayName(tierName); } String desc; try { long requestPerMin = APIDescriptionGenUtil.getAllowedCountPerMinute(policy); tier.setRequestsPerMin(requestPerMin); long requestCount = APIDescriptionGenUtil.getAllowedRequestCount(policy); tier.setRequestCount(requestCount); long 
unitTime = APIDescriptionGenUtil.getTimeDuration(policy); tier.setUnitTime(unitTime); if (requestPerMin >= 1) { desc = DESCRIPTION.replaceAll("\\[1\\]", Long.toString(requestPerMin)); } else { desc = DESCRIPTION; } tier.setDescription(desc); } catch (APIManagementException ex) { // If there is any issue in getting the request counts or the time duration, that means this tier // information can not be used for throttling. Hence we log this exception and continue the flow // to the next tier. log.warn("Unable to get the request count/time duration information for : " + tier.getName() + ". " + ex.getMessage()); continue; } // Get all the attributes of the tier. Map<String, Object> tierAttributes = APIDescriptionGenUtil.getTierAttributes(policy); if (!tierAttributes.isEmpty()) { // The description, billing plan and the stop on quota reach properties are also stored as attributes // of the tier attributes. Hence we extract them from the above attributes map. Iterator<Entry<String, Object>> attributeIterator = tierAttributes.entrySet().iterator(); while (attributeIterator.hasNext()) { Entry<String, Object> entry = attributeIterator.next(); if (APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE.equals(entry.getKey()) && entry.getValue() instanceof String) { tier.setDescription((String) entry.getValue()); // We remove the attribute from the map attributeIterator.remove(); continue; } if (APIConstants.THROTTLE_TIER_PLAN_ATTRIBUTE.equals(entry.getKey()) && entry.getValue() instanceof String) { tier.setTierPlan((String) entry.getValue()); // We remove the attribute from the map attributeIterator.remove(); continue; } if (APIConstants.THROTTLE_TIER_QUOTA_ACTION_ATTRIBUTE.equals(entry.getKey()) && entry.getValue() instanceof String) { tier.setStopOnQuotaReached(Boolean.parseBoolean((String) entry.getValue())); // We remove the attribute from the map attributeIterator.remove(); // We do not need a continue since this is the last statement. } } tier.setTierAttributes(tierAttributes); } tiers.put(tierName, tier); } } if (isEnabledUnlimitedTier()) { Tier tier = new Tier(APIConstants.UNLIMITED_TIER); tier.setDescription(APIConstants.UNLIMITED_TIER_DESC); tier.setDisplayName(APIConstants.UNLIMITED_TIER); tier.setRequestsPerMin(Long.MAX_VALUE); if (isUnlimitedTierPaid(getTenantDomainFromTenantId(tenantId))) { tier.setTierPlan(APIConstants.COMMERCIAL_TIER_PLAN); } else { tier.setTierPlan(APIConstants.BILLING_PLAN_FREE); } tiers.put(tier.getName(), tier); } return tiers; } /** * Retrieves filtered list of available tiers from registry. This method will not return Unauthenticated * tier in the list. Use to retrieve all tiers without * any filtering. 
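 * ({@link #getAllTiers(Registry, String, int)}, defined above, is the unfiltered variant referred to here.)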
* * @param registry registry to access tiers config * @param tierLocation registry location of tiers config * @return map containing available tiers * @throws APIManagementException when fails to retrieve tier attributes */ private static Map<String, Tier> getTiers(Registry registry, String tierLocation, int tenantId) throws APIManagementException { Map<String, Tier> tiers = null; try { tiers = getAllTiers(registry, tierLocation, tenantId); tiers.remove(APIConstants.UNAUTHENTICATED_TIER); } catch (RegistryException e) { handleException(APIConstants.MSG_TIER_RET_ERROR, e); } catch (XMLStreamException e) { handleException(APIConstants.MSG_MALFORMED_XML_ERROR, e); } catch (APIManagementException e) { handleException("Unable to get tier attributes", e); } catch (Exception e) { // generic exception is caught to catch exceptions thrown from map remove method handleException("Unable to remove Unauthenticated tier from tiers list", e); } return tiers; } /** * This method deletes a given tier from tier xml file, for a given tenant * * @param tier tier to be deleted * @param tenantId id of the tenant * @throws APIManagementException if error occurs while getting registry resource or processing XML */ public static void deleteTier(Tier tier, int tenantId) throws APIManagementException { try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.API_TIER_LOCATION)) { Resource resource = registry.get(APIConstants.API_TIER_LOCATION); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); OMElement element = AXIOMUtil.stringToOM(content); OMElement assertion = element.getFirstChildWithName(APIConstants.ASSERTION_ELEMENT); Iterator policies = assertion.getChildrenWithName(APIConstants.POLICY_ELEMENT); boolean foundTier = false; String tierName = null; while (policies.hasNext()) { OMElement policy = (OMElement) policies.next(); OMElement id = policy.getFirstChildWithName(APIConstants.THROTTLE_ID_ELEMENT); tierName = tier.getName(); if (tierName != null && tierName.equalsIgnoreCase(id.getText())) { foundTier = true; policies.remove(); break; } } if (!foundTier) { log.error("Tier doesn't exist : " + tierName); throw new APIManagementException("Tier doesn't exist : " + tierName); } resource.setContent(element.toString()); registry.put(APIConstants.API_TIER_LOCATION, resource); } } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(e.getMessage()); } catch (XMLStreamException e) { log.error(APIConstants.MSG_MALFORMED_XML_ERROR, e); throw new APIManagementException(e.getMessage()); } } /** * Returns the tier display name for a particular tier * * @return the relevant tier display name * @throws APIManagementException if an error occurs when loading tiers from the registry */ public static String getTierDisplayName(int tenantId, String tierName) throws APIManagementException { String displayName = null; if (APIConstants.UNLIMITED_TIER.equals(tierName)) { return APIConstants.UNLIMITED_TIER; } try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). 
getGovernanceSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.API_TIER_LOCATION)) { Resource resource = registry.get(APIConstants.API_TIER_LOCATION); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); OMElement element = AXIOMUtil.stringToOM(content); OMElement assertion = element.getFirstChildWithName(APIConstants.ASSERTION_ELEMENT); Iterator policies = assertion.getChildrenWithName(APIConstants.POLICY_ELEMENT); while (policies.hasNext()) { OMElement policy = (OMElement) policies.next(); OMElement id = policy.getFirstChildWithName(APIConstants.THROTTLE_ID_ELEMENT); if (id.getText().equals(tierName)) { if (id.getAttribute(APIConstants.THROTTLE_ID_DISPLAY_NAME_ELEMENT) != null) { displayName = id.getAttributeValue(APIConstants.THROTTLE_ID_DISPLAY_NAME_ELEMENT); } else if (displayName == null) { displayName = id.getText(); } } } } } catch (RegistryException e) { log.error(APIConstants.MSG_TIER_RET_ERROR, e); throw new APIManagementException(APIConstants.MSG_TIER_RET_ERROR, e); } catch (XMLStreamException e) { log.error(APIConstants.MSG_MALFORMED_XML_ERROR, e); throw new APIManagementException(APIConstants.MSG_MALFORMED_XML_ERROR, e); } return displayName; } /** * Checks whether the specified user has the specified permission. * * @param username A username * @param permission A valid Carbon permission * @throws APIManagementException If the user does not have the specified permission or if an error occurs */ public static void checkPermission(String username, String permission) throws APIManagementException { if (username == null) { throw new APIManagementException("Attempt to execute privileged operation as" + " the anonymous user"); } if (isPermissionCheckDisabled()) { log.debug("Permission verification is disabled by APIStore configuration"); return; } String tenantDomain = MultitenantUtils.getTenantDomain(username); PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); boolean authorized; try { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager(). getTenantId(tenantDomain); if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { org.wso2.carbon.user.api.AuthorizationManager manager = ServiceReferenceHolder.getInstance() .getRealmService() .getTenantUserRealm(tenantId) .getAuthorizationManager(); authorized = manager.isUserAuthorized(MultitenantUtils.getTenantAwareUsername(username), permission, CarbonConstants.UI_PERMISSION_ACTION); } else { // On the first login attempt to publisher (without browsing the // store), the user realm will be null. if (ServiceReferenceHolder.getUserRealm() == null) { ServiceReferenceHolder.setUserRealm((UserRealm) ServiceReferenceHolder.getInstance() .getRealmService() .getTenantUserRealm(tenantId)); } authorized = AuthorizationManager.getInstance() .isUserAuthorized(MultitenantUtils.getTenantAwareUsername(username), permission); } if (!authorized) { throw new APIManagementException("User '" + username + "' does not have the " + "required permission: " + permission); } } catch (UserStoreException e) { throw new APIManagementException("Error while checking the user:" + username + " authorized or not", e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } /** * Checks whether the specified user has the specified permission. 
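 * <p>Illustrative usage (the username is a placeholder): {@code hasPermission("admin", APIConstants.Permissions.APIM_ADMIN)}
 * returns {@code true} only if the user carries that Carbon permission, or if permission checks are disabled.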
* * @param userNameWithoutChange A username * @param permission A valid Carbon permission * @throws APIManagementException If the user does not have the specified permission or if an error occurs */ public static boolean hasPermission(String userNameWithoutChange, String permission) throws APIManagementException { return hasPermission(userNameWithoutChange, permission, false); } /** * Checks whether the specified user has the specified permission. * * @param userNameWithoutChange A username * @param permission A valid Carbon permission * @throws APIManagementException If the user does not have the specified permission or if an error occurs */ public static boolean hasPermission(String userNameWithoutChange, String permission, boolean isFromPublisher) throws APIManagementException { boolean authorized = false; if (userNameWithoutChange == null) { throw new APIManagementException("Attempt to execute privileged operation as" + " the anonymous user"); } if (isPermissionCheckDisabled()) { log.debug("Permission verification is disabled by APIStore configuration"); authorized = true; return authorized; } if (isFromPublisher && APIConstants.Permissions.APIM_ADMIN.equals(permission)) { userNameWithoutChange = getUserNameWithTenantSuffix(userNameWithoutChange); Integer value = getValueFromCache(APIConstants.API_PUBLISHER_ADMIN_PERMISSION_CACHE, userNameWithoutChange); if (value != null) { return value == 1; } } String tenantDomain = MultitenantUtils.getTenantDomain(userNameWithoutChange); PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); try { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager(). getTenantId(tenantDomain); if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { org.wso2.carbon.user.api.AuthorizationManager manager = ServiceReferenceHolder.getInstance() .getRealmService() .getTenantUserRealm(tenantId) .getAuthorizationManager(); authorized = manager.isUserAuthorized(MultitenantUtils.getTenantAwareUsername(userNameWithoutChange), permission, CarbonConstants.UI_PERMISSION_ACTION); } else { // On the first login attempt to publisher (without browsing the // store), the user realm will be null. if (ServiceReferenceHolder.getUserRealm() == null) { ServiceReferenceHolder.setUserRealm((UserRealm) ServiceReferenceHolder.getInstance() .getRealmService() .getTenantUserRealm(tenantId)); } authorized = AuthorizationManager.getInstance() .isUserAuthorized(MultitenantUtils.getTenantAwareUsername(userNameWithoutChange), permission); } if (isFromPublisher && APIConstants.Permissions.APIM_ADMIN.equals(permission)) { addToRolesCache(APIConstants.API_PUBLISHER_ADMIN_PERMISSION_CACHE, userNameWithoutChange, authorized ? 1 : 2); } } catch (UserStoreException e) { throw new APIManagementException("Error while checking the user:" + userNameWithoutChange + " authorized or not", e); } finally { PrivilegedCarbonContext.endTenantFlow(); } return authorized; } /** * Checks whether the disablePermissionCheck parameter enabled * * @return boolean */ public static boolean isPermissionCheckDisabled() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). 
getAPIManagerConfigurationService().getAPIManagerConfiguration(); String disablePermissionCheck = config.getFirstProperty(APIConstants.API_STORE_DISABLE_PERMISSION_CHECK); if (disablePermissionCheck == null) { return false; } return Boolean.parseBoolean(disablePermissionCheck); } /** * Checks whether the specified user has the specified permission without throwing * any exceptions. * * @param username A username * @param permission A valid Carbon permission * @return true if the user has the specified permission and false otherwise */ public static boolean checkPermissionQuietly(String username, String permission) { try { checkPermission(username, permission); return true; } catch (APIManagementException ignore) { // Ignore the exception. // Logging it on debug mode so if needed we can see the exception stacktrace. if (log.isDebugEnabled()) { log.debug("User does not have permission", ignore); } return false; } } /** * Gets the information of the logged in User. * * @param cookie Cookie of the previously logged in session. * @param serviceUrl Url of the authentication service. * @return LoggedUserInfo object containing details of the logged in user. * @throws ExceptionException * @throws RemoteException */ public static LoggedUserInfo getLoggedInUserInfo(String cookie, String serviceUrl) throws RemoteException, ExceptionException { LoggedUserInfoAdminStub stub = new LoggedUserInfoAdminStub(null, serviceUrl + "LoggedUserInfoAdmin"); ServiceClient client = stub._getServiceClient(); Options options = client.getOptions(); options.setManageSession(true); options.setProperty(HTTPConstants.COOKIE_STRING, cookie); return stub.getUserInfo(); } /** * Get user profiles of user * * @param username username * @return default user profile of user * @throws APIManagementException */ public static UserProfileDTO getUserDefaultProfile(String username) throws APIManagementException { APIManagerConfiguration apiManagerConfiguration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration(); String url = apiManagerConfiguration.getFirstProperty(APIConstants.API_KEY_VALIDATOR_URL); String errorMsg = "Error while getting profile of user "; try { UserProfileMgtServiceStub stub = new UserProfileMgtServiceStub( ServiceReferenceHolder.getContextService().getClientConfigContext(), url + APIConstants.USER_PROFILE_MGT_SERVICE); ServiceClient gatewayServiceClient = stub._getServiceClient(); CarbonUtils.setBasicAccessSecurityHeaders( apiManagerConfiguration.getFirstProperty(APIConstants.API_KEY_VALIDATOR_USERNAME), apiManagerConfiguration.getFirstProperty(APIConstants.API_KEY_VALIDATOR_PASSWORD), gatewayServiceClient); UserProfileDTO[] profiles = stub.getUserProfiles(username); for (UserProfileDTO dto : profiles) { if (APIConstants.USER_DEFAULT_PROFILE.equals(dto.getProfileName())) { return dto; } } } catch (AxisFault axisFault) { //here we are going to log the error message and return because in this case, current user cannot fetch //profile of another user (due to cross tenant isolation, not allowed to access user details etc.) 
log.error("Cannot access user profile of : " + username); return null; } catch (RemoteException e) { handleException(errorMsg + username, e); } catch (UserProfileMgtServiceUserProfileExceptionException e) { handleException(errorMsg + username, e); } return null; } /** * Retrieves the role list of a user * * @param username Name of the username * @throws APIManagementException If an error occurs */ public static String[] getListOfRoles(String username) throws APIManagementException { return getListOfRoles(username, false); } /** * Retrieves the role list of a user * * @param username A username * @param isFromPublisher To specify whether this call is from publisher * @throws APIManagementException If an error occurs */ public static String[] getListOfRoles(String username, boolean isFromPublisher) throws APIManagementException { if (username == null) { throw new APIManagementException("Attempt to execute privileged operation as" + " the anonymous user"); } String[] roles = null; if (isFromPublisher) { username = getUserNameWithTenantSuffix(username); roles = getValueFromCache(APIConstants.API_PUBLISHER_USER_ROLE_CACHE, username); } if (roles != null) { return roles; } String tenantDomain = MultitenantUtils.getTenantDomain(username); try { if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME .equals(tenantDomain)) { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); UserStoreManager manager = ServiceReferenceHolder.getInstance().getRealmService() .getTenantUserRealm(tenantId).getUserStoreManager(); roles = manager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername(username)); } else { roles = AuthorizationManager.getInstance() .getRolesOfUser(MultitenantUtils.getTenantAwareUsername(username)); } if (isFromPublisher) { addToRolesCache(APIConstants.API_PUBLISHER_USER_ROLE_CACHE, username, roles); } return roles; } catch (UserStoreException e) { throw new APIManagementException("UserStoreException while trying the role list of the user " + username, e); } } /** * To add the value to a cache. * * @param cacheName - Name of the Cache * @param key - Key of the entry that need to be added. * @param value - Value of the entry that need to be added. */ protected static <T> void addToRolesCache(String cacheName, String key, T value) { if (isPublisherRoleCacheEnabled) { if (log.isDebugEnabled()) { log.debug("Publisher role cache is enabled, adding the roles for the " + key + " to the cache " + cacheName + "'"); } Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER).getCache(cacheName).put(key, value); } } /** * To get the value from the cache. * * @param cacheName Name of the cache. * @param key Key of the cache entry. * @return Role list from the cache, if a values exists, otherwise null. */ protected static <T> T getValueFromCache(String cacheName, String key) { if (isPublisherRoleCacheEnabled) { if (log.isDebugEnabled()) { log.debug("Publisher role cache is enabled, retrieving the roles for " + key + " from the cache " + cacheName + "'"); } Cache<String, T> rolesCache = Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER) .getCache(cacheName); return rolesCache.get(key); } return null; } /** * Retrieves the list of user roles without throwing any exceptions. * * @param username A username * @return the list of roles to which the user belongs to. 
*/ public static String[] getListOfRolesQuietly(String username) { try { return getListOfRoles(username); } catch (APIManagementException e) { return new String[0]; } } /** * Sets permission for uploaded file resource. * * @param filePath Registry path for the uploaded file * @throws APIManagementException */ public static void setFilePermission(String filePath) throws APIManagementException { try { String filePathString = filePath.replaceFirst("/registry/resource/", ""); org.wso2.carbon.user.api.AuthorizationManager accessControlAdmin = ServiceReferenceHolder.getInstance(). getRealmService().getTenantUserRealm(MultitenantConstants.SUPER_TENANT_ID). getAuthorizationManager(); if (!accessControlAdmin.isRoleAuthorized(CarbonConstants.REGISTRY_ANONNYMOUS_ROLE_NAME, filePathString, ActionConstants.GET)) { accessControlAdmin.authorizeRole(CarbonConstants.REGISTRY_ANONNYMOUS_ROLE_NAME, filePathString, ActionConstants.GET); } } catch (UserStoreException e) { throw new APIManagementException("Error while setting up permissions for file location", e); } } /** * This method used to get API from governance artifact specific to copyAPI * * @param artifact API artifact * @param registry Registry * @return API * @throws APIManagementException if failed to get API from artifact */ public static API getAPI(GovernanceArtifact artifact, Registry registry, APIIdentifier oldId, String oldContext) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); api = new API(new APIIdentifier(providerName, apiName, apiVersion)); int apiId = ApiMgtDAO.getInstance().getAPIID(oldId, null); if (apiId == -1) { return null; } // set rating String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); BigDecimal bigDecimal = BigDecimal.valueOf(registry.getAverageRating(artifactPath)); BigDecimal res = bigDecimal.setScale(1, RoundingMode.HALF_UP); api.setRating(res.floatValue()); //set description api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); //set last access time api.setLastUpdated(registry.get(artifactPath).getLastModified()); //set uuid api.setUUID(artifact.getId()); // set url api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setWsdlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WSDL)); api.setWadlUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_WADL)); api.setTechnicalOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER)); api.setTechnicalOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_TEC_OWNER_EMAIL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setBusinessOwnerEmail(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER_EMAIL)); api.setEndpointSecured(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_SECURED))); api.setEndpointAuthDigest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_AUTH_DIGEST))); api.setEndpointUTUsername(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_USERNAME)); if (!((APIConstants.DEFAULT_MODIFIED_ENDPOINT_PASSWORD) .equals(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)))) { 
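                // The registry attribute still holds the real endpoint password (not the masked placeholder),
                // so it can be copied onto the API object as-is.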
api.setEndpointUTPassword(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_PASSWORD)); } else { //If APIEndpointPasswordRegistryHandler is enabled take password from the registry hidden property api.setEndpointUTPassword(getActualEpPswdFromHiddenProperty(api, registry)); } api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); api.setSubscriptionAvailability(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABILITY)); api.setSubscriptionAvailableTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_SUBSCRIPTION_AVAILABLE_TENANTS)); api.setResponseCache(artifact.getAttribute(APIConstants.API_OVERVIEW_RESPONSE_CACHING)); api.setImplementation(artifact.getAttribute(APIConstants.PROTOTYPE_OVERVIEW_IMPLEMENTATION)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); String tenantDomainName = MultitenantUtils.getTenantDomain(replaceEmailDomainBack(providerName)); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomainName); boolean isGlobalThrottlingEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (isGlobalThrottlingEnabled) { String apiLevelTier = ApiMgtDAO.getInstance().getAPILevelTier(apiId); api.setApiLevelPolicy(apiLevelTier); } String tiers = artifact.getAttribute(APIConstants.API_OVERVIEW_TIER); Map<String, Tier> definedTiers = getTiers(tenantId); Set<Tier> availableTier = getAvailableTiers(definedTiers, tiers, apiName); api.addAvailableTiers(availableTier); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); api.setContextTemplate(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT_TEMPLATE)); api.setLatest(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_LATEST))); ArrayList<URITemplate> urlPatternsList; Set<Scope> scopes = ApiMgtDAO.getInstance().getAPIScopes(oldId); api.setScopes(scopes); HashMap<String, String> resourceScopes; resourceScopes = ApiMgtDAO.getInstance().getResourceToScopeMapping(oldId); urlPatternsList = ApiMgtDAO.getInstance().getAllURITemplates(oldContext, oldId.getVersion()); Set<URITemplate> uriTemplates = new HashSet<URITemplate>(urlPatternsList); for (URITemplate uriTemplate : uriTemplates) { uriTemplate.setResourceURI(api.getUrl()); uriTemplate.setResourceSandboxURI(api.getSandboxUrl()); String resourceScopeKey = APIUtil.getResourceKey(oldContext, oldId.getVersion(), uriTemplate.getUriTemplate(), uriTemplate.getHTTPVerb()); uriTemplate.setScope(findScopeByKey(scopes, resourceScopes.get(resourceScopeKey))); } api.setUriTemplates(uriTemplates); Set<String> tags = new HashSet<String>(); Tag[] tag = registry.getTags(artifactPath); for (Tag tag1 : tag) { tags.add(tag1.getTagName()); } api.addTags(tags); api.setLastUpdated(registry.get(artifactPath).getLastModified()); api.setAsDefaultVersion(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_IS_DEFAULT_VERSION))); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); } catch (GovernanceException e) { String 
msg = "Failed to get API fro artifact "; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } catch (UserStoreException e) { String msg = "Failed to get User Realm of API Provider"; throw new APIManagementException(msg, e); } return api; } public static boolean checkAccessTokenPartitioningEnabled() { return OAuthServerConfiguration.getInstance().isAccessTokenPartitioningEnabled(); } public static boolean checkUserNameAssertionEnabled() { return OAuthServerConfiguration.getInstance().isUserNameAssertionEnabled(); } public static String[] getAvailableKeyStoreTables() throws APIManagementException { String[] keyStoreTables = new String[0]; Map<String, String> domainMappings = getAvailableUserStoreDomainMappings(); if (domainMappings != null) { keyStoreTables = new String[domainMappings.size()]; int i = 0; for (Entry<String, String> e : domainMappings.entrySet()) { String value = e.getValue(); keyStoreTables[i] = APIConstants.ACCESS_TOKEN_STORE_TABLE + "_" + value.trim(); i++; } } return keyStoreTables; } public static Map<String, String> getAvailableUserStoreDomainMappings() throws APIManagementException { Map<String, String> userStoreDomainMap = new HashMap<String, String>(); String domainsStr = OAuthServerConfiguration.getInstance().getAccessTokenPartitioningDomains(); if (domainsStr != null) { String[] userStoreDomainsArr = domainsStr.split(","); for (String anUserStoreDomainsArr : userStoreDomainsArr) { String[] mapping = anUserStoreDomainsArr.trim().split(":"); //A:foo.com , B:bar.com if (mapping.length < 2) { throw new APIManagementException("Domain mapping has not defined"); } userStoreDomainMap.put(mapping[1].trim(), mapping[0].trim()); //key=domain & value=mapping } } return userStoreDomainMap; } public static String getAccessTokenStoreTableFromUserId(String userId) throws APIManagementException { String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; String userStore; if (userId != null) { String[] strArr = userId.split("/"); if (strArr.length > 1) { userStore = strArr[0]; Map<String, String> availableDomainMappings = getAvailableUserStoreDomainMappings(); if (availableDomainMappings != null && availableDomainMappings.containsKey(userStore)) { accessTokenStoreTable = accessTokenStoreTable + "_" + availableDomainMappings.get(userStore); } } } return accessTokenStoreTable; } public static String getAccessTokenStoreTableFromAccessToken(String apiKey) throws APIManagementException { String userId = getUserIdFromAccessToken(apiKey); //i.e: 'foo.com/admin' or 'admin' return getAccessTokenStoreTableFromUserId(userId); } public static String getUserIdFromAccessToken(String apiKey) { String userId = null; String decodedKey = new String(Base64.decodeBase64(apiKey.getBytes(Charset.defaultCharset())), Charset.defaultCharset()); String[] tmpArr = decodedKey.split(":"); if (tmpArr.length == 2) { //tmpArr[0]= userStoreDomain & tmpArr[1] = userId userId = tmpArr[1]; } return userId; } /** * validates if an accessToken has expired or not * * @param accessTokenDO * @return true if token has expired else false */ public static boolean isAccessTokenExpired(APIKeyValidationInfoDTO accessTokenDO) { long validityPeriod = accessTokenDO.getValidityPeriod(); long issuedTime = accessTokenDO.getIssuedTime(); long timestampSkew = OAuthServerConfiguration.getInstance().getTimeStampSkewInSeconds() * 1000; long currentTime = System.currentTimeMillis(); //If the validity period is 
        // not a never-expiring value
        if (validityPeriod != Long.MAX_VALUE &&
                // For cases where validityPeriod is closer to Long.MAX_VALUE (then issuedTime + validityPeriod would spill
                // over and would produce a negative value)
                (currentTime - timestampSkew) > validityPeriod) {
            //check the validity of cached OAuth2AccessToken Response
            if ((currentTime - timestampSkew) > (issuedTime + validityPeriod)) {
                accessTokenDO.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_INVALID_CREDENTIALS);
                return true;
            }
        }
        return false;
    }

    /**
     * When an input contains '@', replace it with '-AT-'. [This is required to persist API data in the registry, as
     * registry paths don't allow the '@' sign.]
     *
     * @param input inputString
     * @return String modifiedString
     */
    public static String replaceEmailDomain(String input) {
        if (input != null && input.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR)) {
            input = input.replace(APIConstants.EMAIL_DOMAIN_SEPARATOR, APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT);
        }
        return input;
    }

    /**
     * When an input contains '-AT-', replace it with '@'. [This is required to persist API data between the registry
     * and the database.]
     *
     * @param input inputString
     * @return String modifiedString
     */
    public static String replaceEmailDomainBack(String input) {
        if (input != null && input.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT)) {
            input = input.replace(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT, APIConstants.EMAIL_DOMAIN_SEPARATOR);
        }
        return input;
    }

    public static void copyResourcePermissions(String username, String sourceArtifactPath, String targetArtifactPath)
            throws APIManagementException {
        String sourceResourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
                APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
                        RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + sourceArtifactPath);
        String targetResourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
                APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
                        RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + targetArtifactPath);
        String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(username));
        try {
            int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain);
            org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService().
getTenantUserRealm(tenantId).getAuthorizationManager(); String[] allowedRoles = authManager.getAllowedRolesForResource(sourceResourcePath, ActionConstants.GET); if (allowedRoles != null) { for (String allowedRole : allowedRoles) { authManager.authorizeRole(allowedRole, targetResourcePath, ActionConstants.GET); } } } catch (UserStoreException e) { throw new APIManagementException("Error while adding role permissions to API", e); } } /** * This function is to set resource permissions based on its visibility * * @param visibility API visibility * @param roles Authorized roles * @param artifactPath API resource path * @throws APIManagementException Throwing exception */ public static void setResourcePermissions(String username, String visibility, String[] roles, String artifactPath) throws APIManagementException { try { String resourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + artifactPath); String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(username)); if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int tenantId = ServiceReferenceHolder.getInstance().getRealmService(). getTenantManager().getTenantId(tenantDomain); // calculate resource path RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager (ServiceReferenceHolder.getUserRealm()); resourcePath = authorizationManager.computePathOnMount(resourcePath); org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). getTenantUserRealm(tenantId).getAuthorizationManager(); if (visibility != null && APIConstants.API_RESTRICTED_VISIBILITY.equalsIgnoreCase(visibility)) { boolean isRoleEveryOne = false; /*If no roles have defined, authorize for everyone role */ if (roles != null) { if (roles.length == 1 && "".equals(roles[0])) { authManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); isRoleEveryOne = true; } else { for (String role : roles) { if (APIConstants.EVERYONE_ROLE.equalsIgnoreCase(role)) { isRoleEveryOne = true; } authManager.authorizeRole(role, resourcePath, ActionConstants.GET); } } } if (!isRoleEveryOne) { authManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); } authManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else if (visibility != null && APIConstants.API_PRIVATE_VISIBILITY.equalsIgnoreCase(visibility)) { authManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else if (visibility != null && APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(visibility)) { /*If no roles have defined, deny access for everyone & anonymous role */ if (roles == null) { authManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else { for (String role : roles) { authManager.denyRole(role, resourcePath, ActionConstants.GET); } } } else { authManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } } else { RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager 
(ServiceReferenceHolder.getUserRealm()); if (visibility != null && APIConstants.API_RESTRICTED_VISIBILITY.equalsIgnoreCase(visibility)) { boolean isRoleEveryOne = false; if (roles != null) { for (String role : roles) { if (APIConstants.EVERYONE_ROLE.equalsIgnoreCase(role)) { isRoleEveryOne = true; } authorizationManager.authorizeRole(role, resourcePath, ActionConstants.GET); } } if (!isRoleEveryOne) { authorizationManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); } authorizationManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else if (visibility != null && APIConstants.API_PRIVATE_VISIBILITY.equalsIgnoreCase(visibility)) { authorizationManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authorizationManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else if (visibility != null && APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(visibility)) { /*If no roles have defined, deny access for everyone & anonymous role */ if (roles == null) { authorizationManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authorizationManager.denyRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } else { for (String role : roles) { authorizationManager.denyRole(role, resourcePath, ActionConstants.GET); } } } else { authorizationManager.authorizeRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); authorizationManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } } } catch (UserStoreException e) { throw new APIManagementException("Error while adding role permissions to API", e); } } public static void loadTenantAPIPolicy(String tenant, int tenantID) throws APIManagementException { String tierBasePath = CarbonUtils.getCarbonHome() + File.separator + "repository" + File.separator + "resources" + File.separator + "default-tiers" + File.separator; String apiTierFilePath = tierBasePath + APIConstants.DEFAULT_API_TIER_FILE_NAME; String appTierFilePath = tierBasePath + APIConstants.DEFAULT_APP_TIER_FILE_NAME; String resTierFilePath = tierBasePath + APIConstants.DEFAULT_RES_TIER_FILE_NAME; loadTenantAPIPolicy(tenantID, APIConstants.API_TIER_LOCATION, apiTierFilePath); loadTenantAPIPolicy(tenantID, APIConstants.APP_TIER_LOCATION, appTierFilePath); loadTenantAPIPolicy(tenantID, APIConstants.RES_TIER_LOCATION, resTierFilePath); } /** * Load the throttling policy to the registry for tenants * * @param tenantID * @param location * @param fileName * @throws APIManagementException */ private static void loadTenantAPIPolicy(int tenantID, String location, String fileName) throws APIManagementException { InputStream inputStream = null; try { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantID); if (govRegistry.resourceExists(location)) { if (log.isDebugEnabled()) { log.debug("Tier policies already uploaded to the tenant's registry space"); } return; } if (log.isDebugEnabled()) { log.debug("Adding API tier policies to the tenant's registry"); } File defaultTiers = new File(fileName); if (!defaultTiers.exists()) { log.info("Default tier policies not found in : " + fileName); return; } inputStream = FileUtils.openInputStream(defaultTiers); byte[] data = IOUtils.toByteArray(inputStream); Resource resource = govRegistry.newResource(); resource.setContent(data); govRegistry.put(location, 
resource); } catch (RegistryException e) { throw new APIManagementException("Error while saving policy information to the registry", e); } catch (IOException e) { throw new APIManagementException("Error while reading policy file content", e); } finally { if (inputStream != null) { try { inputStream.close(); } catch (IOException e) { log.error("Error when closing input stream", e); } } } } /** * Load the External API Store Configuration to the registry * * @param tenantID * @throws org.wso2.carbon.apimgt.api.APIManagementException */ public static void loadTenantExternalStoreConfig(int tenantID) throws APIManagementException { try { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantID); if (govRegistry.resourceExists(APIConstants.EXTERNAL_API_STORES_LOCATION)) { log.debug("External Stores configuration already uploaded to the registry"); return; } if (log.isDebugEnabled()) { log.debug("Adding External Stores configuration to the tenant's registry"); } InputStream inputStream = APIManagerComponent.class.getResourceAsStream("/externalstores/default-external-api-stores.xml"); byte[] data = IOUtils.toByteArray(inputStream); Resource resource = govRegistry.newResource(); resource.setContent(data); govRegistry.put(APIConstants.EXTERNAL_API_STORES_LOCATION, resource); /*set resource permission*/ org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService().getTenantUserRealm(tenantID). getAuthorizationManager(); String resourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + APIConstants.EXTERNAL_API_STORES_LOCATION); authManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET); } catch (RegistryException e) { throw new APIManagementException("Error while saving External Stores configuration information to the " + "registry", e); } catch (IOException e) { throw new APIManagementException("Error while reading External Stores configuration file content", e); } catch (UserStoreException e) { throw new APIManagementException("Error while setting permission to External Stores configuration file", e); } } /** * Load the Google Analytics Configuration to the registry * * @param tenantID * @throws APIManagementException */ public static void loadTenantGAConfig(int tenantID) throws APIManagementException { InputStream inputStream = null; try { RegistryService registryService = ServiceReferenceHolder.getInstance() .getRegistryService(); UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantID); if (govRegistry.resourceExists(APIConstants.GA_CONFIGURATION_LOCATION)) { log.debug("Google Analytics configuration already uploaded to the registry"); return; } if (log.isDebugEnabled()) { log.debug("Adding Google Analytics configuration to the tenant's registry"); } inputStream = APIManagerComponent.class.getResourceAsStream("/statistics/default-ga-config.xml"); byte[] data = IOUtils.toByteArray(inputStream); Resource resource = govRegistry.newResource(); resource.setContent(data); govRegistry.put(APIConstants.GA_CONFIGURATION_LOCATION, resource); /*set resource permission*/ org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). 
                    getTenantUserRealm(tenantID).getAuthorizationManager();
            String resourcePath = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
                    APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
                            RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH)
                            + APIConstants.GA_CONFIGURATION_LOCATION);
            authManager.denyRole(APIConstants.EVERYONE_ROLE, resourcePath, ActionConstants.GET);
        } catch (RegistryException e) {
            throw new APIManagementException("Error while saving Google Analytics configuration information to the registry", e);
        } catch (IOException e) {
            throw new APIManagementException("Error while reading Google Analytics configuration file content", e);
        } catch (UserStoreException e) {
            throw new APIManagementException("Error while setting permission to Google Analytics configuration file", e);
        } finally {
            try {
                if (inputStream != null) {
                    inputStream.close();
                }
            } catch (IOException e) {
                if (log.isWarnEnabled()) {
                    log.warn("Error while closing the input stream", e);
                }
            }
        }
    }

    public static void loadTenantWorkFlowExtensions(int tenantID) throws APIManagementException {
        // TODO: Merge different resource loading methods and create a single method.
        InputStream inputStream = null;
        try {
            RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
            UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantID);
            if (govRegistry.resourceExists(APIConstants.WORKFLOW_EXECUTOR_LOCATION)) {
                log.debug("Workflow extensions configuration already uploaded to the registry");
                return;
            }
            if (log.isDebugEnabled()) {
                log.debug("Adding workflow extensions configuration to the tenant's registry");
            }
            inputStream = APIManagerComponent.class.getResourceAsStream("/workflowextensions/default-workflow-extensions.xml");
            byte[] data = IOUtils.toByteArray(inputStream);
            Resource resource = govRegistry.newResource();
            resource.setContent(data);
            resource.setMediaType(APIConstants.WORKFLOW_MEDIA_TYPE);
            govRegistry.put(APIConstants.WORKFLOW_EXECUTOR_LOCATION, resource);
        } catch (RegistryException e) {
            throw new APIManagementException("Error while saving workflow extensions configuration to the registry", e);
        } catch (IOException e) {
            throw new APIManagementException("Error while reading workflow extensions configuration file content", e);
        } finally {
            IOUtils.closeQuietly(inputStream);
        }
    }

    /**
     * Load the self sign-up configuration to the registry for the given tenant
     *
     * @param tenantId
     * @throws APIManagementException
     */
    public static void loadTenantSelfSignUpConfigurations(int tenantId) throws APIManagementException {
        try {
            RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
            UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantId);
            if (govRegistry.resourceExists(APIConstants.SELF_SIGN_UP_CONFIG_LOCATION)) {
                log.debug("Self signup configuration already uploaded to the registry");
                return;
            }
            if (log.isDebugEnabled()) {
                log.debug("Adding Self signup configuration to the tenant's registry");
            }
            InputStream inputStream;
            if (tenantId == org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_ID) {
                inputStream = APIManagerComponent.class.getResourceAsStream("/signupconfigurations/default-sign-up-config.xml");
            } else {
                inputStream = APIManagerComponent.class.getResourceAsStream("/signupconfigurations/tenant-sign-up-config.xml");
            }
            byte[] data = IOUtils.toByteArray(inputStream);
            Resource resource = govRegistry.newResource();
            resource.setContent(data);
            resource.setMediaType(APIConstants.SELF_SIGN_UP_CONFIG_MEDIA_TYPE);
            govRegistry.put(APIConstants.SELF_SIGN_UP_CONFIG_LOCATION, resource);
        } catch (RegistryException e) {
            throw new APIManagementException("Error while
saving Self signup configuration information to the registry", e); } catch (IOException e) { throw new APIManagementException("Error while reading Self signup configuration file content", e); } } public static void loadTenantConf(int tenantID) throws APIManagementException { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); try { UserRegistry registry = registryService.getConfigSystemRegistry(tenantID); if (registry.resourceExists(APIConstants.API_TENANT_CONF_LOCATION)) { log.debug("Tenant conf already uploaded to the registry"); return; } String tenantConfLocation = CarbonUtils.getCarbonHome() + File.separator + APIConstants.RESOURCE_FOLDER_LOCATION + File.separator + APIConstants.API_TENANT_CONF; File tenantConfFile = new File(tenantConfLocation); byte[] data; if (tenantConfFile.exists()) { // Load conf from resources directory in pack if it exists FileInputStream fileInputStream = new FileInputStream(tenantConfFile); data = IOUtils.toByteArray(fileInputStream); } else { // Fallback to loading the conf that is stored at jar level if file does not exist in pack InputStream inputStream = APIManagerComponent.class.getResourceAsStream("/tenant/" + APIConstants.API_TENANT_CONF); data = IOUtils.toByteArray(inputStream); } log.debug("Adding tenant config to the registry"); Resource resource = registry.newResource(); resource.setMediaType(APIConstants.APPLICATION_JSON_MEDIA_TYPE); resource.setContent(data); registry.put(APIConstants.API_TENANT_CONF_LOCATION, resource); } catch (RegistryException e) { throw new APIManagementException("Error while saving tenant conf to the registry", e); } catch (IOException e) { throw new APIManagementException("Error while reading tenant conf file content", e); } } /** * @param tenantId * @throws APIManagementException */ public static void createSelfSignUpRoles(int tenantId) throws APIManagementException { try { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry govRegistry = registryService.getGovernanceSystemRegistry(tenantId); if (govRegistry.resourceExists(APIConstants.SELF_SIGN_UP_CONFIG_LOCATION)) { Resource resource = govRegistry.get(APIConstants.SELF_SIGN_UP_CONFIG_LOCATION); InputStream content = resource.getContentStream(); DocumentBuilderFactory factory = getSecuredDocumentBuilder(); factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); DocumentBuilder parser = factory.newDocumentBuilder(); Document dc = parser.parse(content); boolean enableSubscriberRoleCreation = isSubscriberRoleCreationEnabled(tenantId); String signUpDomain = dc.getElementsByTagName(APIConstants.SELF_SIGN_UP_REG_DOMAIN_ELEM).item(0) .getFirstChild().getNodeValue(); if (enableSubscriberRoleCreation) { int roleLength = dc.getElementsByTagName(APIConstants.SELF_SIGN_UP_REG_ROLE_NAME_ELEMENT) .getLength(); for (int i = 0; i < roleLength; i++) { String roleName = dc.getElementsByTagName(APIConstants.SELF_SIGN_UP_REG_ROLE_NAME_ELEMENT) .item(i).getFirstChild().getNodeValue(); boolean isExternalRole = Boolean.parseBoolean(dc .getElementsByTagName(APIConstants.SELF_SIGN_UP_REG_ROLE_IS_EXTERNAL).item(i) .getFirstChild().getNodeValue()); if (roleName != null) { // If isExternalRole==false ;create the subscriber role as an internal role if (isExternalRole && signUpDomain != null) { roleName = signUpDomain.toUpperCase() + CarbonConstants.DOMAIN_SEPARATOR + roleName; } else { roleName = UserCoreConstants.INTERNAL_DOMAIN + CarbonConstants.DOMAIN_SEPARATOR + roleName; } 
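/*
 * Illustrative sketch of the role-name resolution above, assuming the Carbon domain
 * separator is "/" and hypothetical configuration values:
 *
 *   signUpDomain = "LDAP", roleName = "subscriber", isExternalRole = true
 *       -> "LDAP/subscriber"        (role mapped to the external user store)
 *   roleName = "subscriber", isExternalRole = false
 *       -> "Internal/subscriber"    (internal role, UserCoreConstants.INTERNAL_DOMAIN)
 */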
createSubscriberRole(roleName, tenantId); } } } } if (log.isDebugEnabled()) { log.debug("Adding Self signup configuration to the tenant's registry"); } } catch (RegistryException e) { throw new APIManagementException("Error while getting Self signup role information from the registry", e); } catch (ParserConfigurationException e) { throw new APIManagementException("Error while getting Self signup role information from the registry", e); } catch (SAXException e) { throw new APIManagementException("Error while getting Self signup role information from the registry", e); } catch (IOException e) { throw new APIManagementException("Error while getting Self signup role information from the registry", e); } } /** * Returns whether subscriber role creation enabled for the given tenant in tenant-conf.json * * @param tenantId id of the tenant * @return true if subscriber role creation enabled in tenant-conf.json */ public static boolean isSubscriberRoleCreationEnabled(int tenantId) throws APIManagementException { String tenantDomain = getTenantDomainFromTenantId(tenantId); JSONObject defaultRoles = getTenantDefaultRoles(tenantDomain); boolean isSubscriberRoleCreationEnabled = false; if (defaultRoles != null) { JSONObject subscriberRoleConfig = (JSONObject) defaultRoles .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_SUBSCRIBER_ROLE); isSubscriberRoleCreationEnabled = isRoleCreationEnabled(subscriberRoleConfig); } return isSubscriberRoleCreationEnabled; } /** * Create default roles specified in APIM per-tenant configuration file * * @param tenantId id of the tenant * @throws APIManagementException */ public static void createDefaultRoles(int tenantId) throws APIManagementException { String tenantDomain = getTenantDomainFromTenantId(tenantId); JSONObject defaultRoles = getTenantDefaultRoles(tenantDomain); if (defaultRoles != null) { // create publisher role if it's creation is enabled in tenant-conf.json JSONObject publisherRoleConfig = (JSONObject) defaultRoles .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_PUBLISHER_ROLE); if (isRoleCreationEnabled(publisherRoleConfig)) { String publisherRoleName = String.valueOf(publisherRoleConfig .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_ROLENAME)); if (!StringUtils.isBlank(publisherRoleName)) { createPublisherRole(publisherRoleName, tenantId); } } // create creator role if it's creation is enabled in tenant-conf.json JSONObject creatorRoleConfig = (JSONObject) defaultRoles .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_CREATOR_ROLE); if (isRoleCreationEnabled(creatorRoleConfig)) { String creatorRoleName = String.valueOf(creatorRoleConfig .get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES_ROLENAME)); if (!StringUtils.isBlank(creatorRoleName)) { createCreatorRole(creatorRoleName, tenantId); } } createSelfSignUpRoles(tenantId); } } /** * Returns whether role creation enabled for the provided role config * * @param roleConfig role config in tenat-conf.json * @return true if role creation enabled for the provided role config */ private static boolean isRoleCreationEnabled (JSONObject roleConfig) { boolean roleCreationEnabled = false; if (roleConfig != null && roleConfig.get( APIConstants.API_TENANT_CONF_DEFAULT_ROLES_CREATE_ON_TENANT_LOAD) != null && (Boolean) (roleConfig.get( APIConstants.API_TENANT_CONF_DEFAULT_ROLES_CREATE_ON_TENANT_LOAD))) { roleCreationEnabled = true; } return roleCreationEnabled; } public static boolean isAnalyticsEnabled() { return APIManagerAnalyticsConfiguration.getInstance().isAnalyticsEnabled(); } /** * Add all the custom sequences of 
given type to registry * * @param registry Registry instance * @param customSequenceType Custom sequence type which is in/out or fault * @throws APIManagementException */ public static void addDefinedAllSequencesToRegistry(UserRegistry registry, String customSequenceType) throws APIManagementException { InputStream inSeqStream = null; String seqFolderLocation = APIConstants.API_CUSTOM_SEQUENCES_FOLDER_LOCATION + File.separator + customSequenceType; try { File inSequenceDir = new File(seqFolderLocation); File[] sequences; sequences = inSequenceDir.listFiles(); if (sequences != null) { //Tracks whether new sequences are there to deploy boolean availableNewSequences = false; //Tracks whether json_fault.xml is in the registry boolean jsonFaultSeqInRegistry = false; for (File sequenceFile : sequences) { String sequenceFileName = sequenceFile.getName(); String regResourcePath = APIConstants.API_CUSTOM_SEQUENCE_LOCATION + '/' + customSequenceType + '/' + sequenceFileName; if (registry.resourceExists(regResourcePath)) { if (APIConstants.API_CUSTOM_SEQ_JSON_FAULT.equals(sequenceFileName)) { jsonFaultSeqInRegistry = true; } if (log.isDebugEnabled()) { log.debug("The sequence file with the name " + sequenceFileName + " already exists in the registry path " + regResourcePath); } } else { availableNewSequences = true; if (log.isDebugEnabled()) { log.debug( "Adding sequence file with the name " + sequenceFileName + " to the registry path " + regResourcePath); } inSeqStream = new FileInputStream(sequenceFile); byte[] inSeqData = IOUtils.toByteArray(inSeqStream); Resource inSeqResource = registry.newResource(); inSeqResource.setContent(inSeqData); registry.put(regResourcePath, inSeqResource); } } //On the fly migration of json_fault.xml for 2.0.0 to 2.1.0 if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT.equals(customSequenceType) && availableNewSequences && jsonFaultSeqInRegistry) { String oldFaultStatHandler = "org.wso2.carbon.apimgt.usage.publisher.APIMgtFaultHandler"; String newFaultStatHandler = "org.wso2.carbon.apimgt.gateway.handlers.analytics.APIMgtFaultHandler"; String regResourcePath = APIConstants.API_CUSTOM_SEQUENCE_LOCATION + '/' + customSequenceType + '/' + APIConstants.API_CUSTOM_SEQ_JSON_FAULT; Resource jsonFaultSeqResource = registry.get(regResourcePath); String oldJsonFaultSeqContent = new String((byte[]) jsonFaultSeqResource.getContent(), Charset.defaultCharset()); if (oldJsonFaultSeqContent != null && oldJsonFaultSeqContent.contains(oldFaultStatHandler)) { String newJsonFaultContent = oldJsonFaultSeqContent.replace(oldFaultStatHandler, newFaultStatHandler); jsonFaultSeqResource.setContent(newJsonFaultContent); registry.put(regResourcePath, jsonFaultSeqResource); } } } else { log.error( "Custom sequence template location unavailable for custom sequence type " + customSequenceType + " : " + seqFolderLocation ); } } catch (RegistryException e) { throw new APIManagementException( "Error while saving defined sequences to the registry ", e); } catch (IOException e) { throw new APIManagementException("Error while reading defined sequence ", e); } finally { IOUtils.closeQuietly(inSeqStream); } } /** * Adds the sequences defined in repository/resources/customsequences folder to tenant registry * * @param tenantID tenant Id * @throws APIManagementException */ public static void writeDefinedSequencesToTenantRegistry(int tenantID) throws APIManagementException { try { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry govRegistry = 
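/*
 * Illustrative sketch: addDefinedAllSequencesToRegistry(...) above stores each bundled
 * sequence at API_CUSTOM_SEQUENCE_LOCATION + "/" + {in|out|fault} + "/" + fileName, so a
 * hypothetical file "log_in_message.xml" of type "in" would land under the "in"
 * sub-collection (assuming API_CUSTOM_SEQUENCE_LOCATION points at the customsequences
 * collection). The json_fault.xml check above additionally migrates the old
 * usage-publisher APIMgtFaultHandler class name to the gateway analytics handler when
 * new default sequences are being deployed.
 */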
registryService.getGovernanceSystemRegistry(tenantID); //Add all custom in,out and fault sequences to tenant registry APIUtil.addDefinedAllSequencesToRegistry(govRegistry, APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN); APIUtil.addDefinedAllSequencesToRegistry(govRegistry, APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT); APIUtil.addDefinedAllSequencesToRegistry(govRegistry, APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT); } catch (RegistryException e) { throw new APIManagementException( "Error while saving defined sequences to the registry of tenant with id " + tenantID, e); } } /** * Load the API RXT to the registry for tenants * * @param tenant * @param tenantID * @throws APIManagementException */ public static void loadloadTenantAPIRXT(String tenant, int tenantID) throws APIManagementException { RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); UserRegistry registry = null; try { registry = registryService.getGovernanceSystemRegistry(tenantID); } catch (RegistryException e) { throw new APIManagementException("Error when create registry instance ", e); } String rxtDir = CarbonUtils.getCarbonHome() + File.separator + "repository" + File.separator + "resources" + File.separator + "rxts"; File file = new File(rxtDir); FilenameFilter filenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { // if the file extension is .rxt return true, else false return name.endsWith(".rxt"); } }; String[] rxtFilePaths = file.list(filenameFilter); if (rxtFilePaths == null) { throw new APIManagementException("rxt files not found in directory " + rxtDir); } for (String rxtPath : rxtFilePaths) { String resourcePath = GovernanceConstants.RXT_CONFIGS_PATH + RegistryConstants.PATH_SEPARATOR + rxtPath; //This is "registry" is a governance registry instance, therefore calculate the relative path to governance. String govRelativePath = RegistryUtils.getRelativePathToOriginal(resourcePath, APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH)); try { // calculate resource path RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager (ServiceReferenceHolder.getUserRealm()); resourcePath = authorizationManager.computePathOnMount(resourcePath); org.wso2.carbon.user.api.AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). getTenantUserRealm(tenantID).getAuthorizationManager(); if (registry.resourceExists(govRelativePath)) { // set anonymous user permission to RXTs authManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); continue; } String rxt = FileUtil.readFileToString(rxtDir + File.separator + rxtPath); Resource resource = registry.newResource(); resource.setContent(rxt.getBytes(Charset.defaultCharset())); resource.setMediaType(APIConstants.RXT_MEDIA_TYPE); registry.put(govRelativePath, resource); authManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, resourcePath, ActionConstants.GET); } catch (UserStoreException e) { throw new APIManagementException("Error while adding role permissions to API", e); } catch (IOException e) { String msg = "Failed to read rxt files"; throw new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to add rxt to registry "; throw new APIManagementException(msg, e); } } } /** * Converting the user store domain name to uppercase. * * @param username Username to be modified * @return Username with domain name set to uppercase. 
*/ public static String setDomainNameToUppercase(String username) { String modifiedName = username; if (username != null) { String[] nameParts = username.split(CarbonConstants.DOMAIN_SEPARATOR); if (nameParts.length > 1) { modifiedName = nameParts[0].toUpperCase() + CarbonConstants.DOMAIN_SEPARATOR + nameParts[1]; } } return modifiedName; } /** * Create APIM Subscriber role with the given name in specified tenant * * @param roleName role name * @param tenantId id of the tenant * @throws APIManagementException */ public static void createSubscriberRole(String roleName, int tenantId) throws APIManagementException { Permission[] subscriberPermissions = new Permission[] { new Permission(APIConstants.Permissions.LOGIN, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.API_SUBSCRIBE, UserMgtConstants.EXECUTE_ACTION) }; createRole (roleName, subscriberPermissions, tenantId); } /** * Create APIM Publisher roles with the given name in specified tenant * * @param roleName role name * @param tenantId id of the tenant * @throws APIManagementException */ public static void createPublisherRole(String roleName, int tenantId) throws APIManagementException { Permission[] publisherPermissions = new Permission[] { new Permission(APIConstants.Permissions.LOGIN, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.API_PUBLISH, UserMgtConstants.EXECUTE_ACTION) }; createRole (roleName, publisherPermissions, tenantId); } /** * Create APIM Creator roles with the given name in specified tenant * * @param roleName role name * @param tenantId id of the tenant * @throws APIManagementException */ public static void createCreatorRole(String roleName, int tenantId) throws APIManagementException { Permission[] creatorPermissions = new Permission[] { new Permission(APIConstants.Permissions.LOGIN, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.API_CREATE, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.CONFIGURE_GOVERNANCE, UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.RESOURCE_GOVERN, UserMgtConstants.EXECUTE_ACTION)}; createRole (roleName, creatorPermissions, tenantId); } /** * Creates a role with a given set of permissions for the specified tenant * * @param roleName role name * @param permissions a set of permissions to be associated with the role * @param tenantId id of the tenant * @throws APIManagementException */ public static void createRole(String roleName, Permission[] permissions, int tenantId) throws APIManagementException { try { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); UserRealm realm; org.wso2.carbon.user.api.UserRealm tenantRealm; UserStoreManager manager; if (tenantId < 0) { realm = realmService.getBootstrapRealm(); manager = realm.getUserStoreManager(); } else { tenantRealm = realmService.getTenantUserRealm(tenantId); manager = tenantRealm.getUserStoreManager(); } if (!manager.isExistingRole(roleName)) { if (log.isDebugEnabled()) { log.debug("Creating role: " + roleName); } String tenantAdminName = ServiceReferenceHolder.getInstance().getRealmService() .getTenantUserRealm(tenantId).getRealmConfiguration().getAdminUserName(); String[] userList = new String[] { tenantAdminName }; manager.addRole(roleName, userList, permissions); } } catch (UserStoreException e) { throw new APIManagementException("Error while creating role: " + roleName, e); } } public void setupSelfRegistration(APIManagerConfiguration config, int tenantId) throws 
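/*
 * Illustrative sketch of createRole(...) above with a hypothetical role name; the
 * permission constants are the same ones used by the subscriber/publisher/creator
 * helpers in this class:
 *
 *   Permission[] perms = new Permission[]{
 *           new Permission(APIConstants.Permissions.LOGIN, UserMgtConstants.EXECUTE_ACTION)};
 *   APIUtil.createRole("Internal/observer", perms, tenantId);
 */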
APIManagementException { boolean enabled = Boolean.parseBoolean(config.getFirstProperty(APIConstants.SELF_SIGN_UP_ENABLED)); if (!enabled) { return; } // Create the subscriber role as an internal role String role = UserCoreConstants.INTERNAL_DOMAIN + CarbonConstants.DOMAIN_SEPARATOR + config.getFirstProperty(APIConstants.SELF_SIGN_UP_ROLE); if ((UserCoreConstants.INTERNAL_DOMAIN + CarbonConstants.DOMAIN_SEPARATOR).equals(role)) { // Required parameter missing - Throw an exception and interrupt startup throw new APIManagementException("Required subscriber role parameter missing " + "in the self sign up configuration"); } try { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); UserRealm realm; org.wso2.carbon.user.api.UserRealm tenantRealm; UserStoreManager manager; if (tenantId < 0) { realm = realmService.getBootstrapRealm(); manager = realm.getUserStoreManager(); } else { tenantRealm = realmService.getTenantUserRealm(tenantId); manager = tenantRealm.getUserStoreManager(); } if (!manager.isExistingRole(role)) { if (log.isDebugEnabled()) { log.debug("Creating subscriber role: " + role); } Permission[] subscriberPermissions = new Permission[]{ new Permission("/permission/admin/login", UserMgtConstants.EXECUTE_ACTION), new Permission(APIConstants.Permissions.API_SUBSCRIBE, UserMgtConstants.EXECUTE_ACTION)}; String tenantAdminName = ServiceReferenceHolder.getInstance().getRealmService() .getTenantUserRealm(tenantId).getRealmConfiguration().getAdminUserName(); String[] userList = new String[]{tenantAdminName}; manager.addRole(role, userList, subscriberPermissions); } } catch (UserStoreException e) { throw new APIManagementException("Error while creating subscriber role: " + role + " - " + "Self registration might not function properly.", e); } } public static String removeAnySymbolFromUriTempate(String uriTemplate) { if (uriTemplate != null) { int anySymbolIndex = uriTemplate.indexOf("/*"); if (anySymbolIndex != -1) { return uriTemplate.substring(0, anySymbolIndex); } } return uriTemplate; } public static float getAverageRating(APIIdentifier apiId) throws APIManagementException { return ApiMgtDAO.getInstance().getAverageRating(apiId); } public static float getAverageRating(int apiId) throws APIManagementException { return ApiMgtDAO.getInstance().getAverageRating(apiId); } public static List<Tenant> getAllTenantsWithSuperTenant() throws UserStoreException { Tenant[] tenants = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getAllTenants(); ArrayList<Tenant> tenantArrayList = new ArrayList<Tenant>(); Collections.addAll(tenantArrayList, tenants); Tenant superAdminTenant = new Tenant(); superAdminTenant.setDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); superAdminTenant.setId(org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_ID); superAdminTenant.setAdminName(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME); tenantArrayList.add(superAdminTenant); return tenantArrayList; } /** * In multi tenant environment, publishers should allow only to revoke the tokens generated within his domain. * Super tenant should not see the tenant created tokens and vise versa. This method is used to check the logged in * user have permissions to revoke a given users tokens. 
* * @param loggedInUser current logged in user to publisher * @param authorizedUser access token owner * @return */ public static boolean isLoggedInUserAuthorizedToRevokeToken(String loggedInUser, String authorizedUser) { String loggedUserTenantDomain = MultitenantUtils.getTenantDomain(loggedInUser); String authorizedUserTenantDomain = MultitenantUtils.getTenantDomain(authorizedUser); if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(loggedUserTenantDomain) && MultitenantConstants .SUPER_TENANT_DOMAIN_NAME.equals(authorizedUserTenantDomain)) { return true; } else if (authorizedUserTenantDomain.equals(loggedUserTenantDomain)) { return true; } return false; } public static int getApplicationId(String appName, String userId) throws APIManagementException { return ApiMgtDAO.getInstance().getApplicationId(appName, userId); } public static boolean isAPIManagementEnabled() { return Boolean.parseBoolean(CarbonUtils.getServerConfiguration().getFirstProperty("APIManagement.Enabled")); } public static boolean isLoadAPIContextsAtStartup() { return Boolean.parseBoolean(CarbonUtils.getServerConfiguration().getFirstProperty( "APIManagement.LoadAPIContextsInServerStartup")); } public static Set<APIStore> getExternalAPIStores(int tenantId) throws APIManagementException { SortedSet<APIStore> apistoreSet = new TreeSet<APIStore>(new APIStoreNameComparator()); apistoreSet.addAll(getExternalStores(tenantId)); return apistoreSet; } public static boolean isAllowDisplayAPIsWithMultipleStatus() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). getAPIManagerConfigurationService().getAPIManagerConfiguration(); String displayAllAPIs = config.getFirstProperty(APIConstants.API_STORE_DISPLAY_ALL_APIS); if (displayAllAPIs == null) { log.warn("The configurations related to show deprecated APIs in APIStore " + "are missing in api-manager.xml."); return false; } return Boolean.parseBoolean(displayAllAPIs); } public static boolean isAllowDisplayMultipleVersions() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). 
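/*
 * Illustrative sketch of isLoggedInUserAuthorizedToRevokeToken(...) above, using
 * hypothetical user names:
 *
 *   ("admin@wso2.com", "bob@wso2.com")  -> true   (same tenant domain)
 *   ("admin",          "bob@wso2.com")  -> false  (super tenant vs. tenant user)
 *   ("admin",          "bob")           -> true   (both in the super tenant domain)
 */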
getAPIManagerConfigurationService().getAPIManagerConfiguration(); String displayMultiVersions = config.getFirstProperty(APIConstants.API_STORE_DISPLAY_MULTIPLE_VERSIONS); if (displayMultiVersions == null) { log.warn("The configurations related to show multiple versions of API in APIStore " + "are missing in api-manager.xml."); return false; } return Boolean.parseBoolean(displayMultiVersions); } public static Set<APIStore> getExternalAPIStores(Set<APIStore> inputStores, int tenantId) throws APIManagementException { SortedSet<APIStore> apiStores = new TreeSet<APIStore>(new APIStoreNameComparator()); apiStores.addAll(getExternalStores(tenantId)); //Retains only the stores that contained in configuration inputStores.retainAll(apiStores); boolean exists = false; if (!apiStores.isEmpty()) { for (APIStore store : apiStores) { for (APIStore inputStore : inputStores) { if (inputStore.getName().equals(store.getName())) { // If the configured apistore already stored in // db,ignore adding it again exists = true; } } if (!exists) { inputStores.add(store); } exists = false; } } return inputStores; } public static boolean isAPIsPublishToExternalAPIStores(int tenantId) throws APIManagementException { return !getExternalStores(tenantId).isEmpty(); } public static boolean isAPIGatewayKeyCacheEnabled() { try { APIManagerConfiguration config = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration(); String serviceURL = config.getFirstProperty(APIConstants.GATEWAY_TOKEN_CACHE_ENABLED); return Boolean.parseBoolean(serviceURL); } catch (Exception e) { log.error("Did not found valid API Validation Information cache configuration. Use default configuration" + e); } return true; } public static Cache getAPIContextCache() { CacheManager contextCacheManager = Caching.getCacheManager(APIConstants.API_CONTEXT_CACHE_MANAGER). getCache(APIConstants.API_CONTEXT_CACHE).getCacheManager(); if (!isContextCacheInitialized) { isContextCacheInitialized = true; return contextCacheManager.<String, Boolean>createCacheBuilder(APIConstants.API_CONTEXT_CACHE_MANAGER). 
setExpiry(CacheConfiguration.ExpiryType.MODIFIED, new CacheConfiguration.Duration(TimeUnit.DAYS, APIConstants.API_CONTEXT_CACHE_EXPIRY_TIME_IN_DAYS)).setStoreByValue(false).build(); } else { return Caching.getCacheManager(APIConstants.API_CONTEXT_CACHE_MANAGER).getCache(APIConstants.API_CONTEXT_CACHE); } } /** * Get active tenant domains * * @return * @throws UserStoreException */ public static Set<String> getActiveTenantDomains() throws UserStoreException { Set<String> tenantDomains; Tenant[] tenants = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getAllTenants(); if (tenants == null || tenants.length == 0) { tenantDomains = Collections.<String>emptySet(); } else { tenantDomains = new HashSet<String>(); for (Tenant tenant : tenants) { if (tenant.isActive()) { tenantDomains.add(tenant.getDomain()); } } if (!tenantDomains.isEmpty()) { tenantDomains.add(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } return tenantDomains; } /** * Retrieves the role list of system * * @throws APIManagementException If an error occurs */ public static String[] getRoleNames(String username) throws APIManagementException { String tenantDomain = MultitenantUtils.getTenantDomain(username); try { if (!org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); UserStoreManager manager = ServiceReferenceHolder.getInstance().getRealmService() .getTenantUserRealm(tenantId).getUserStoreManager(); return manager.getRoleNames(); } else { return AuthorizationManager.getInstance().getRoleNames(); } } catch (UserStoreException e) { log.error("Error while getting all the roles", e); return new String[0]; } } /** * check whether given role is exist * * @param userName logged user * @param roleName role name need to check * @return true if exist and false if not * @throws APIManagementException If an error occurs */ public static boolean isRoleNameExist(String userName, String roleName) throws APIManagementException { if (roleName == null || StringUtils.isEmpty(roleName.trim())) { return true; } //disable role validation if "disableRoleValidationAtScopeCreation" system property is set String disableRoleValidation = System.getProperty(DISABLE_ROLE_VALIDATION_AT_SCOPE_CREATION); if (Boolean.parseBoolean(disableRoleValidation)) { return true; } org.wso2.carbon.user.api.UserStoreManager userStoreManager; try { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(MultitenantUtils.getTenantDomain(userName)); userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager(); String[] roles = roleName.split(","); for (String role : roles) { if (!userStoreManager.isExistingRole(role)) { return false; } } } catch (org.wso2.carbon.user.api.UserStoreException e) { log.error("Error when getting the list of roles", e); } return true; } /** * Create API Definition in JSON * * @param api API * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to generate the content and save * @deprecated */ @Deprecated public static String createSwaggerJSONContent(API api) throws APIManagementException { APIIdentifier identifier = api.getId(); APIManagerConfiguration config = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration(); Environment environment = 
(Environment) config.getApiGatewayEnvironments().values().toArray()[0]; String endpoints = environment.getApiGatewayEndpoint(); String[] endpointsSet = endpoints.split(","); String apiContext = api.getContext(); String version = identifier.getVersion(); Set<URITemplate> uriTemplates = api.getUriTemplates(); String description = api.getDescription(); // With the new context version strategy, the URL prefix is the apiContext. the verison will be embedded in // the apiContext. String urlPrefix = apiContext; if (endpointsSet.length < 1) { throw new APIManagementException("Error in creating JSON representation of the API" + identifier.getApiName()); } if (description == null || "".equals(description)) { description = ""; } else { description = description.trim(); } Map<String, List<Operation>> uriTemplateDefinitions = new HashMap<String, List<Operation>>(); List<APIResource> apis = new ArrayList<APIResource>(); for (URITemplate template : uriTemplates) { List<Operation> ops; List<Parameter> parameters; String path = urlPrefix + APIUtil.removeAnySymbolFromUriTempate(template.getUriTemplate()); /* path exists in uriTemplateDefinitions */ if (uriTemplateDefinitions.get(path) != null) { ops = uriTemplateDefinitions.get(path); parameters = new ArrayList<Parameter>(); String httpVerb = template.getHTTPVerb(); /* For GET and DELETE Parameter name - Query Parameters */ if (Constants.Configuration.HTTP_METHOD_GET.equals(httpVerb) || Constants.Configuration.HTTP_METHOD_DELETE.equals(httpVerb)) { Parameter queryParam = new Parameter(APIConstants.OperationParameter.QUERY_PARAM_NAME, APIConstants.OperationParameter.QUERY_PARAM_DESCRIPTION, APIConstants.OperationParameter.PAYLOAD_PARAM_TYPE, false, false, "String"); parameters.add(queryParam); } else {/* For POST, PUT and PATCH Parameter name - Payload */ Parameter payLoadParam = new Parameter(APIConstants.OperationParameter.PAYLOAD_PARAM_NAME, APIConstants.OperationParameter.PAYLOAD_PARAM_DESCRIPTION, APIConstants.OperationParameter.PAYLOAD_PARAM_TYPE, false, false, "String"); parameters.add(payLoadParam); } Parameter authParam = new Parameter(APIConstants.OperationParameter.AUTH_PARAM_NAME, APIConstants.OperationParameter.AUTH_PARAM_DESCRIPTION, APIConstants.OperationParameter.AUTH_PARAM_TYPE, false, false, "String"); parameters.add(authParam); if (!"OPTIONS".equals(httpVerb)) { Operation op = new Operation(httpVerb, description, description, parameters); ops.add(op); } } else {/* path not exists in uriTemplateDefinitions */ ops = new ArrayList<Operation>(); parameters = new ArrayList<Parameter>(); String httpVerb = template.getHTTPVerb(); /* For GET and DELETE Parameter name - Query Parameters */ if (Constants.Configuration.HTTP_METHOD_GET.equals(httpVerb) || Constants.Configuration.HTTP_METHOD_DELETE.equals(httpVerb)) { Parameter queryParam = new Parameter(APIConstants.OperationParameter.QUERY_PARAM_NAME, APIConstants.OperationParameter.QUERY_PARAM_DESCRIPTION, APIConstants.OperationParameter.PAYLOAD_PARAM_TYPE, false, false, "String"); parameters.add(queryParam); } else {/* For POST,PUT and PATCH Parameter name - Payload */ Parameter payLoadParam = new Parameter(APIConstants.OperationParameter.PAYLOAD_PARAM_NAME, APIConstants.OperationParameter.PAYLOAD_PARAM_DESCRIPTION, APIConstants.OperationParameter.PAYLOAD_PARAM_TYPE, false, false, "String"); parameters.add(payLoadParam); } Parameter authParam = new Parameter(APIConstants.OperationParameter.AUTH_PARAM_NAME, APIConstants.OperationParameter.AUTH_PARAM_DESCRIPTION, 
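/*
 * Illustrative sketch: removeAnySymbolFromUriTempate(...) strips a trailing "/*", so for
 * a hypothetical API with context "/pizzashack" a template "/order/*" contributes the
 * legacy Swagger path "/pizzashack/order" here. GET and DELETE resources receive a Query
 * parameter, the other verbs a Payload parameter, and every non-OPTIONS verb also gets
 * the auth parameter built just below.
 */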
APIConstants.OperationParameter.AUTH_PARAM_TYPE, false, false, "String"); parameters.add(authParam); if (!"OPTIONS".equals(httpVerb)) { Operation op = new Operation(httpVerb, description, description, parameters); ops.add(op); } uriTemplateDefinitions.put(path, ops); } } final Set<Entry<String, List<Operation>>> entries = uriTemplateDefinitions.entrySet(); for (Entry entry : entries) { APIResource apiResource = new APIResource((String) entry.getKey(), description, (List<Operation>) entry.getValue()); apis.add(apiResource); } APIDefinition apidefinition = new APIDefinition(version, APIConstants.SWAGGER_VERSION, endpointsSet[0], apiContext, apis); Gson gson = new Gson(); return gson.toJson(apidefinition); } /** * Helper method to get tenantId from userName * * @param userName user name * @return tenantId */ public static int getTenantId(String userName) { //get tenant domain from user name String tenantDomain = MultitenantUtils.getTenantDomain(userName); return getTenantIdFromTenantDomain(tenantDomain); } /** * Helper method to get tenantId from tenantDomain * * @param tenantDomain tenant Domain * @return tenantId */ public static int getTenantIdFromTenantDomain(String tenantDomain) { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); if (realmService == null) { return MultitenantConstants.SUPER_TENANT_ID; } try { return realmService.getTenantManager().getTenantId(tenantDomain); } catch (UserStoreException e) { log.error(e.getMessage(), e); } return -1; } /** * Helper method to get tenantDomain from tenantId * * @param tenantId tenant Id * @return tenantId */ public static String getTenantDomainFromTenantId(int tenantId) { RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService(); if (realmService == null) { return MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } try { return realmService.getTenantManager().getDomain(tenantId); } catch (UserStoreException e) { log.error(e.getMessage(), e); } return null; } public static int getSuperTenantId() { return MultitenantConstants.SUPER_TENANT_ID; } /** * Helper method to get username with tenant domain. * * @param userName * @return userName with tenant domain */ public static String getUserNameWithTenantSuffix(String userName) { String userNameWithTenantPrefix = userName; String tenantDomain = MultitenantUtils.getTenantDomain(userName); if (userName != null && !userName.endsWith("@" + MultitenantConstants.SUPER_TENANT_DOMAIN_NAME) && MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { userNameWithTenantPrefix = userName + "@" + tenantDomain; } return userNameWithTenantPrefix; } /** * Build OMElement from inputstream * * @param inputStream * @return * @throws Exception */ public static OMElement buildOMElement(InputStream inputStream) throws Exception { XMLStreamReader parser; StAXOMBuilder builder; try { XMLInputFactory factory = XMLInputFactory.newInstance(); factory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false); parser = factory.createXMLStreamReader(inputStream); builder = new StAXOMBuilder(parser); } catch (XMLStreamException e) { String msg = "Error in initializing the parser."; log.error(msg, e); throw new Exception(msg, e); } return builder.getDocumentElement(); } /** * Get stored in sequences, out sequences and fault sequences from the governanceSystem registry * * @param sequenceName -The sequence to be retrieved * @param tenantId * @param direction - Direction indicates which sequences to fetch. 
Values would be * "in", "out" or "fault" * @return * @throws APIManagementException */ public static OMElement getCustomSequence(String sequenceName, int tenantId, String direction, APIIdentifier identifier) throws APIManagementException { org.wso2.carbon.registry.api.Collection seqCollection = null; try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if ("in".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION); } else if ("out".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION); } else if ("fault".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION); } if (seqCollection == null) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(getSequencePath(identifier, direction)); } if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); if (sequenceName.equals(seqElment.getAttributeValue(new QName("name")))) { return seqElment; } } } // If the sequence not found the default sequences, check in custom sequences seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(getSequencePath(identifier, direction)); if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); if (sequenceName.equals(seqElment.getAttributeValue(new QName("name")))) { return seqElment; } } } } catch (Exception e) { String msg = "Issue is in accessing the Registry"; log.error(msg); throw new APIManagementException(msg, e); } return null; } /** * Returns true if the sequence is a per API one * * @param sequenceName * @param tenantId * @param identifier API identifier * @param sequenceType in/out/fault * @return true/false * @throws APIManagementException */ public static boolean isPerAPISequence(String sequenceName, int tenantId, APIIdentifier identifier, String sequenceType) throws APIManagementException { org.wso2.carbon.registry.api.Collection seqCollection = null; try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); // If the sequence not found the default sequences, check in custom sequences if (registry.resourceExists(getSequencePath(identifier, sequenceType))) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(getSequencePath(identifier, sequenceType)); if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); if (sequenceName.equals(seqElment.getAttributeValue(new QName("name")))) { return true; } } } } } catch (RegistryException e) { String msg = "Error while retrieving registry for tenant " + tenantId; log.error(msg); throw new APIManagementException(msg, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { String msg = "Error while processing the " + sequenceType + " sequences of " + identifier + " in the registry"; 
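/*
 * Illustrative sketch of the lookup order implemented in getCustomSequence(...) above:
 * the global collection (API_CUSTOM_INSEQUENCE/OUTSEQUENCE/FAULTSEQUENCE_LOCATION) is
 * scanned first, then the per-API collection returned by getSequencePath(identifier,
 * direction); in both cases the match is on the sequence's "name" attribute, e.g. a
 * hypothetical call getCustomSequence("debug_json_fault", tenantId, "fault", apiId).
 */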
log.error(msg); throw new APIManagementException(msg, e); } catch (Exception e) { throw new APIManagementException(e.getMessage(), e); } return false; } /** * Returns uuid correspond to the given sequence name and direction * * @param sequenceName name of the sequence * @param tenantId logged in user's tenantId * @param direction in/out/fault * @param identifier API identifier * @return uuid of the given mediation sequence or null * @throws APIManagementException If failed to get the uuid of the mediation sequence */ public static String getMediationSequenceUuid(String sequenceName, int tenantId, String direction, APIIdentifier identifier) throws APIManagementException { org.wso2.carbon.registry.api.Collection seqCollection = null; String seqCollectionPath; try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if ("in".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_SEQUENCE_LOCATION+File.separator+ APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN); } else if ("out".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_SEQUENCE_LOCATION+File.separator+ APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT); } else if ("fault".equals(direction)) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry .get(APIConstants.API_CUSTOM_SEQUENCE_LOCATION+File.separator+ APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT); } if (seqCollection == null) { seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get (getSequencePath(identifier, direction)); } if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); String seqElmentName= seqElment.getAttributeValue(new QName("name")); if (sequenceName.equals(seqElmentName)) { return sequence.getUUID(); } } } // If the sequence not found the default sequences, check in custom sequences seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get (getSequencePath(identifier,direction)); if (seqCollection != null) { String[] childPaths = seqCollection.getChildren(); for (String childPath : childPaths) { Resource sequence = registry.get(childPath); OMElement seqElment = APIUtil.buildOMElement(sequence.getContentStream()); if (sequenceName.equals(seqElment.getAttributeValue(new QName("name")))) { return sequence.getUUID(); } } } } catch (Exception e) { String msg = "Issue is in accessing the Registry"; log.error(msg); throw new APIManagementException(msg, e); } return null; } /** * Returns true if sequence is set * * @param sequence * @return */ public static boolean isSequenceDefined(String sequence) { return sequence != null && !"none".equals(sequence); } /** * Return the sequence extension name. * eg: admin--testAPi--v1.00 * * @param api * @return */ public static String getSequenceExtensionName(API api) { return api.getId().getProviderName() + "--" + api.getId().getApiName() + ":v" + api.getId().getVersion(); } /** * @param token * @return */ public static String decryptToken(String token) throws CryptoException { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). 
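/*
 * Illustrative sketch of getSequenceExtensionName(...) above with hypothetical API
 * coordinates (provider "admin", name "PizzaShackAPI", version "1.0.0"):
 *
 *   "admin" + "--" + "PizzaShackAPI" + ":v" + "1.0.0"  ->  "admin--PizzaShackAPI:v1.0.0"
 *
 * Note that isSequenceDefined(...) treats both null and the literal "none" as
 * "no sequence configured".
 */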
getAPIManagerConfigurationService().getAPIManagerConfiguration(); if (Boolean.parseBoolean(config.getFirstProperty(APIConstants.ENCRYPT_TOKENS_ON_PERSISTENCE))) { return new String(CryptoUtil.getDefaultCryptoUtil().base64DecodeAndDecrypt(token), Charset.defaultCharset()); } return token; } /** * @param token * @return */ public static String encryptToken(String token) throws CryptoException { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). getAPIManagerConfigurationService().getAPIManagerConfiguration(); if (Boolean.parseBoolean(config.getFirstProperty(APIConstants.ENCRYPT_TOKENS_ON_PERSISTENCE))) { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode(token.getBytes(Charset.defaultCharset())); } return token; } public static void loadTenantRegistry(int tenantId) throws RegistryException { TenantRegistryLoader tenantRegistryLoader = APIManagerComponent.getTenantRegistryLoader(); ServiceReferenceHolder.getInstance().getIndexLoaderService().loadTenantIndex(tenantId); tenantRegistryLoader.loadTenantRegistry(tenantId); } /** * This is to get the registry resource's HTTP permlink path. * Once this issue is fixed (https://wso2.org/jira/browse/REGISTRY-2110), * we can remove this method, and get permlink from the resource. * * @param path - Registry resource path * @return {@link String} -HTTP permlink */ public static String getRegistryResourceHTTPPermlink(String path) { String schemeHttp = APIConstants.HTTP_PROTOCOL; String schemeHttps = APIConstants.HTTPS_PROTOCOL; ConfigurationContextService contetxservice = ServiceReferenceHolder.getContextService(); //First we will try to generate http permalink and if its disabled then only we will consider https int port = CarbonUtils.getTransportProxyPort(contetxservice.getServerConfigContext(), schemeHttp); if (port == -1) { port = CarbonUtils.getTransportPort(contetxservice.getServerConfigContext(), schemeHttp); } //getting https parameters if http is disabled. If proxy port is not present we will go for default port if (port == -1) { port = CarbonUtils.getTransportProxyPort(contetxservice.getServerConfigContext(), schemeHttps); } if (port == -1) { port = CarbonUtils.getTransportPort(contetxservice.getServerConfigContext(), schemeHttps); } String webContext = ServerConfiguration.getInstance().getFirstProperty("WebContextRoot"); if (webContext == null || "/".equals(webContext)) { webContext = ""; } RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); String version = ""; if (registryService == null) { log.error("Registry Service has not been set."); } else if (path != null) { try { String[] versions = registryService.getRegistry( CarbonConstants.REGISTRY_SYSTEM_USERNAME, CarbonContext.getThreadLocalCarbonContext().getTenantId()).getVersions(path); if (versions != null && versions.length > 0) { version = versions[0].substring(versions[0].lastIndexOf(";version:")); } } catch (RegistryException e) { log.error("An error occurred while determining the latest version of the " + "resource at the given path: " + path, e); } } if (port != -1 && path != null) { String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain(true); return webContext + ((tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) ? 
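/*
 * Illustrative sketch of the permlink assembled here, assuming a hypothetical tenant
 * "wso2.com" and a governance resource path:
 *
 *   <webContext>/t/wso2.com/registry/resource/<encoded path>;version:<latest>
 *
 * For the super tenant the tenant-aware "/t/<domain>" segment is omitted, and the
 * ";version:" suffix is only appended when the registry reports at least one version
 * for the path.
 */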
"/" + MultitenantConstants.TENANT_AWARE_URL_PREFIX + "/" + tenantDomain : "") + "/registry/resource" + org.wso2.carbon.registry.app.Utils.encodeRegistryPath(path) + version; } return null; } public static boolean isSandboxEndpointsExists(API api) { JSONParser parser = new JSONParser(); JSONObject config = null; try { config = (JSONObject) parser.parse(api.getEndpointConfig()); if (config.containsKey("sandbox_endpoints")) { return true; } } catch (ParseException e) { log.error(APIConstants.MSG_JSON_PARSE_ERROR, e); } catch (ClassCastException e) { log.error(APIConstants.MSG_JSON_PARSE_ERROR, e); } return false; } public static boolean isProductionEndpointsExists(API api) { JSONParser parser = new JSONParser(); JSONObject config = null; try { config = (JSONObject) parser.parse(api.getEndpointConfig()); if (config.containsKey("production_endpoints")) { return true; } } catch (ParseException e) { log.error(APIConstants.MSG_JSON_PARSE_ERROR, e); } catch (ClassCastException e) { log.error(APIConstants.MSG_JSON_PARSE_ERROR, e); } return false; } /** * This method used to get API minimum information from governance artifact * * @param artifact API artifact * @param registry Registry * @return API * @throws APIManagementException if failed to get API from artifact */ public static API getAPIInformation(GovernanceArtifact artifact, Registry registry) throws APIManagementException { API api; try { String providerName = artifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER); String apiName = artifact.getAttribute(APIConstants.API_OVERVIEW_NAME); String apiVersion = artifact.getAttribute(APIConstants.API_OVERVIEW_VERSION); api = new API(new APIIdentifier(providerName, apiName, apiVersion)); //set uuid api.setUUID(artifact.getId()); api.setThumbnailUrl(artifact.getAttribute(APIConstants.API_OVERVIEW_THUMBNAIL_URL)); api.setStatus(getApiStatus(artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS))); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); api.setVisibility(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBILITY)); api.setVisibleRoles(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_ROLES)); api.setVisibleTenants(artifact.getAttribute(APIConstants.API_OVERVIEW_VISIBLE_TENANTS)); api.setTransports(artifact.getAttribute(APIConstants.API_OVERVIEW_TRANSPORTS)); api.setInSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_INSEQUENCE)); api.setOutSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_OUTSEQUENCE)); api.setFaultSequence(artifact.getAttribute(APIConstants.API_OVERVIEW_FAULTSEQUENCE)); api.setDescription(artifact.getAttribute(APIConstants.API_OVERVIEW_DESCRIPTION)); api.setRedirectURL(artifact.getAttribute(APIConstants.API_OVERVIEW_REDIRECT_URL)); api.setBusinessOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_BUSS_OWNER)); api.setApiOwner(artifact.getAttribute(APIConstants.API_OVERVIEW_OWNER)); api.setAdvertiseOnly(Boolean.parseBoolean(artifact.getAttribute(APIConstants.API_OVERVIEW_ADVERTISE_ONLY))); String environments = artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS); api.setEnvironments(extractEnvironmentsForAPI(environments)); api.setCorsConfiguration(getCorsConfigurationFromArtifact(artifact)); String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); api.setLastUpdated(registry.get(artifactPath).getLastModified()); api.setCreatedTime(String.valueOf(registry.get(artifactPath).getCreatedTime().getTime())); } catch (GovernanceException e) { String msg = "Failed to get API from artifact "; throw 
new APIManagementException(msg, e); } catch (RegistryException e) { String msg = "Failed to get LastAccess time or Rating"; throw new APIManagementException(msg, e); } return api; } /** * Get the cache key of the ResourceInfoDTO * * @param apiContext - Context of the API * @param apiVersion - API Version * @param resourceUri - The resource uri Ex: /name/version * @param httpMethod - The http method. Ex: GET, POST * @return - The cache key */ public static String getResourceInfoDTOCacheKey(String apiContext, String apiVersion, String resourceUri, String httpMethod) { return apiContext + "/" + apiVersion + resourceUri + ":" + httpMethod; } /** * Get the key of the Resource ( used in scopes) * * @param api - API * @param template - URI Template * @return - The resource key */ public static String getResourceKey(API api, URITemplate template) { return APIUtil.getResourceKey(api.getContext(), api.getId().getVersion(), template.getUriTemplate(), template.getHTTPVerb()); } /** * Get the key of the Resource ( used in scopes) * * @param apiContext - Context of the API * @param apiVersion - API Version * @param resourceUri - The resource uri Ex: /name/version * @param httpMethod - The http method. Ex: GET, POST * @return - The resource key */ public static String getResourceKey(String apiContext, String apiVersion, String resourceUri, String httpMethod) { return apiContext + "/" + apiVersion + resourceUri + ":" + httpMethod; } /** * Find scope object in a set based on the key * * @param scopes - Set of scopes * @param key - Key to search with * @return Scope - scope object */ public static Scope findScopeByKey(Set<Scope> scopes, String key) { for (Scope scope : scopes) { if (scope.getKey().equals(key)) { return scope; } } return null; } /** * Get the cache key of the APIInfoDTO * * @param apiContext - Context of the API * @param apiVersion - API Version * @return - The cache key of the APIInfoDTO */ public static String getAPIInfoDTOCacheKey(String apiContext, String apiVersion) { return apiContext + "/" + apiVersion; } /** * Get the cache key of the Access Token * * @param accessToken - The access token which is cached * @param apiContext - The context of the API * @param apiVersion - The version of the API * @param resourceUri - The value of the resource url * @param httpVerb - The http method. Ex: GET, POST * @param authLevel - Required Authentication level. Ex: Application/Application User * @return - The Key which will be used to cache the access token */ public static String getAccessTokenCacheKey(String accessToken, String apiContext, String apiVersion, String resourceUri, String httpVerb, String authLevel) { return accessToken + ':' + apiContext + '/' + apiVersion + resourceUri + ':' + httpVerb + ':' + authLevel; } /** * Resolves system properties and replaces in given in text * * @param text * @return System properties resolved text */ public static String replaceSystemProperty(String text) { int indexOfStartingChars = -1; int indexOfClosingBrace; // The following condition deals with properties. // Properties are specified as ${system.property}, // and are assumed to be System properties while (indexOfStartingChars < text.indexOf("${") && (indexOfStartingChars = text.indexOf("${")) != -1 && (indexOfClosingBrace = text.indexOf('}')) != -1) { // Is a // property // used? 
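/*
 * Illustrative sketch of the cache/resource key helpers defined above, with
 * hypothetical values:
 *
 *   getResourceKey("/pizzashack", "1.0.0", "/order", "POST")
 *       -> "/pizzashack/1.0.0/order:POST"
 *   getAccessTokenCacheKey("abc123", "/pizzashack", "1.0.0", "/order", "POST", "Any")
 *       -> "abc123:/pizzashack/1.0.0/order:POST:Any"
 */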
String sysProp = text.substring(indexOfStartingChars + 2, indexOfClosingBrace); String propValue = System.getProperty(sysProp); if (propValue == null) { if ("carbon.context".equals(sysProp)) { propValue = ServiceReferenceHolder.getContextService().getServerConfigContext().getContextRoot(); } else if ("admin.username".equals(sysProp) || "admin.password".equals(sysProp)) { try { RealmConfiguration realmConfig = new RealmConfigXMLProcessor().buildRealmConfigurationFromFile(); if ("admin.username".equals(sysProp)) { propValue = realmConfig.getAdminUserName(); } else { propValue = realmConfig.getAdminPassword(); } } catch (UserStoreException e) { // Can't throw an exception because the server is // starting and can't be halted. log.error("Unable to build the Realm Configuration", e); return null; } } } //Derive original text value with resolved system property value if (propValue != null) { text = text.substring(0, indexOfStartingChars) + propValue + text.substring(indexOfClosingBrace + 1); } if ("carbon.home".equals(sysProp) && propValue != null && ".".equals(propValue)) { text = new File(".").getAbsolutePath() + File.separator + text; } } return text; } public static String encryptPassword(String plainTextPassword) throws APIManagementException { try { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode(plainTextPassword.getBytes(Charset.defaultCharset())); } catch (CryptoException e) { String errorMsg = "Error while encrypting the password. " + e.getMessage(); throw new APIManagementException(errorMsg, e); } } /** * Search Apis by Doc Content * * @param registry - Registry which is searched * @param tenantID - Tenant id of logged in domain * @param username - Logged in username * @param searchTerm - Search value for doc * @param searchClient - Search client * @return - Documentation to APIs map * @throws APIManagementException - If failed to get ArtifactManager for given tenant */ public static Map<Documentation, API> searchAPIsByDoc(Registry registry, int tenantID, String username, String searchTerm, String searchClient) throws APIManagementException { Map<Documentation, API> apiDocMap = new HashMap<Documentation, API>(); try { PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(username); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); GenericArtifactManager docArtifactManager = APIUtil.getArtifactManager(registry, APIConstants.DOCUMENTATION_KEY); SolrClient client = SolrClient.getInstance(); Map<String, String> fields = new HashMap<String, String>(); fields.put(APIConstants.DOCUMENTATION_SEARCH_PATH_FIELD, "*" + APIConstants.API_ROOT_LOCATION + "*"); fields.put(APIConstants.DOCUMENTATION_SEARCH_MEDIA_TYPE_FIELD, "*"); if (tenantID == -1) { tenantID = MultitenantConstants.SUPER_TENANT_ID; } //PaginationContext.init(0, 10000, "ASC", APIConstants.DOCUMENTATION_SEARCH_PATH_FIELD, Integer.MAX_VALUE); SolrDocumentList documentList = client.query(searchTerm, tenantID, fields); org.wso2.carbon.user.api.AuthorizationManager manager = ServiceReferenceHolder.getInstance(). getRealmService().getTenantUserRealm(tenantID). 
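/*
 * Illustrative sketch of replaceSystemProperty(...) above, assuming a hypothetical JVM
 * system property -Dsetup.home=/opt/wso2am:
 *
 *   replaceSystemProperty("${setup.home}/repository")  ->  "/opt/wso2am/repository"
 *
 * The properties carbon.context, admin.username and admin.password are resolved
 * specially (from the server context and the realm configuration) rather than from
 * System.getProperty().
 */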
getAuthorizationManager(); username = MultitenantUtils.getTenantAwareUsername(username); for (SolrDocument document : documentList) { String filePath = (String) document.getFieldValue("path_s"); int index = filePath.indexOf(APIConstants.APIMGT_REGISTRY_LOCATION); filePath = filePath.substring(index); Association[] associations = registry.getAllAssociations(filePath); API api = null; Documentation doc = null; for (Association association : associations) { boolean isAuthorized; String documentationPath = association.getSourcePath(); String path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + documentationPath); if (CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME.equalsIgnoreCase(username)) { isAuthorized = manager.isRoleAuthorized(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET); } else { isAuthorized = manager.isUserAuthorized(username, path, ActionConstants.GET); } if (isAuthorized) { Resource docResource = registry.get(documentationPath); String docArtifactId = docResource.getUUID(); if (docArtifactId != null) { GenericArtifact docArtifact = docArtifactManager.getGenericArtifact(docArtifactId); doc = APIUtil.getDocumentation(docArtifact); } Association[] docAssociations = registry.getAssociations(documentationPath, APIConstants.DOCUMENTATION_ASSOCIATION); /* There will be only one document association, for a document path which is by its owner API*/ if (docAssociations.length > 0) { String apiPath = docAssociations[0].getSourcePath(); path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + apiPath); if (CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME.equalsIgnoreCase(username)) { isAuthorized = manager.isRoleAuthorized(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET); } else { isAuthorized = manager.isUserAuthorized(username, path, ActionConstants.GET); } if (isAuthorized) { Resource resource = registry.get(apiPath); String apiArtifactId = resource.getUUID(); if (apiArtifactId != null) { GenericArtifact apiArtifact = artifactManager.getGenericArtifact(apiArtifactId); api = APIUtil.getAPI(apiArtifact, registry); } else { throw new GovernanceException("artifact id is null of " + apiPath); } } } } if (doc != null && api != null) { if (APIConstants.STORE_CLIENT.equals(searchClient)) { if (api.getStatus().equals(getApiStatus(APIConstants.PUBLISHED)) || api.getStatus().equals(getApiStatus(APIConstants.PROTOTYPED))) { apiDocMap.put(doc, api); } } else { apiDocMap.put(doc, api); } } } } } catch (IndexerException e) { handleException("Failed to search APIs with type Doc", e); } catch (RegistryException e) { handleException("Failed to search APIs with type Doc", e); } catch (UserStoreException e) { handleException("Failed to search APIs with type Doc", e); } return apiDocMap; } public static Map<String, Object> searchAPIsByURLPattern(Registry registry, String searchTerm, int start, int end) throws APIManagementException { SortedSet<API> apiSet = new TreeSet<API>(new APINameComparator()); List<API> apiList = new ArrayList<API>(); final String searchValue = searchTerm.trim(); Map<String, Object> result = new HashMap<String, Object>(); int totalLength = 0; String criteria; Map<String, List<String>> listMap = new HashMap<String, List<String>>(); GenericArtifact[] genericArtifacts = new GenericArtifact[0]; GenericArtifactManager artifactManager = 
null; try { artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); PaginationContext.init(0, 10000, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE); if (artifactManager != null) { for (int i = 0; i < 20; i++) { //This need to fix in future.We don't have a way to get max value of // "url_template" entry stores in registry,unless we search in each API criteria = APIConstants.API_URI_PATTERN + i; listMap.put(criteria, new ArrayList<String>() { { add(searchValue); } }); genericArtifacts = (GenericArtifact[]) ArrayUtils.addAll(genericArtifacts, artifactManager .findGenericArtifacts(listMap)); } if (genericArtifacts == null || genericArtifacts.length == 0) { result.put("apis", apiSet); result.put("length", 0); return result; } totalLength = genericArtifacts.length; StringBuilder apiNames = new StringBuilder(); for (GenericArtifact artifact : genericArtifacts) { if (apiNames.indexOf(artifact.getAttribute(APIConstants.API_OVERVIEW_NAME)) < 0) { String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS); if (isAllowDisplayAPIsWithMultipleStatus()) { if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) { API api = APIUtil.getAPI(artifact, registry); if (api != null) { apiList.add(api); apiNames.append(api.getId().getApiName()); } } } else { if (APIConstants.PUBLISHED.equals(status)) { API api = APIUtil.getAPI(artifact, registry); if (api != null) { apiList.add(api); apiNames.append(api.getId().getApiName()); } } } } totalLength = apiList.size(); } if (totalLength <= ((start + end) - 1)) { end = totalLength; } for (int i = start; i < end; i++) { apiSet.add(apiList.get(i)); } } } catch (APIManagementException e) { handleException("Failed to search APIs with input url-pattern", e); } catch (GovernanceException e) { handleException("Failed to search APIs with input url-pattern", e); } result.put("apis", apiSet); result.put("length", totalLength); return result; } /** * This method will check the validity of given url. WSDL url should be * contain http, https or file system patch * otherwise we will mark it as invalid wsdl url. How ever here we do not * validate wsdl content. * * @param wsdlURL wsdl url tobe tested * @return true if its valid url else fale */ public static boolean isValidWSDLURL(String wsdlURL, boolean required) { if (wsdlURL != null && !"".equals(wsdlURL)) { if (wsdlURL.startsWith("http:") || wsdlURL.startsWith("https:") || wsdlURL.startsWith("file:") || wsdlURL.startsWith("/registry")) { return true; } } else if (!required) { // If the WSDL in not required and URL is empty, then we don't need // to add debug log. // Hence returning. return false; } if (log.isDebugEnabled()) { log.debug("WSDL url validation failed. Provided wsdl url is not valid url: " + wsdlURL); } return false; } /** * load tenant axis configurations. * * @param tenantDomain */ public static void loadTenantConfig(String tenantDomain) { final String finalTenantDomain = tenantDomain; ConfigurationContext ctx = ServiceReferenceHolder.getContextService().getServerConfigContext(); //Cannot use the tenantDomain directly because it's getting locked in createTenantConfigurationContext() // method in TenantAxisUtils String accessFlag = tenantDomain + "@WSO2"; long lastAccessed = TenantAxisUtils.getLastAccessed(tenantDomain, ctx); //Only if the tenant is in unloaded state, we do the loading if (System.currentTimeMillis() - lastAccessed >= tenantIdleTimeMillis) { synchronized (accessFlag.intern()) { // Currently loading tenants are added to a set. 
// If a tenant domain is in the set it implies that particular tenant is being loaded. // Therefore if and only if the set does not contain the tenant. if (!currentLoadingTenants.contains(tenantDomain)) { //Only one concurrent request is allowed to add to the currentLoadingTenants currentLoadingTenants.add(tenantDomain); ctx.getThreadPool().execute(new Runnable() { @Override public void run() { Thread.currentThread().setName("APIMHostObjectUtils-loadTenantConfig-thread"); try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext() .setTenantDomain(finalTenantDomain, true); ConfigurationContext ctx = ServiceReferenceHolder.getContextService() .getServerConfigContext(); TenantAxisUtils.getTenantAxisConfiguration(finalTenantDomain, ctx); } catch (Exception e) { log.error("Error while creating axis configuration for tenant " + finalTenantDomain, e); } finally { //only after the tenant is loaded completely, the tenant domain is removed from the set currentLoadingTenants.remove(finalTenantDomain); PrivilegedCarbonContext.endTenantFlow(); } } }); } } } } /** * load tenant axis configurations. * * @param tenantDomain */ public static void loadTenantConfigBlockingMode(String tenantDomain) { try { ConfigurationContext ctx = ServiceReferenceHolder.getContextService().getServerConfigContext(); TenantAxisUtils.getTenantAxisConfiguration(tenantDomain, ctx); } catch (Exception e) { log.error("Error while creating axis configuration for tenant " + tenantDomain, e); } } public static String extractCustomerKeyFromAuthHeader(Map headersMap) { //From 1.0.7 version of this component onwards remove the OAuth authorization header from // the message is configurable. So we dont need to remove headers at this point. String authHeader = (String) headersMap.get(HttpHeaders.AUTHORIZATION); if (authHeader == null) { return null; } if (authHeader.startsWith("OAuth ") || authHeader.startsWith("oauth ")) { authHeader = authHeader.substring(authHeader.indexOf("o")); } String[] headers = authHeader.split(APIConstants.OAUTH_HEADER_SPLITTER); for (String header : headers) { String[] elements = header.split(APIConstants.CONSUMER_KEY_SEGMENT_DELIMITER); if (elements.length > 1) { int j = 0; boolean isConsumerKeyHeaderAvailable = false; for (String element : elements) { if (!"".equals(element.trim())) { if (APIConstants.CONSUMER_KEY_SEGMENT.equals(elements[j].trim())) { isConsumerKeyHeaderAvailable = true; } else if (isConsumerKeyHeaderAvailable) { return removeLeadingAndTrailing(elements[j].trim()); } } j++; } } } return null; } private static String removeLeadingAndTrailing(String base) { String result = base; if (base.startsWith("\"") || base.endsWith("\"")) { result = base.replace("\"", ""); } return result.trim(); } /** * This method will return mounted path of the path if the path * is mounted. Else path will be returned. 
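     * <p>
     * Hypothetical example (the mount paths are illustrative, not taken from the original source): if the
     * registry configuration mounts "/_system/governance/apimgt" onto the target "/remote/apimgt", this method
     * returns "/remote/apimgt" for that path; a path with no matching mount is returned unchanged.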
* * @param registryContext Registry Context instance which holds path mappings * @param path default path of the registry * @return mounted path or path */ public static String getMountedPath(RegistryContext registryContext, String path) { if (registryContext != null && path != null) { List<Mount> mounts = registryContext.getMounts(); if (mounts != null) { for (Mount mount : mounts) { if (path.equals(mount.getPath())) { return mount.getTargetPath(); } } } } return path; } /** * Returns a map of gateway / store domains for the tenant * * @return a Map of domain names for tenant * @throws org.wso2.carbon.apimgt.api.APIManagementException if an error occurs when loading tiers from the registry */ public static Map<String, String> getDomainMappings(String tenantDomain, String appType) throws APIManagementException { Map<String, String> domains = new HashMap<String, String>(); String resourcePath; try { Registry registry = ServiceReferenceHolder.getInstance().getRegistryService(). getGovernanceSystemRegistry(); resourcePath = APIConstants.API_DOMAIN_MAPPINGS.replace("<tenant-id>", tenantDomain); if (registry.resourceExists(resourcePath)) { Resource resource = registry.get(resourcePath); String content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); JSONParser parser = new JSONParser(); JSONObject mappings = (JSONObject) parser.parse(content); if (mappings.get(appType) != null) { mappings = (JSONObject) mappings.get(appType); for (Object o : mappings.entrySet()) { Entry thisEntry = (Entry) o; String key = (String) thisEntry.getKey(); //Instead strictly comparing customUrl, checking whether name is starting with customUrl //to allow users to add multiple URLs if needed if(!StringUtils.isEmpty(key) && key.startsWith(APIConstants.CUSTOM_URL)) { String value = (String) thisEntry.getValue(); domains.put(key, value); } } } } } catch (RegistryException e) { String msg = "Error while retrieving gateway domain mappings from registry"; log.error(msg, e); throw new APIManagementException(msg, e); } catch (ClassCastException e) { String msg = "Invalid JSON found in the gateway tenant domain mappings"; log.error(msg, e); throw new APIManagementException(msg, e); } catch (ParseException e) { String msg = "Malformed JSON found in the gateway tenant domain mappings"; log.error(msg, e); throw new APIManagementException(msg, e); } return domains; } /** * This method used to Downloaded Uploaded Documents from publisher * * @param userName logged in username * @param resourceUrl resource want to download * @param tenantDomain loggedUserTenantDomain * @return map that contains Data of the resource * @throws APIManagementException */ public static Map<String, Object> getDocument(String userName, String resourceUrl, String tenantDomain) throws APIManagementException { Map<String, Object> documentMap = new HashMap<String, Object>(); InputStream inStream = null; String[] resourceSplitPath = resourceUrl.split(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH); if (resourceSplitPath.length == 2) { resourceUrl = resourceSplitPath[1]; } else { handleException("Invalid resource Path " + resourceUrl); } Resource apiDocResource; Registry registryType = null; boolean isTenantFlowStarted = false; try { int tenantId; if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); tenantId = 
PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
            } else {
                tenantId = MultitenantConstants.SUPER_TENANT_ID;
            }
            userName = MultitenantUtils.getTenantAwareUsername(userName);
            registryType = ServiceReferenceHolder.getInstance().getRegistryService()
                    .getGovernanceUserRegistry(userName, tenantId);
            if (registryType.resourceExists(resourceUrl)) {
                apiDocResource = registryType.get(resourceUrl);
                inStream = apiDocResource.getContentStream();
                documentMap.put("Data", inStream);
                documentMap.put("contentType", apiDocResource.getMediaType());
                String[] content = apiDocResource.getPath().split("/");
                documentMap.put("name", content[content.length - 1]);
            }
        } catch (RegistryException e) {
            String msg = "Couldn't retrieve registry for User " + userName + " Tenant " + tenantDomain;
            log.error(msg, e);
            handleException(msg, e);
        } finally {
            if (isTenantFlowStarted) {
                PrivilegedCarbonContext.endTenantFlow();
            }
        }
        return documentMap;
    }

    /**
     * This method is used to set environment values to the API object.
     *
     * @param environments environment values in JSON format
     * @return set of environments to which the API is published
     */
    public static Set<String> extractEnvironmentsForAPI(String environments) {
        Set<String> environmentStringSet = null;
        if (environments == null) {
            environmentStringSet = new HashSet<String>(
                    ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService()
                            .getAPIManagerConfiguration().getApiGatewayEnvironments().keySet());
        } else {
            // handle not publishing to any of the gateways
            if (APIConstants.API_GATEWAY_NONE.equals(environments)) {
                environmentStringSet = new HashSet<String>();
            }
            // handle setting the published gateways into the API object
            else if (!"".equals(environments)) {
                String[] publishEnvironmentArray = environments.split(",");
                environmentStringSet = new HashSet<String>(Arrays.asList(publishEnvironmentArray));
                environmentStringSet.remove(APIConstants.API_GATEWAY_NONE);
            }
            // handle publishing to all gateways at the API creation stage
            else if ("".equals(environments)) {
                environmentStringSet = new HashSet<String>(
                        ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService()
                                .getAPIManagerConfiguration().getApiGatewayEnvironments().keySet());
            }
        }
        return environmentStringSet;
    }

    /**
     * This method is used to write the environment values to the governance artifact of the API.
     *
     * @param api API object with the attribute values
     * @return comma separated list of the published gateway environment names
     */
    public static String writeEnvironmentsToArtifact(API api) {
        StringBuilder publishedEnvironments = new StringBuilder();
        Set<String> apiEnvironments = api.getEnvironments();
        if (apiEnvironments != null) {
            for (String environmentName : apiEnvironments) {
                publishedEnvironments.append(environmentName).append(',');
            }
            if (apiEnvironments.isEmpty()) {
                publishedEnvironments.append("none,");
            }
            if (!publishedEnvironments.toString().isEmpty()) {
                publishedEnvironments.deleteCharAt(publishedEnvironments.length() - 1);
            }
        }
        return publishedEnvironments.toString();
    }

    /**
     * This method is used to get the currently published gateway environments of an API.
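     * <p>
     * For example (environment names are illustrative, not from the original source): if the gateway
     * configuration defines environments "Production" and "Sandbox" but the API is published only to
     * "Production", the returned list contains just the matching "Production" Environment entry.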
* * @param api API object with the attributes value */ public static List<Environment> getEnvironmentsOfAPI(API api) { Map<String, Environment> gatewayEnvironments = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService() .getAPIManagerConfiguration() .getApiGatewayEnvironments(); Set<String> apiEnvironments = api.getEnvironments(); List<Environment> returnEnvironments = new ArrayList<Environment>(); for (Environment environment : gatewayEnvironments.values()) { for (String apiEnvironment : apiEnvironments) { if (environment.getName().equals(apiEnvironment)) { returnEnvironments.add(environment); break; } } } return returnEnvironments; } /** * Given the apps and the application name to check for, it will check if the application already exists. * * @param apps The collection of applications * @param name The application to be checked if exists * @return true - if an application of the name <name> already exists in the collection <apps> * false- if an application of the name <name> does not already exists in the collection <apps> */ public static boolean doesApplicationExist(Application[] apps, String name) { boolean doesApplicationExist = false; if (apps != null) { for (Application app : apps) { if (app.getName().equals(name)) { doesApplicationExist = true; } } } return doesApplicationExist; } /** * Read the group id extractor class reference from api-manager.xml. * * @return group id extractor class reference. */ public static String getGroupingExtractorImplementation() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration(); return config.getFirstProperty(APIConstants.API_STORE_GROUP_EXTRACTOR_IMPLEMENTATION); } /** * This method will update the permission cache of the tenant which is related to the given usename * * @param username User name to find the relevant tenant * @throws UserStoreException if the permission update failed */ public static void updatePermissionCache(String username) throws UserStoreException { String tenantDomain = MultitenantUtils.getTenantDomain(username); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain); PermissionUpdateUtil.updatePermissionTree(tenantId); } /** * Check whether given application name is available under current subscriber or group * * @param subscriber subscriber name * @param applicationName application name * @param groupId group of the subscriber * @return true if application is available for the subscriber * @throws APIManagementException if failed to get applications for given subscriber */ public static boolean isApplicationExist(String subscriber, String applicationName, String groupId) throws APIManagementException { return ApiMgtDAO.getInstance().isApplicationExist(applicationName, subscriber, groupId); } public static String getHostAddress() { if (hostAddress != null) { return hostAddress; } hostAddress = ServerConfiguration.getInstance().getFirstProperty(APIConstants.API_MANAGER_HOSTNAME); if (null == hostAddress) { if (getLocalAddress() != null) { hostAddress = getLocalAddress().getHostName(); } if (hostAddress == null) { hostAddress = APIConstants.API_MANAGER_HOSTNAME_UNKNOWN; } return hostAddress; } else { return hostAddress; } } private static InetAddress getLocalAddress() { Enumeration<NetworkInterface> ifaces = null; try { ifaces = NetworkInterface.getNetworkInterfaces(); } catch (SocketException e) { log.error("Failed to get host address", e); } if (ifaces != null) 
{ while (ifaces.hasMoreElements()) { NetworkInterface iface = ifaces.nextElement(); Enumeration<InetAddress> addresses = iface.getInetAddresses(); while (addresses.hasMoreElements()) { InetAddress addr = addresses.nextElement(); if (addr instanceof Inet4Address && !addr.isLoopbackAddress()) { return addr; } } } } return null; } public static boolean isStringArray(Object[] args) { for (Object arg : args) { if (!(arg instanceof String)) { return false; } } return true; } public static String appendDomainWithUser(String username, String domain) { if (username.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR) || username.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT) || MultitenantConstants.SUPER_TENANT_NAME.equalsIgnoreCase(username)) { return username; } return username + APIConstants.EMAIL_DOMAIN_SEPARATOR + domain; } /* * Util method to convert a java object to a json object * */ public static String convertToString(Object obj) { Gson gson = new Gson(); return gson.toJson(obj); } public static String getSequencePath(APIIdentifier identifier, String pathFlow) { String artifactPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR + identifier.getApiName() + RegistryConstants.PATH_SEPARATOR + identifier.getVersion(); return artifactPath + RegistryConstants.PATH_SEPARATOR + pathFlow + RegistryConstants.PATH_SEPARATOR; } private static String getAPIMonetizationCategory(Set<Tier> tiers, String tenantDomain) throws APIManagementException { boolean isPaidFound = false; boolean isFreeFound = false; for (Tier tier : tiers) { if (isTierPaid(tier.getName(), tenantDomain)) { isPaidFound = true; } else { isFreeFound = true; if (isPaidFound) { break; } } } if (!isPaidFound) { return APIConstants.API_CATEGORY_FREE; } else if (!isFreeFound) { return APIConstants.API_CATEGORY_PAID; } else { return APIConstants.API_CATEGORY_FREEMIUM; } } private static boolean isTierPaid(String tierName, String tenantDomainName) throws APIManagementException { String tenantDomain = tenantDomainName; if (tenantDomain == null) { tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } if (APIConstants.UNLIMITED_TIER.equalsIgnoreCase(tierName)) { return isUnlimitedTierPaid(tenantDomain); } boolean isPaid = false; Tier tier = getTierFromCache(tierName, tenantDomain); if (tier != null) { final Map<String, Object> tierAttributes = tier.getTierAttributes(); if (tierAttributes != null) { String isPaidValue = tier.getTierPlan(); if (isPaidValue != null && APIConstants.COMMERCIAL_TIER_PLAN.equals(isPaidValue)) { isPaid = true; } } } else { throw new APIManagementException("Tier " + tierName + "cannot be found"); } return isPaid; } private static boolean isUnlimitedTierPaid(String tenantDomain) throws APIManagementException { JSONObject apiTenantConfig = null; try { String content = null; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); Registry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getConfigSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.API_TENANT_CONF_LOCATION)) { Resource resource = registry.get(APIConstants.API_TENANT_CONF_LOCATION); content = new String((byte[]) resource.getContent(), Charset.defaultCharset()); } if (content != null) { JSONParser parser = new JSONParser(); 
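                // The tenant configuration resource read above (APIConstants.API_TENANT_CONF_LOCATION, typically
                // tenant-conf.json) is parsed here; the IsUnlimitedTierPaid flag is then looked up from the
                // parsed JSON below.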
apiTenantConfig = (JSONObject) parser.parse(content); } } catch (UserStoreException e) { handleException("UserStoreException thrown when getting API tenant config from registry", e); } catch (RegistryException e) { handleException("RegistryException thrown when getting API tenant config from registry", e); } catch (ParseException e) { handleException("ParseException thrown when passing API tenant config from registry", e); } finally { PrivilegedCarbonContext.endTenantFlow(); } if (apiTenantConfig != null) { Object value = apiTenantConfig.get(APIConstants.API_TENANT_CONF_IS_UNLIMITED_TIER_PAID); if (value != null) { return Boolean.parseBoolean(value.toString()); } else { throw new APIManagementException(APIConstants.API_TENANT_CONF_IS_UNLIMITED_TIER_PAID + " config does not exist for tenant " + tenantDomain); } } return false; } public static Tier getTierFromCache(String tierName, String tenantDomain) throws APIManagementException { Map<String, Tier> tierMap = null; try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); if (getTiersCache().containsKey(tierName)) { tierMap = (Map<String, Tier>) getTiersCache().get(tierName); } else { int requestedTenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (!APIUtil.isAdvanceThrottlingEnabled()) { if (requestedTenantId == 0) { tierMap = APIUtil.getTiers(); } else { tierMap = APIUtil.getTiers(requestedTenantId); } } else { if (requestedTenantId == 0) { tierMap = APIUtil.getAdvancedSubsriptionTiers(); } else { tierMap = APIUtil.getAdvancedSubsriptionTiers(requestedTenantId); } } getTiersCache().put(tierName, tierMap); } } finally { PrivilegedCarbonContext.endTenantFlow(); } return tierMap.get(tierName); } public static void clearTiersCache(String tenantDomain) { try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); getTiersCache().removeAll(); } finally { PrivilegedCarbonContext.endTenantFlow(); } } private static Cache getTiersCache() { return Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER). 
getCache(APIConstants.TIERS_CACHE); } /** * Util method to return the artifact from a registry resource path * * @param apiIdentifier * @param registry * @return * @throws APIManagementException */ public static GenericArtifact getAPIArtifact(APIIdentifier apiIdentifier, Registry registry) throws APIManagementException { String apiPath = APIUtil.getAPIPath(apiIdentifier); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); try { Resource apiResource = registry.get(apiPath); String artifactId = apiResource.getUUID(); if (artifactId == null) { throw new APIManagementException("artifact id is null for : " + apiPath); } return artifactManager.getGenericArtifact(artifactId); } catch (RegistryException e) { handleException("Failed to get API artifact from : " + apiPath, e); return null; } } /** * Return a http client instance * * @param port - server port * @param protocol- service endpoint protocol http/https * @return */ public static HttpClient getHttpClient(int port, String protocol) { SchemeRegistry registry = new SchemeRegistry(); SSLSocketFactory socketFactory = SSLSocketFactory.getSocketFactory(); String sslValue = null; AxisConfiguration axis2Config = ServiceReferenceHolder.getContextService().getServerConfigContext() .getAxisConfiguration(); org.apache.axis2.description.Parameter sslVerifyClient = axis2Config.getTransportIn(APIConstants.HTTPS_PROTOCOL) .getParameter(APIConstants.SSL_VERIFY_CLIENT); if (sslVerifyClient != null) { sslValue = (String) sslVerifyClient.getValue(); } if (APIConstants.HTTPS_PROTOCOL.equals(protocol)) { try { if (APIConstants.SSL_VERIFY_CLIENT_STATUS_REQUIRE.equals(sslValue)) { socketFactory = createSocketFactory(); } if (port >= 0) { registry.register(new Scheme(APIConstants.HTTPS_PROTOCOL, port, socketFactory)); } else { registry.register(new Scheme(APIConstants.HTTPS_PROTOCOL, 443, socketFactory)); } } catch (APIManagementException e) { log.error(e); } } else if (APIConstants.HTTP_PROTOCOL.equals(protocol)) { if (port >= 0) { registry.register(new Scheme(APIConstants.HTTP_PROTOCOL, port, PlainSocketFactory.getSocketFactory())); } else { registry.register(new Scheme(APIConstants.HTTP_PROTOCOL, 80, PlainSocketFactory.getSocketFactory())); } } HttpParams params = new BasicHttpParams(); ThreadSafeClientConnManager tcm = new ThreadSafeClientConnManager(registry); return new DefaultHttpClient(tcm, params); } private static SSLSocketFactory createSocketFactory() throws APIManagementException { KeyStore keyStore; String keyStorePath = null; String keyStorePassword; try { keyStorePath = CarbonUtils.getServerConfiguration().getFirstProperty("Security.KeyStore.Location"); keyStorePassword = CarbonUtils.getServerConfiguration() .getFirstProperty("Security.KeyStore.Password"); keyStore = KeyStore.getInstance("JKS"); keyStore.load(new FileInputStream(keyStorePath), keyStorePassword.toCharArray()); SSLSocketFactory sslSocketFactory = new SSLSocketFactory(keyStore, keyStorePassword); return sslSocketFactory; } catch (KeyStoreException e) { handleException("Failed to read from Key Store", e); } catch (CertificateException e) { handleException("Failed to read Certificate", e); } catch (NoSuchAlgorithmException e) { handleException("Failed to load Key Store from " + keyStorePath, e); } catch (IOException e) { handleException("Key Store not found in " + keyStorePath, e); } catch (UnrecoverableKeyException e) { handleException("Failed to load key from" + keyStorePath, e); } catch (KeyManagementException e) { 
handleException("Failed to load key from" + keyStorePath, e); } return null; } /** * This method will return a relative URL for given registry resource which we can used to retrieve the resource * from the web UI. For example, URI for a thumbnail icon of a tag can be generated from this method. * * @param resourceType Type of the registry resource. Based on this value the way URL is generate can be changed. * @param tenantDomain tenant domain of the resource * @param resourcePath path of the resource * @return relative path of the registry resource from the web context level */ public static String getRegistryResourcePathForUI(APIConstants.RegistryResourceTypesForUI resourceType, String tenantDomain, String resourcePath) { StringBuilder resourcePathBuilder = new StringBuilder(); if (APIConstants.RegistryResourceTypesForUI.TAG_THUMBNAIL.equals(resourceType)) { if (tenantDomain != null && !"".equals(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { // The compiler will concatenate the 2 constants. If we use the builder to append the 2 constants, then // it will happen during the runtime. resourcePathBuilder.append(RegistryConstants.PATH_SEPARATOR + MultitenantConstants .TENANT_AWARE_URL_PREFIX + RegistryConstants.PATH_SEPARATOR).append(tenantDomain); } // The compiler will concatenate the 2 constants. If we use the builder to append the 2 constants, then // it will happen during the runtime. resourcePathBuilder.append(APIConstants.REGISTRY_RESOURCE_PREFIX + RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH); resourcePathBuilder.append(resourcePath); } return resourcePathBuilder.toString(); } /** * Gets the class given the class name. * * @param className the fully qualified name of the class. * @return an instance of the class with the given name * @throws ClassNotFoundException * @throws IllegalAccessException * @throws InstantiationException */ public static Class getClassForName(String className) throws ClassNotFoundException, IllegalAccessException, InstantiationException { return Class.forName(className); } /** * This method will check the validity of given url. * otherwise we will mark it as invalid url. * * @param url url tobe tested * @return true if its valid url else fale */ public static boolean isValidURL(String url) { if (url == null) { return false; } try { URL urlVal = new URL(url); // If there are no issues, then this is a valid URL. Hence returning true. 
return true; } catch (MalformedURLException e) { return false; } } /** * @param tenantDomain Tenant domain to be used to get configurations for REST API scopes * @return JSON object which contains configuration for REST API scopes * @throws APIManagementException */ public static JSONObject getTenantRESTAPIScopesConfig(String tenantDomain) throws APIManagementException { JSONObject apiTenantConfig = null; JSONObject restAPIConfigJSON = null; try { String content = new APIMRegistryServiceImpl().getConfigRegistryResourceContent(tenantDomain, APIConstants.API_TENANT_CONF_LOCATION); if (content != null) { JSONParser parser = new JSONParser(); apiTenantConfig = (JSONObject) parser.parse(content); if (apiTenantConfig != null) { Object value = apiTenantConfig.get(APIConstants.REST_API_SCOPES_CONFIG); if (value != null) { restAPIConfigJSON = (JSONObject) value; } else { throw new APIManagementException("RESTAPIScopes" + " config does not exist for tenant " + tenantDomain); } } } } catch (UserStoreException e) { handleException("UserStoreException thrown when getting API tenant config from registry", e); } catch (RegistryException e) { handleException("RegistryException thrown when getting API tenant config from registry", e); } catch (ParseException e) { handleException("ParseException thrown when passing API tenant config from registry", e); } return restAPIConfigJSON; } /** * @param tenantDomain Tenant domain to be used to get default role configurations * @return JSON object which contains configuration for default roles * @throws APIManagementException */ public static JSONObject getTenantDefaultRoles(String tenantDomain) throws APIManagementException { JSONObject apiTenantConfig; JSONObject defaultRolesConfigJSON = null; try { String content = new APIMRegistryServiceImpl().getConfigRegistryResourceContent(tenantDomain, APIConstants.API_TENANT_CONF_LOCATION); if (content != null) { JSONParser parser = new JSONParser(); apiTenantConfig = (JSONObject) parser.parse(content); if (apiTenantConfig != null) { Object value = apiTenantConfig.get(APIConstants.API_TENANT_CONF_DEFAULT_ROLES); if (value != null) { defaultRolesConfigJSON = (JSONObject) value; } else { //Config might not exist for migrated environments from previous release if (log.isDebugEnabled()) { log.debug(APIConstants.API_TENANT_CONF_DEFAULT_ROLES + " config does not exist for tenant " + tenantDomain); } } } } } catch (UserStoreException e) { handleException("Error while retrieving user realm for tenant " + tenantDomain, e); } catch (RegistryException e) { handleException("Error while retrieving tenant configuration file for tenant " + tenantDomain, e); } catch (ParseException e) { handleException( "Error while parsing tenant configuration file while retrieving default roles for tenant " + tenantDomain, e); } return defaultRolesConfigJSON; } /** * @param config JSON configuration object with scopes and associated roles * @return Map of scopes which contains scope names and associated role list */ public static Map<String, String> getRESTAPIScopesFromConfig(JSONObject config) { Map<String, String> scopes = new HashMap<String, String>(); JSONArray scopesArray = (JSONArray) config.get("Scope"); for (Object scopeObj : scopesArray) { JSONObject scope = (JSONObject) scopeObj; String scopeName = scope.get(APIConstants.REST_API_SCOPE_NAME).toString(); String scopeRoles = scope.get(APIConstants.REST_API_SCOPE_ROLE).toString(); scopes.put(scopeName, scopeRoles); } return scopes; } /** * Determines if the scope is specified in the whitelist. 
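     * <p>
     * For example, when no WhitelistedScopes entries are configured, the default whitelist contains the OpenID
     * scope and the device scope pattern, so any scope matching one of those patterns is reported as white listed.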
* * @param scope - The scope key to check * @return - 'true' if the scope is white listed. 'false' if not. */ public static boolean isWhiteListedScope(String scope) { if (whiteListedScopes == null) { APIManagerConfiguration configuration = ServiceReferenceHolder.getInstance(). getAPIManagerConfigurationService().getAPIManagerConfiguration(); // Read scope whitelist from Configuration. List<String> whitelist = configuration.getProperty(APIConstants.WHITELISTED_SCOPES); // If whitelist is null, default scopes will be put. if (whitelist == null) { whitelist = new ArrayList<String>(); whitelist.add(APIConstants.OPEN_ID_SCOPE_NAME); whitelist.add(APIConstants.DEVICE_SCOPE_PATTERN); } whiteListedScopes = new HashSet<String>(whitelist); } for (String scopeTobeSkipped : whiteListedScopes) { if (scope.matches(scopeTobeSkipped)) { return true; } } return false; } public static String getServerURL() throws APIManagementException { String hostName = ServerConfiguration.getInstance().getFirstProperty(APIConstants.HOST_NAME); try { if (hostName == null) { hostName = NetworkUtils.getLocalHostname(); } } catch (SocketException e) { throw new APIManagementException("Error while trying to read hostname.", e); } String mgtTransport = CarbonUtils.getManagementTransport(); AxisConfiguration axisConfiguration = ServiceReferenceHolder .getContextService().getServerConfigContext().getAxisConfiguration(); int mgtTransportPort = CarbonUtils.getTransportProxyPort(axisConfiguration, mgtTransport); if (mgtTransportPort <= 0) { mgtTransportPort = CarbonUtils.getTransportPort(axisConfiguration, mgtTransport); } String serverUrl = mgtTransport + "://" + hostName.toLowerCase(); // If it's well known HTTPS port, skip adding port if (mgtTransportPort != APIConstants.DEFAULT_HTTPS_PORT) { serverUrl += ":" + mgtTransportPort; } // If ProxyContextPath is defined then append it String proxyContextPath = ServerConfiguration.getInstance().getFirstProperty(APIConstants.PROXY_CONTEXT_PATH); if (proxyContextPath != null && !proxyContextPath.trim().isEmpty()) { if (proxyContextPath.charAt(0) == '/') { serverUrl += proxyContextPath; } else { serverUrl += "/" + proxyContextPath; } } return serverUrl; } /** * Extract the provider of the API from name * * @param apiVersion - API Name with version * @param tenantDomain - tenant domain of the API * @return API publisher name */ public static String getAPIProviderFromRESTAPI(String apiVersion, String tenantDomain) { int index = apiVersion.indexOf("--"); if (StringUtils.isEmpty(tenantDomain)) { tenantDomain = org.wso2.carbon.utils.multitenancy.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } String apiProvider; if (index != -1) { apiProvider = apiVersion.substring(0, index); if (apiProvider.contains(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT)) { apiProvider = apiProvider.replace(APIConstants.EMAIL_DOMAIN_SEPARATOR_REPLACEMENT, APIConstants.EMAIL_DOMAIN_SEPARATOR); } if (!apiProvider.endsWith(tenantDomain)) { apiProvider = apiProvider + '@' + tenantDomain; } return apiProvider; } return null; } /** * Used to generate CORS Configuration object from CORS Configuration Json * * @param jsonString json representation of CORS configuration * @return CORSConfiguration Object */ public static CORSConfiguration getCorsConfigurationDtoFromJson(String jsonString) { return new Gson().fromJson(jsonString, CORSConfiguration.class); } /** * Used to generate Json string from CORS Configuration object * * @param corsConfiguration CORSConfiguration Object * @return Json string according to 
CORSConfiguration Object */ public static String getCorsConfigurationJsonFromDto(CORSConfiguration corsConfiguration) { return new Gson().toJson(corsConfiguration); } /** * Used to get access control allowed headers according to the api-manager.xml * * @return access control allowed headers string */ public static String getAllowedHeaders() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ACCESS_CTL_ALLOW_HEADERS); } /** * Used to get access control allowed methods define in api-manager.xml * * @return access control allowed methods string */ public static String getAllowedMethods() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ACCESS_CTL_ALLOW_METHODS); } /** * Used to get access control allowed credential define in api-manager.xml * * @return true if access control allow credential enabled */ public static boolean isAllowCredentials() { String allowCredentials = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ACCESS_CTL_ALLOW_CREDENTIALS); return Boolean.parseBoolean(allowCredentials); } /** * Used to get CORS Configuration enabled from api-manager.xml * * @return true if CORS-Configuration is enabled in api-manager.xml */ public static boolean isCORSEnabled() { String corsEnabled = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ENABLED); return Boolean.parseBoolean(corsEnabled); } /** * Used to get access control allowed origins define in api-manager.xml * * @return allow origins list defined in api-manager.xml */ public static String getAllowedOrigins() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getFirstProperty(APIConstants.CORS_CONFIGURATION_ACCESS_CTL_ALLOW_ORIGIN); } /** * Used to get CORSConfiguration according to the API artifact * * @param artifact registry artifact for the API * @return CORS Configuration object extract from the artifact * @throws GovernanceException if attribute couldn't fetch from the artifact. 
*/
    public static CORSConfiguration getCorsConfigurationFromArtifact(GovernanceArtifact artifact)
            throws GovernanceException {
        CORSConfiguration corsConfiguration = APIUtil.getCorsConfigurationDtoFromJson(
                artifact.getAttribute(APIConstants.API_OVERVIEW_CORS_CONFIGURATION));
        if (corsConfiguration == null) {
            corsConfiguration = getDefaultCorsConfiguration();
        }
        return corsConfiguration;
    }

    /**
     * Used to get the default CORS Configuration object according to the configuration defined in api-manager.xml
     *
     * @return CORSConfiguration object according to the values defined in api-manager.xml
     */
    public static CORSConfiguration getDefaultCorsConfiguration() {
        List<String> allowHeadersStringSet = Arrays.asList(getAllowedHeaders().split(","));
        List<String> allowMethodsStringSet = Arrays.asList(getAllowedMethods().split(","));
        List<String> allowOriginsStringSet = Arrays.asList(getAllowedOrigins().split(","));
        return new CORSConfiguration(false, allowOriginsStringSet, false, allowHeadersStringSet,
                allowMethodsStringSet);
    }

    /**
     * Used to get the API name from the synapse API name
     *
     * @param api_version API name from the synapse configuration
     * @return API name according to the tenant
     */
    public static String getAPINamefromRESTAPI(String api_version) {
        int index = api_version.indexOf("--");
        String api;
        if (index != -1) {
            api_version = api_version.substring(index + 2);
        }
        api = api_version.split(":")[0];
        index = api.indexOf("--");
        if (index != -1) {
            api = api.substring(index + 2);
        }
        return api;
    }

    /**
     * Return all alert types for the given stakeholder.
     *
     * @param stakeHolder value "publisher" for publisher, "subscriber" for subscriber, "admin-dashboard" for admin
     * @return HashMap of alert types.
     * @throws APIManagementException
     */
    public static HashMap<Integer, String> getAllAlertTypeByStakeHolder(String stakeHolder)
            throws APIManagementException {
        HashMap<Integer, String> map;
        map = ApiMgtDAO.getInstance().getAllAlertTypesByStakeHolder(stakeHolder);
        return map;
    }

    /**
     * @param userName    user name with tenant domain ex: [email protected]
     * @param stakeHolder value "p" for publisher, "s" for subscriber, "a" for admin
     * @return list of the saved alert type IDs.
     * @throws APIManagementException
     */
    public static List<Integer> getSavedAlertTypesIdsByUserNameAndStakeHolder(String userName, String stakeHolder)
            throws APIManagementException {
        List<Integer> list;
        list = ApiMgtDAO.getInstance().getSavedAlertTypesIdsByUserNameAndStakeHolder(userName, stakeHolder);
        return list;
    }

    /**
     * This util method retrieves the saved email list by user and stakeholder name.
     *
     * @param userName    user name with tenant ID.
     * @param stakeHolder "p" for publisher, "s" for store and "a" for the admin dashboard
     * @return list of saved email addresses.
* @throws APIManagementException */ public static List<String> retrieveSavedEmailList(String userName, String stakeHolder) throws APIManagementException { List<String> list; list = ApiMgtDAO.getInstance().retrieveSavedEmailList(userName, stakeHolder); return list; } private static boolean isDefaultQuotaPolicyContentAware(Policy policy) { if (PolicyConstants.BANDWIDTH_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { return true; } return false; } public static void addDefaultSuperTenantAdvancedThrottlePolicies() throws APIManagementException { int tenantId = MultitenantConstants.SUPER_TENANT_ID; long[] requestCount = new long[]{50, 20, 10, Integer.MAX_VALUE}; //Adding application level throttle policies String[] appPolicies = new String[]{APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_TWENTY_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_TEN_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_UNLIMITED}; String[] appPolicyDecs = new String[]{APIConstants.DEFAULT_APP_POLICY_LARGE_DESC, APIConstants.DEFAULT_APP_POLICY_MEDIUM_DESC, APIConstants.DEFAULT_APP_POLICY_SMALL_DESC, APIConstants.DEFAULT_APP_POLICY_UNLIMITED_DESC}; ApiMgtDAO apiMgtDAO = ApiMgtDAO.getInstance(); String policyName; //Add application level throttle policies for (int i = 0; i < appPolicies.length; i++) { policyName = appPolicies[i]; if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_APP, tenantId, policyName)) { ApplicationPolicy applicationPolicy = new ApplicationPolicy(policyName); applicationPolicy.setDisplayName(policyName); applicationPolicy.setDescription(appPolicyDecs[i]); applicationPolicy.setTenantId(tenantId); applicationPolicy.setDeployed(true); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCount[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); applicationPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); apiMgtDAO.addApplicationPolicy(applicationPolicy); } } //Adding Subscription level policies long[] requestCountSubPolicies = new long[]{5000, 2000, 1000, 500, Integer.MAX_VALUE}; String[] subPolicies = new String[]{APIConstants.DEFAULT_SUB_POLICY_GOLD, APIConstants.DEFAULT_SUB_POLICY_SILVER, APIConstants.DEFAULT_SUB_POLICY_BRONZE, APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED, APIConstants.DEFAULT_SUB_POLICY_UNLIMITED}; String[] subPolicyDecs = new String[]{APIConstants.DEFAULT_SUB_POLICY_GOLD_DESC, APIConstants.DEFAULT_SUB_POLICY_SILVER_DESC, APIConstants.DEFAULT_SUB_POLICY_BRONZE_DESC, APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED_DESC, APIConstants.DEFAULT_SUB_POLICY_UNLIMITED_DESC}; for (int i = 0; i < subPolicies.length; i++) { policyName = subPolicies[i]; if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_SUB, tenantId, policyName)) { SubscriptionPolicy subscriptionPolicy = new SubscriptionPolicy(policyName); subscriptionPolicy.setDisplayName(policyName); subscriptionPolicy.setDescription(subPolicyDecs[i]); subscriptionPolicy.setTenantId(tenantId); subscriptionPolicy.setDeployed(true); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCountSubPolicies[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); 
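                // Each default subscription tier (Gold, Silver, Bronze, Unauthenticated and Unlimited) gets a
                // request-count quota over a one-minute window, using the counts defined in
                // requestCountSubPolicies above.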
defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); subscriptionPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); subscriptionPolicy.setStopOnQuotaReach(true); subscriptionPolicy.setBillingPlan(APIConstants.BILLING_PLAN_FREE); apiMgtDAO.addSubscriptionPolicy(subscriptionPolicy); } } //Adding Resource level policies String[] apiPolicies = new String[]{APIConstants.DEFAULT_API_POLICY_FIFTY_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_TWENTY_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_TEN_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_UNLIMITED}; String[] apiPolicyDecs = new String[]{APIConstants.DEFAULT_API_POLICY_ULTIMATE_DESC, APIConstants.DEFAULT_API_POLICY_PLUS_DESC, APIConstants.DEFAULT_API_POLICY_BASIC_DESC, APIConstants.DEFAULT_API_POLICY_UNLIMITED_DESC}; long[] requestCountApiPolicies = new long[]{50000, 20000, 10000, Integer.MAX_VALUE}; for (int i = 0; i < apiPolicies.length; i++) { policyName = apiPolicies[i]; if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_API, tenantId, policyName)) { APIPolicy apiPolicy = new APIPolicy(policyName); apiPolicy.setDisplayName(policyName); apiPolicy.setDescription(apiPolicyDecs[i]); apiPolicy.setTenantId(tenantId); apiPolicy.setUserLevel(APIConstants.API_POLICY_API_LEVEL); apiPolicy.setDeployed(true); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCountApiPolicies[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); apiPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); apiMgtDAO.addAPIPolicy(apiPolicy); } } } public static void addDefaultTenantAdvancedThrottlePolicies(String tenantDomain, int tenantId) throws APIManagementException { ThrottlePolicyDeploymentManager deploymentManager = ThrottlePolicyDeploymentManager.getInstance(); ThrottlePolicyTemplateBuilder policyBuilder = new ThrottlePolicyTemplateBuilder(); Map<String, Long> defualtLimits = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration() .getThrottleProperties().getDefaultThrottleTierLimits(); long tenPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_APP_POLICY_TEN_REQ_PER_MIN) ? defualtLimits.get(APIConstants.DEFAULT_APP_POLICY_TEN_REQ_PER_MIN) : 10; long twentyPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_APP_POLICY_TWENTY_REQ_PER_MIN) ? defualtLimits.get(APIConstants.DEFAULT_APP_POLICY_TWENTY_REQ_PER_MIN) : 20; long fiftyPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN) ? 
defualtLimits.get(APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN) : 50; long[] requestCount = new long[]{fiftyPerMinTier, twentyPerMinTier, tenPerMinTier, Integer.MAX_VALUE}; //Adding application level throttle policies String[] appPolicies = new String[]{APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_TWENTY_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_TEN_REQ_PER_MIN, APIConstants.DEFAULT_APP_POLICY_UNLIMITED}; String[] appPolicyDecs = new String[]{APIConstants.DEFAULT_APP_POLICY_LARGE_DESC, APIConstants.DEFAULT_APP_POLICY_MEDIUM_DESC, APIConstants.DEFAULT_APP_POLICY_SMALL_DESC, APIConstants.DEFAULT_APP_POLICY_UNLIMITED_DESC}; ApiMgtDAO apiMgtDAO = ApiMgtDAO.getInstance(); String policyName; //Add application level throttle policies for (int i = 0; i < appPolicies.length; i++) { policyName = appPolicies[i]; boolean needDeployment = false; ApplicationPolicy applicationPolicy = new ApplicationPolicy(policyName); applicationPolicy.setDisplayName(policyName); applicationPolicy.setDescription(appPolicyDecs[i]); applicationPolicy.setTenantId(tenantId); applicationPolicy.setDeployed(false); applicationPolicy.setTenantDomain(tenantDomain); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCount[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); applicationPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_APP, tenantId, policyName)) { apiMgtDAO.addApplicationPolicy(applicationPolicy); needDeployment = true; } if (!apiMgtDAO.isPolicyDeployed(PolicyConstants.POLICY_LEVEL_APP, tenantId, policyName)) { needDeployment = true; } if (needDeployment) { String policyString; try { policyString = policyBuilder.getThrottlePolicyForAppLevel(applicationPolicy); String policyFile = applicationPolicy.getTenantDomain() + "_" + PolicyConstants.POLICY_LEVEL_APP + "_" + applicationPolicy.getPolicyName(); if(!APIConstants.DEFAULT_APP_POLICY_UNLIMITED.equalsIgnoreCase(policyName)) { deploymentManager.deployPolicyToGlobalCEP(policyString); } apiMgtDAO.setPolicyDeploymentStatus(PolicyConstants.POLICY_LEVEL_APP, applicationPolicy.getPolicyName(), applicationPolicy.getTenantId(), true); } catch (APITemplateException e) { throw new APIManagementException("Error while adding default subscription policy" + applicationPolicy.getPolicyName(), e); } } } long bronzeTierLimit = defualtLimits.containsKey(APIConstants.DEFAULT_SUB_POLICY_BRONZE) ? defualtLimits.get(APIConstants.DEFAULT_SUB_POLICY_BRONZE) : 1000; long silverTierLimit = defualtLimits.containsKey(APIConstants.DEFAULT_SUB_POLICY_SILVER) ? defualtLimits.get(APIConstants.DEFAULT_SUB_POLICY_SILVER) : 2000; long goldTierLimit = defualtLimits.containsKey(APIConstants.DEFAULT_SUB_POLICY_GOLD) ? defualtLimits.get(APIConstants.DEFAULT_SUB_POLICY_GOLD) : 5000; long unauthenticatedTierLimit = defualtLimits.containsKey(APIConstants.DEFAULT_APP_POLICY_FIFTY_REQ_PER_MIN) ? 
defualtLimits.get(APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED) : 500; //Adding Subscription level policies long[] requestCountSubPolicies = new long[]{goldTierLimit, silverTierLimit, bronzeTierLimit, unauthenticatedTierLimit, Integer.MAX_VALUE}; String[] subPolicies = new String[]{APIConstants.DEFAULT_SUB_POLICY_GOLD, APIConstants.DEFAULT_SUB_POLICY_SILVER, APIConstants.DEFAULT_SUB_POLICY_BRONZE, APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED, APIConstants.DEFAULT_SUB_POLICY_UNLIMITED}; String[] subPolicyDecs = new String[]{APIConstants.DEFAULT_SUB_POLICY_GOLD_DESC, APIConstants.DEFAULT_SUB_POLICY_SILVER_DESC, APIConstants.DEFAULT_SUB_POLICY_BRONZE_DESC, APIConstants.DEFAULT_SUB_POLICY_UNAUTHENTICATED_DESC, APIConstants.DEFAULT_SUB_POLICY_UNLIMITED_DESC}; for (int i = 0; i < subPolicies.length; i++) { policyName = subPolicies[i]; boolean needDeployment = false; SubscriptionPolicy subscriptionPolicy = new SubscriptionPolicy(policyName); subscriptionPolicy.setDisplayName(policyName); subscriptionPolicy.setDescription(subPolicyDecs[i]); subscriptionPolicy.setTenantId(tenantId); subscriptionPolicy.setDeployed(false); subscriptionPolicy.setTenantDomain(tenantDomain); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCountSubPolicies[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); subscriptionPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); subscriptionPolicy.setStopOnQuotaReach(true); subscriptionPolicy.setBillingPlan(APIConstants.BILLING_PLAN_FREE); if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_SUB, tenantId, policyName)) { apiMgtDAO.addSubscriptionPolicy(subscriptionPolicy); needDeployment = true; } if (!apiMgtDAO.isPolicyDeployed(PolicyConstants.POLICY_LEVEL_SUB, tenantId, policyName)) { needDeployment = true; } if (needDeployment) { String policyString; try { policyString = policyBuilder.getThrottlePolicyForSubscriptionLevel(subscriptionPolicy); String policyFile = subscriptionPolicy.getTenantDomain() + "_" +PolicyConstants.POLICY_LEVEL_SUB + "_" + subscriptionPolicy.getPolicyName(); if(!APIConstants.DEFAULT_SUB_POLICY_UNLIMITED.equalsIgnoreCase(policyName)) { deploymentManager.deployPolicyToGlobalCEP(policyString); } apiMgtDAO.setPolicyDeploymentStatus(PolicyConstants.POLICY_LEVEL_SUB, subscriptionPolicy.getPolicyName(), subscriptionPolicy.getTenantId(), true); } catch (APITemplateException e) { throw new APIManagementException("Error while adding default application policy " + subscriptionPolicy.getPolicyName(), e); } } } long tenThousandPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_API_POLICY_TEN_THOUSAND_REQ_PER_MIN) ? defualtLimits.get(APIConstants.DEFAULT_API_POLICY_TEN_THOUSAND_REQ_PER_MIN) : 10000; long twentyThousandPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_API_POLICY_TWENTY_THOUSAND_REQ_PER_MIN) ? defualtLimits.get(APIConstants.DEFAULT_API_POLICY_TWENTY_THOUSAND_REQ_PER_MIN) : 20000; long fiftyThousandPerMinTier = defualtLimits.containsKey(APIConstants.DEFAULT_API_POLICY_FIFTY_THOUSAND_REQ_PER_MIN) ? 
defualtLimits.get(APIConstants.DEFAULT_API_POLICY_FIFTY_THOUSAND_REQ_PER_MIN) : 50000; long[] requestCountAPIPolicies = new long[]{fiftyThousandPerMinTier, twentyThousandPerMinTier, tenThousandPerMinTier, Integer.MAX_VALUE}; //Adding Resource level policies String[] apiPolicies = new String[]{APIConstants.DEFAULT_API_POLICY_FIFTY_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_TWENTY_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_TEN_THOUSAND_REQ_PER_MIN, APIConstants.DEFAULT_API_POLICY_UNLIMITED}; String[] apiPolicyDecs = new String[]{APIConstants.DEFAULT_API_POLICY_ULTIMATE_DESC, APIConstants.DEFAULT_API_POLICY_PLUS_DESC, APIConstants.DEFAULT_API_POLICY_BASIC_DESC, APIConstants.DEFAULT_API_POLICY_UNLIMITED_DESC}; for (int i = 0; i < apiPolicies.length; i++) { boolean needDeployment = false; policyName = apiPolicies[i]; APIPolicy apiPolicy = new APIPolicy(policyName); apiPolicy.setDisplayName(policyName); apiPolicy.setDescription(apiPolicyDecs[i]); apiPolicy.setTenantId(tenantId); apiPolicy.setUserLevel(APIConstants.API_POLICY_API_LEVEL); apiPolicy.setDeployed(false); apiPolicy.setTenantDomain(tenantDomain); QuotaPolicy defaultQuotaPolicy = new QuotaPolicy(); RequestCountLimit requestCountLimit = new RequestCountLimit(); requestCountLimit.setRequestCount(requestCountAPIPolicies[i]); requestCountLimit.setUnitTime(1); requestCountLimit.setTimeUnit(APIConstants.TIME_UNIT_MINUTE); defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE); defaultQuotaPolicy.setLimit(requestCountLimit); apiPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy); if (!apiMgtDAO.isPolicyExist(PolicyConstants.POLICY_LEVEL_API, tenantId, policyName)) { apiMgtDAO.addAPIPolicy(apiPolicy); } if (!apiMgtDAO.isPolicyDeployed(PolicyConstants.POLICY_LEVEL_API, tenantId, policyName)) { needDeployment = true; } if (needDeployment) { String policyString; try { policyString = policyBuilder.getThrottlePolicyForAPILevelDefault(apiPolicy); String policyFile = apiPolicy.getTenantDomain() + "_" +PolicyConstants.POLICY_LEVEL_API + "_" + apiPolicy.getPolicyName() + "_default"; if(!APIConstants.DEFAULT_API_POLICY_UNLIMITED.equalsIgnoreCase(policyName)) { deploymentManager.deployPolicyToGlobalCEP(policyString); } apiMgtDAO.setPolicyDeploymentStatus(PolicyConstants.POLICY_LEVEL_API, apiPolicy.getPolicyName(), apiPolicy.getTenantId(), true); } catch (APITemplateException e) { throw new APIManagementException("Error while adding default api policy " + apiPolicy.getPolicyName(), e); } } } } /** * Used to get advence throttling is enable * * @return condition of advance throttling */ public static boolean isAdvanceThrottlingEnabled() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration() .getThrottleProperties().isEnabled(); } /** * Used to get unlimited throttling tier is enable * * @return condition of enable unlimited tier */ public static boolean isEnabledUnlimitedTier() { ThrottleProperties throttleProperties = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration() .getThrottleProperties(); if (throttleProperties.isEnabled()) { return throttleProperties.isEnableUnlimitedTier(); } else { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). 
getAPIManagerConfigurationService().getAPIManagerConfiguration(); return JavaUtils.isTrueExplicitly(config.getFirstProperty(APIConstants.ENABLE_UNLIMITED_TIER)); } } /** * Used to get subscription Spike arrest Enable * * @return condition of Subscription Spike arrest configuration */ public static boolean isEnabledSubscriptionSpikeArrest() { ThrottleProperties throttleProperties = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration() .getThrottleProperties(); return throttleProperties.isEnabledSubscriptionLevelSpikeArrest(); } public static Map<String, Tier> getTiersFromPolicies(String policyLevel, int tenantId) throws APIManagementException { Map<String, Tier> tierMap = new HashMap<String, Tier>(); ApiMgtDAO apiMgtDAO = ApiMgtDAO.getInstance(); Policy[] policies; if (PolicyConstants.POLICY_LEVEL_SUB.equalsIgnoreCase(policyLevel)) { policies = apiMgtDAO.getSubscriptionPolicies(tenantId); } else if (PolicyConstants.POLICY_LEVEL_API.equalsIgnoreCase(policyLevel)) { policies = apiMgtDAO.getAPIPolicies(tenantId); } else if (PolicyConstants.POLICY_LEVEL_APP.equalsIgnoreCase(policyLevel)) { policies = apiMgtDAO.getApplicationPolicies(tenantId); } else { throw new APIManagementException("No such a policy type : " + policyLevel); } for (Policy policy : policies) { if (!APIConstants.UNLIMITED_TIER.equalsIgnoreCase(policy.getPolicyName())) { Tier tier = new Tier(policy.getPolicyName()); tier.setDescription(policy.getDescription()); tier.setDisplayName(policy.getDisplayName()); Limit limit = policy.getDefaultQuotaPolicy().getLimit(); tier.setTimeUnit(limit.getTimeUnit()); tier.setUnitTime(limit.getUnitTime()); //If the policy is a subscription policy if(policy instanceof SubscriptionPolicy){ SubscriptionPolicy subscriptionPolicy = (SubscriptionPolicy)policy; setBillingPlanAndCustomAttributesToTier(subscriptionPolicy, tier); } if(limit instanceof RequestCountLimit) { RequestCountLimit countLimit = (RequestCountLimit) limit; tier.setRequestsPerMin(countLimit.getRequestCount()); tier.setRequestCount(countLimit.getRequestCount()); } else { BandwidthLimit bandwidthLimit = (BandwidthLimit) limit; tier.setRequestsPerMin(bandwidthLimit.getDataAmount()); tier.setRequestCount(bandwidthLimit.getDataAmount()); } tierMap.put(policy.getPolicyName(), tier); } else { if (APIUtil.isEnabledUnlimitedTier()) { Tier tier = new Tier(policy.getPolicyName()); tier.setDescription(policy.getDescription()); tier.setDisplayName(policy.getDisplayName()); tier.setRequestsPerMin(Integer.MAX_VALUE); tier.setRequestCount(Integer.MAX_VALUE); if (isUnlimitedTierPaid(getTenantDomainFromTenantId(tenantId))) { tier.setTierPlan(APIConstants.COMMERCIAL_TIER_PLAN); } else { tier.setTierPlan(APIConstants.BILLING_PLAN_FREE); } tierMap.put(policy.getPolicyName(), tier); } } } if (PolicyConstants.POLICY_LEVEL_SUB.equalsIgnoreCase(policyLevel)) { tierMap.remove(APIConstants.UNAUTHENTICATED_TIER); } return tierMap; } /** * Extract custom attributes and billing plan from subscription policy and set to tier. * @param subscriptionPolicy - The SubscriptionPolicy object to extract details from * @param tier - The Tier to set information into */ public static void setBillingPlanAndCustomAttributesToTier(SubscriptionPolicy subscriptionPolicy, Tier tier){ //set the billing plan. 
tier.setTierPlan(subscriptionPolicy.getBillingPlan()); //If the tier has custom attributes if(subscriptionPolicy.getCustomAttributes() != null && subscriptionPolicy.getCustomAttributes().length > 0){ Map<String, Object> tierAttributes = new HashMap<String, Object>(); try { String customAttr = new String(subscriptionPolicy.getCustomAttributes(), "UTF-8"); JSONParser parser = new JSONParser(); JSONArray jsonArr = (JSONArray) parser.parse(customAttr); Iterator jsonArrIterator = jsonArr.iterator(); while(jsonArrIterator.hasNext()){ JSONObject json = (JSONObject)jsonArrIterator.next(); tierAttributes.put(String.valueOf(json.get("name")), json.get("value")); } tier.setTierAttributes(tierAttributes); } catch (ParseException e) { log.error("Unable to convert String to Json", e); tier.setTierAttributes(null); } catch (UnsupportedEncodingException e) { log.error("Custom attribute byte array does not use UTF-8 character set", e); tier.setTierAttributes(null); } } } public static Set<Tier> getAvailableTiers(Map<String, Tier> definedTiers, String tiers, String apiName) { Set<Tier> availableTier = new HashSet<Tier>(); if (tiers != null && !"".equals(tiers)) { String[] tierNames = tiers.split("\\|\\|"); for (String tierName : tierNames) { Tier definedTier = definedTiers.get(tierName); if (definedTier != null) { availableTier.add(definedTier); } else { log.warn("Unknown tier: " + tierName + " found on API: " + apiName); } } } return availableTier; } public static byte[] toByteArray(InputStream is) throws IOException { return IOUtils.toByteArray(is); } public static long ipToLong(String ipAddress) { long result = 0; String[] ipAddressInArray = ipAddress.split("\\."); for (int i = 3; i >= 0; i--) { long ip = Long.parseLong(ipAddressInArray[3 - i]); //left shifting 24,16,8,0 and bitwise OR //1. 192 << 24 //1. 168 << 16 //1. 1 << 8 //1. 2 << 0 result |= ip << (i * 8); } return result; } public String getFullLifeCycleData(Registry registry) throws XMLStreamException, RegistryException { return CommonUtil.getLifecycleConfiguration(APIConstants.API_LIFE_CYCLE, registry); } /** * Composes OR based search criteria from provided array of values * * @param values * @return */ public static String getORBasedSearchCriteria(String[] values) { String criteria = "("; if (values != null) { for (int i = 0; i < values.length; i++) { criteria = criteria + values[i]; if (i != values.length - 1) { criteria = criteria + " OR "; } else { criteria = criteria + ")"; } } return criteria; } return null; } /** * Generates solr compatible search criteria synatax from user entered query criteria. 
* Ex: From version:1.0.0, this returns version=*1.0.0* * * @param criteria * @return solar compatible criteria * @throws APIManagementException */ public static String getSingleSearchCriteria(String criteria) throws APIManagementException { criteria = criteria.trim(); String searchValue = criteria; String searchKey = APIConstants.NAME_TYPE_PREFIX; if (criteria.contains(":")) { if (criteria.split(":").length > 1) { searchKey = criteria.split(":")[0].trim(); //if search key is 'tag' instead of 'tags', allow it as well since rest api document says query // param to use for tag search is 'tag' if (APIConstants.TAG_SEARCH_TYPE_PREFIX3.equals(searchKey)) { searchKey = APIConstants.TAG_SEARCH_TYPE_PREFIX; } searchValue = criteria.split(":")[1]; if (!APIConstants.DOCUMENTATION_SEARCH_TYPE_PREFIX.equalsIgnoreCase(searchKey) && !APIConstants.TAG_SEARCH_TYPE_PREFIX.equalsIgnoreCase(searchKey)) { if (!searchValue.endsWith("*")) { searchValue = searchValue + "*"; } if (!searchValue.startsWith("*")) { searchValue = "*" + searchValue; } } } else { throw new APIManagementException("Search term is missing. Try again with valid search query."); } } else { if (!searchValue.endsWith("*")) { searchValue = searchValue + "*"; } if (!searchValue.startsWith("*")) { searchValue = "*" + searchValue; } } if (APIConstants.API_PROVIDER.equalsIgnoreCase(searchKey)) { searchValue = searchValue.replaceAll("@", "-AT-"); } return searchKey + "=" + searchValue; } /** * return whether store forum feature is enabled * * @return true or false indicating enable or not */ public static boolean isStoreForumEnabled() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance(). getAPIManagerConfigurationService().getAPIManagerConfiguration(); String forumEnabled = config.getFirstProperty(APIConstants.API_STORE_FORUM_ENABLED); if (forumEnabled == null) { return true; } return Boolean.parseBoolean(forumEnabled); } /** * Returns a secured DocumentBuilderFactory instance * * @return DocumentBuilderFactory */ public static DocumentBuilderFactory getSecuredDocumentBuilder() { org.apache.xerces.impl.Constants Constants = null; DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); dbf.setXIncludeAware(false); dbf.setExpandEntityReferences(false); try { dbf.setFeature(Constants.SAX_FEATURE_PREFIX + Constants.EXTERNAL_GENERAL_ENTITIES_FEATURE, false); dbf.setFeature(Constants.SAX_FEATURE_PREFIX + Constants.EXTERNAL_PARAMETER_ENTITIES_FEATURE, false); dbf.setFeature(Constants.XERCES_FEATURE_PREFIX + Constants.LOAD_EXTERNAL_DTD_FEATURE, false); } catch (ParserConfigurationException e) { log.error( "Failed to load XML Processor Feature " + Constants.EXTERNAL_GENERAL_ENTITIES_FEATURE + " or " + Constants.EXTERNAL_PARAMETER_ENTITIES_FEATURE + " or " + Constants.LOAD_EXTERNAL_DTD_FEATURE); } SecurityManager securityManager = new SecurityManager(); securityManager.setEntityExpansionLimit(ENTITY_EXPANSION_LIMIT); dbf.setAttribute(Constants.XERCES_PROPERTY_PREFIX + Constants.SECURITY_MANAGER_PROPERTY, securityManager); return dbf; } /** * Logs an audit message on actions performed on entities (APIs, Applications, etc). The log is printed in the * following JSON format * { * "typ": "API", * "action": "update", * "performedBy": "[email protected]", * "info": { * "name": "Twitter", * "context": "/twitter", * "version": "1.0.0", * "provider": "nuwan" * } * } * * @param entityType - The entity type. Ex: API, Application * @param entityInfo - The details of the entity. 
Ex: API Name, Context * @param action - The type of action performed. Ex: Create, Update * @param performedBy - The user who performs the action. */ public static void logAuditMessage(String entityType, String entityInfo, String action, String performedBy) { JSONObject jsonObject = new JSONObject(); jsonObject.put("typ", entityType); jsonObject.put("action", action); jsonObject.put("performedBy", performedBy); jsonObject.put("info", entityInfo); audit.info(jsonObject.toString()); } public static int getPortOffset() { ServerConfiguration carbonConfig = ServerConfiguration.getInstance(); String portOffset = System.getProperty(APIConstants.PORT_OFFSET_SYSTEM_VAR, carbonConfig.getFirstProperty(APIConstants.PORT_OFFSET_CONFIG)); try { if ((portOffset != null)) { return Integer.parseInt(portOffset.trim()); } else { return 0; } } catch (NumberFormatException e) { log.error("Invalid Port Offset: " + portOffset + ". Default value 0 will be used.", e); return 0; } } public static boolean isQueryParamDataPublishingEnabled() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getThrottleProperties().isEnableQueryParamConditions(); } public static boolean isHeaderDataPublishingEnabled() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getThrottleProperties().isEnableHeaderConditions(); } public static boolean isJwtTokenPublishingEnabled() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(). getThrottleProperties().isEnableJwtConditions(); } public static String getAnalyticsServerURL() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIAnalyticsConfiguration(). getDasServerUrl(); } public static String getAnalyticsServerUserName() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIAnalyticsConfiguration(). getDasReceiverServerUser(); } public static String getAnalyticsServerPassword() { return ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService().getAPIAnalyticsConfiguration(). getDasReceiverServerPassword(); } /** * Create the Cache object from the given parameters * @param cacheManagerName - Name of the Cache Manager * @param cacheName - Name of the Cache * @param modifiedExp - Value of the MODIFIED Expiry Type * @param accessExp - Value of the ACCESSED Expiry Type * @return - The cache object */ public static Cache getCache(final String cacheManagerName, final String cacheName, final long modifiedExp, final long accessExp){ return Caching.getCacheManager( cacheManagerName).createCacheBuilder(cacheName). setExpiry(CacheConfiguration.ExpiryType.MODIFIED, new CacheConfiguration.Duration(TimeUnit.SECONDS, modifiedExp)). 
setExpiry(CacheConfiguration.ExpiryType.ACCESSED, new CacheConfiguration.Duration(TimeUnit.SECONDS, accessExp)).setStoreByValue(false).build(); } /** * This method is used to get the actual endpoint password of an API from the hidden property * in the case where the handler APIEndpointPasswordRegistryHandler is enabled in registry.xml * * @param api The API * @param registry The registry object * @return The actual password of the endpoint if exists * @throws RegistryException Throws if the api resource doesn't exist */ private static String getActualEpPswdFromHiddenProperty(API api, Registry registry) throws RegistryException { String apiPath = APIUtil.getAPIPath(api.getId()); Resource apiResource = registry.get(apiPath); return apiResource.getProperty(APIConstants.REGISTRY_HIDDEN_ENDPOINT_PROPERTY); } /** * To check whether given role exist in the array of roles. * * @param userRoleList Role list to check against. * @param accessControlRole Access Control Role. * @return true if the Array contains the role specified. */ public static boolean compareRoleList(String[] userRoleList, String accessControlRole) { if (userRoleList != null) { for (String userRole : userRoleList) { if (userRole.equalsIgnoreCase(accessControlRole)) { return true; } } } return false; } /** * To clear the publisherRoleCache for certain users. * * @param userName Names of the user. */ public static void clearRoleCache(String userName) { if (isPublisherRoleCacheEnabled) { Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER).getCache(APIConstants .API_PUBLISHER_ADMIN_PERMISSION_CACHE).remove(userName); Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER).getCache(APIConstants .API_PUBLISHER_USER_ROLE_CACHE).remove(userName); } } /** * Used to reconstruct the input search query as sub context and doc content doesn't support AND search * * @param query Input search query * @return Reconstructed new search query * @throws APIManagementException If there is an error in the search query */ public static String constructNewSearchQuery(String query) throws APIManagementException { String newSearchQuery = ""; String inputSearchQuery = query.trim(); // sub context and doc content doesn't support AND search if (inputSearchQuery != null && inputSearchQuery.contains(" ")) { if (inputSearchQuery.split(" ").length > 1) { String[] searchCriterias = inputSearchQuery.split(" "); for (int i = 0; i < searchCriterias.length; i++) { if (searchCriterias[i].contains(":") && searchCriterias[i].split(":").length > 1) { if (APIConstants.DOCUMENTATION_SEARCH_TYPE_PREFIX .equalsIgnoreCase(searchCriterias[i].split(":")[0]) || APIConstants.SUBCONTEXT_SEARCH_TYPE_PREFIX .equalsIgnoreCase(searchCriterias[i].split(":")[0])) { throw new APIManagementException("Invalid query. AND based search is not supported for " + "doc and subcontext prefixes"); } } if (i == 0) { newSearchQuery = APIUtil.getSingleSearchCriteria(searchCriterias[i]); } else { newSearchQuery = newSearchQuery + APIConstants.SEARCH_AND_TAG + APIUtil .getSingleSearchCriteria(searchCriterias[i]); } } } } else { newSearchQuery = APIUtil.getSingleSearchCriteria(inputSearchQuery); } return newSearchQuery; } }
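The utility methods above such as ipToLong and getSingleSearchCriteria lend themselves to a quick illustration. The following standalone sketch is not part of the original APIUtil.java: the class name ApiUtilUsageSketch is made up for the example, the import assumes the package org.wso2.carbon.apimgt.impl.utils implied by the file path below, and the expected values in the comments are worked out from the method bodies shown above rather than taken from any external documentation.

import org.wso2.carbon.apimgt.impl.utils.APIUtil;

public class ApiUtilUsageSketch {

    public static void main(String[] args) throws Exception {
        // ipToLong shifts each octet into place:
        // (192 << 24) | (168 << 16) | (1 << 8) | 2 == 3232235778
        System.out.println(APIUtil.ipToLong("192.168.1.2"));

        // getSingleSearchCriteria splits on ':', wraps the value in wildcards and
        // joins key and value with '=': "version:1.0.0" becomes "version=*1.0.0*"
        System.out.println(APIUtil.getSingleSearchCriteria("version:1.0.0"));

        // a bare term falls back to the default name prefix and gets the same
        // wildcard wrapping around the value
        System.out.println(APIUtil.getSingleSearchCriteria("PizzaShack"));
    }
}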
Added support for httpclient.hostnameVerifier
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/utils/APIUtil.java
Added support for httpclient.hostnameVerifier
Java
apache-2.0
ed5d6155ce2defea797dfcd4b089b6144016dfa4
0
andreaturli/legacy-brooklyn,andreaturli/legacy-brooklyn,bmwshop/brooklyn,andreaturli/legacy-brooklyn,neykov/incubator-brooklyn,bmwshop/brooklyn,bmwshop/brooklyn,neykov/incubator-brooklyn,aledsage/legacy-brooklyn,aledsage/legacy-brooklyn,bmwshop/brooklyn,aledsage/legacy-brooklyn,aledsage/legacy-brooklyn,andreaturli/legacy-brooklyn,neykov/incubator-brooklyn,andreaturli/legacy-brooklyn,andreaturli/legacy-brooklyn,andreaturli/legacy-brooklyn,aledsage/legacy-brooklyn,neykov/incubator-brooklyn,aledsage/legacy-brooklyn,neykov/incubator-brooklyn,aledsage/legacy-brooklyn,bmwshop/brooklyn,neykov/incubator-brooklyn,bmwshop/brooklyn,bmwshop/brooklyn
package brooklyn.entity.webapp.tomcat; import static org.testng.Assert.assertFalse; import static org.testng.Assert.fail; import java.net.ServerSocket; import java.util.Iterator; import org.jclouds.util.Throwables2; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import brooklyn.entity.basic.ApplicationBuilder; import brooklyn.entity.basic.Entities; import brooklyn.entity.proxying.EntitySpec; import brooklyn.location.PortRange; import brooklyn.location.basic.LocalhostMachineProvisioningLocation; import brooklyn.location.basic.PortRanges; import brooklyn.test.entity.TestApplication; import brooklyn.util.net.Networking; import com.google.common.collect.ImmutableList; /** * This tests the operation of the {@link TomcatServer} entity. * * FIXME this test is largely superseded by WebApp*IntegrationTest which tests inter alia Tomcat */ public class TomcatServerSimpleIntegrationTest { @SuppressWarnings("unused") private static final Logger LOG = LoggerFactory.getLogger(TomcatServerSimpleIntegrationTest.class); /** don't use 8080 since that is commonly used by testing software; use different from other tests. */ static PortRange DEFAULT_HTTP_PORT_RANGE = PortRanges.fromString("7880-7980"); private TestApplication app; private TomcatServer tc; private int httpPort; @BeforeMethod(alwaysRun=true) public void pickFreePort() { for (Iterator<Integer> iter = DEFAULT_HTTP_PORT_RANGE.iterator(); iter.hasNext();) { Integer port = iter.next(); if (Networking.isPortAvailable(port)) { httpPort = port; return; } } fail("someone is already listening on ports "+DEFAULT_HTTP_PORT_RANGE+"; tests assume that port is free on localhost"); } @AfterMethod(alwaysRun=true) public void tearDown() throws Exception { if (app != null) Entities.destroyAll(app.getManagementContext()); } @Test(groups="Integration") public void detectFailureIfTomcatCantBindToPort() throws Exception { ServerSocket listener = new ServerSocket(httpPort); try { app = ApplicationBuilder.newManagedApp(TestApplication.class); tc = app.createAndManageChild(EntitySpec.create(TomcatServer.class).configure("httpPort", httpPort)); try { tc.start(ImmutableList.of(app.getManagementContext().getLocationManager().manage(new LocalhostMachineProvisioningLocation()))); fail("Should have thrown start-exception"); } catch (Exception e) { // LocalhostMachineProvisioningLocation does NetworkUtils.isPortAvailable, so get -1 IllegalArgumentException iae = Throwables2.getFirstThrowableOfType(e, IllegalArgumentException.class); if (iae == null || iae.getMessage() == null || !iae.getMessage().equals("port for httpPort is null")) throw e; } finally { tc.stop(); } assertFalse(tc.getAttribute(TomcatServerImpl.SERVICE_UP)); } finally { listener.close(); } } }
software/webapp/src/test/java/brooklyn/entity/webapp/tomcat/TomcatServerSimpleIntegrationTest.java
package brooklyn.entity.webapp.tomcat; import static org.testng.Assert.assertFalse; import static org.testng.Assert.fail; import java.net.ServerSocket; import java.util.Iterator; import org.jclouds.util.Throwables2; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import brooklyn.entity.basic.ApplicationBuilder; import brooklyn.entity.basic.Entities; import brooklyn.entity.proxying.EntitySpec; import brooklyn.location.PortRange; import brooklyn.location.basic.LocalhostMachineProvisioningLocation; import brooklyn.location.basic.PortRanges; import brooklyn.test.entity.TestApplication; import brooklyn.util.net.Networking; import com.google.common.collect.ImmutableList; /** * This tests the operation of the {@link TomcatServer} entity. * * FIXME this test is largely superseded by WebApp*IntegrationTest which tests inter alia Tomcat */ public class TomcatServerSimpleIntegrationTest { @SuppressWarnings("unused") private static final Logger LOG = LoggerFactory.getLogger(TomcatServerSimpleIntegrationTest.class); /** don't use 8080 since that is commonly used by testing software; use different from other tests. */ static PortRange DEFAULT_HTTP_PORT_RANGE = PortRanges.fromString("7880-7980"); private TestApplication app; private TomcatServer tc; private int httpPort; @BeforeMethod(alwaysRun=true) public void pickFreePort() { for (Iterator<Integer> iter = DEFAULT_HTTP_PORT_RANGE.iterator(); iter.hasNext();) { Integer port = iter.next(); if (Networking.isPortAvailable(port)) { httpPort = port; return; } } fail("someone is already listening on ports "+DEFAULT_HTTP_PORT_RANGE+"; tests assume that port is free on localhost"); } @AfterMethod(alwaysRun=true) public void tearDown() throws Exception { if (app != null) Entities.destroy(app); } @Test(groups="Integration") public void detectFailureIfTomcatCantBindToPort() throws Exception { ServerSocket listener = new ServerSocket(httpPort); try { app = ApplicationBuilder.newManagedApp(TestApplication.class); tc = app.createAndManageChild(EntitySpec.create(TomcatServer.class).configure("httpPort", httpPort)); try { tc.start(ImmutableList.of(app.getManagementContext().getLocationManager().manage(new LocalhostMachineProvisioningLocation()))); fail("Should have thrown start-exception"); } catch (Exception e) { // LocalhostMachineProvisioningLocation does NetworkUtils.isPortAvailable, so get -1 IllegalArgumentException iae = Throwables2.getFirstThrowableOfType(e, IllegalArgumentException.class); if (iae == null || iae.getMessage() == null || !iae.getMessage().equals("port for httpPort is null")) throw e; } finally { tc.stop(); } assertFalse(tc.getAttribute(TomcatServerImpl.SERVICE_UP)); } finally { listener.close(); } } }
Fix TomcatServerSimpleIntegrationTest.tearDown - so it stops the management context
software/webapp/src/test/java/brooklyn/entity/webapp/tomcat/TomcatServerSimpleIntegrationTest.java
Fix TomcatServerSimpleIntegrationTest.tearDown - so it stops the management context
Java
bsd-3-clause
07741156d439a6095e94c94803d85059773dcd85
0
larskotthoff/recomputation-ss-paper,larskotthoff/recomputation-ss-paper,larskotthoff/recomputation-ss-paper
package uk.ac.standrews.cs.emcsr2014.group_2; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.Path; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import net.schmizz.sshj.userauth.keyprovider.OpenSSHKeyFile; import net.schmizz.sshj.userauth.method.AuthPublickey; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import uk.ac.standrews.cs.shabdiz.ApplicationDescriptor; import uk.ac.standrews.cs.shabdiz.ApplicationState; import uk.ac.standrews.cs.shabdiz.host.Host; import uk.ac.standrews.cs.shabdiz.host.SSHHost; import uk.ac.standrews.cs.shabdiz.job.Worker; import uk.ac.standrews.cs.shabdiz.job.WorkerManager; import uk.ac.standrews.cs.shabdiz.job.WorkerNetwork; import uk.ac.standrews.cs.shabdiz.util.Duration; import uk.ac.standrews.cs.trombone.core.ChordConfiguration; import uk.ac.standrews.cs.trombone.core.Key; import uk.ac.standrews.cs.trombone.evaluation.util.FileSystemUtils; import uk.ac.standrews.cs.trombone.evaluation.util.ScenarioUtils; import uk.ac.standrews.cs.trombone.event.Scenario; import uk.ac.standrews.cs.trombone.event.environment.Churn; import uk.ac.standrews.cs.trombone.event.environment.ExponentialIntervalGenerator; import uk.ac.standrews.cs.trombone.event.environment.RandomKeySupplier; import uk.ac.standrews.cs.trombone.event.environment.Workload; import uk.ac.standrews.cs.trombone.event.util.SequentialPortNumberSupplier; /** * @author Masih Hajiarabderkani ([email protected]) */ @RunWith(Parameterized.class) public class ChordExperiment { private static final Logger LOGGER = LoggerFactory.getLogger(ChordExperiment.class); static final Scenario scenario = new Scenario("chord_11", 1413); private static final SequentialPortNumberSupplier PORT_NUMBER_PROVIDER = new SequentialPortNumberSupplier(55000); private static final ExponentialIntervalGenerator THREE_MINUTES_EXPONENTIAL = new ExponentialIntervalGenerator(new Duration(3, TimeUnit.MINUTES), 7376); private static final RandomKeySupplier LOOKUP_TARGET_KEY_SUPPLIER = new RandomKeySupplier(889); private static final ExponentialIntervalGenerator ONE_SECOND_EXPONENTIAL = new ExponentialIntervalGenerator(new Duration(500, TimeUnit.MILLISECONDS), 192); private static final ChordConfiguration CHORD_CONFIGURATION = new ChordConfiguration(3, Key.TWO, 3, 5, TimeUnit.SECONDS, 100); private static final Workload WORKLOAD = new Workload(LOOKUP_TARGET_KEY_SUPPLIER, ONE_SECOND_EXPONENTIAL); private static final Churn CHURN = new Churn(THREE_MINUTES_EXPONENTIAL, THREE_MINUTES_EXPONENTIAL); private static final Duration WORKER_DEPLOYMENT_TIMEOUT = new Duration(5, TimeUnit.MINUTES); private static final OpenSSHKeyFile SSH_KEY_FILE; private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS"); private static final Duration ADDITIONAL_WAIT = new Duration(10, TimeUnit.MINUTES); private static 
final String EXPERIMENT_HOST_NAMES_PROPERTY_KEY = "experiment.host_names"; private static final String HOSTS_PROPERTY_VALUE = System.getProperty(EXPERIMENT_HOST_NAMES_PROPERTY_KEY); private static final List<String> AVAILABLE_HOST_NAMES = getHostNames(); static { SSH_KEY_FILE = new OpenSSHKeyFile(); SSH_KEY_FILE.init(new File(System.getProperty("user.home") + File.separator + ".ssh", "id_rsa")); // Example of using a specific private key (used by Amazon and Azure) // SSH_KEY_FILE.init(new File("/path/to/private_key.pem")); scenario.setLookupRetryCount(5); scenario.setExperimentDuration(new Duration(30, TimeUnit.MINUTES)); scenario.setObservationInterval(new Duration(10, TimeUnit.SECONDS)); scenario.setPeerKeyProvider(new RandomKeySupplier(78218713)); for (String host_name : AVAILABLE_HOST_NAMES) { scenario.addHost(host_name, 1, PORT_NUMBER_PROVIDER, CHURN, WORKLOAD, CHORD_CONFIGURATION); } } private static final int REPETITIONS = 5; @Parameterized.Parameters public static List<Object[]> data() { return Arrays.asList(new Object[REPETITIONS][0]); } private WorkerNetwork network; private WorkerManager manager; @Before public void setUp() throws Exception { LOGGER.info("starting experiment..."); network = new WorkerNetwork(56001); manager = network.getWorkerManager(); manager.setWorkerJVMArguments("-D" + EXPERIMENT_HOST_NAMES_PROPERTY_KEY + '=' + HOSTS_PROPERTY_VALUE + " -Xmx1G"); manager.setWorkerDeploymentTimeout(WORKER_DEPLOYMENT_TIMEOUT); network.addCurrentJVMClasspath(); network.setAutoDeployEnabled(false); LOGGER.info("preparing worker network of {} hosts...", AVAILABLE_HOST_NAMES.size()); for (String host_name : AVAILABLE_HOST_NAMES) { network.add(new SSHHost(host_name, "ubuntu", new AuthPublickey(SSH_KEY_FILE))); LOGGER.info("added host {}", host_name); } LOGGER.info("deploying workers..."); network.deployAll(); LOGGER.info("awaiting running state..."); network.awaitAnyOfStates(ApplicationState.RUNNING); LOGGER.info("worker network is up and running."); } @Test public void doExperiment() throws Exception { LOGGER.info("starting execution of scenario {}...", scenario.getName()); final Map<Host, Future<String>> host_event_executions = new HashMap<>(); for (ApplicationDescriptor descriptor : network) { final Worker worker = descriptor.getApplicationReference(); final Host host = descriptor.getHost(); final int host_index = getHostIndexByName(host.getName()); LOGGER.info("submitting job to {} indexed as {}", host, host_index); final Future<String> future_event_execution = worker.submit(new ScenarioExecutionJob()); host_event_executions.put(host, future_event_execution); } final Path repetitions = ScenarioUtils.getScenarioRepetitionsHome(scenario.getName()); assureRepetitionsDirectoryExists(repetitions); final Path observations = newObservationsPath(repetitions); LOGGER.info("collected observations will be stored at {}", observations.toAbsolutePath()); Exception error = null; try (FileSystem observations_fs = FileSystemUtils.newZipFileSystem(observations, true)) { final Path root_observations = observations_fs.getPath(observations_fs.getSeparator()); for (Map.Entry<Host, Future<String>> host_event_entry : host_event_executions.entrySet()) { final Host host = host_event_entry.getKey(); final Future<String> future_event_execution = host_event_entry.getValue(); try { final Duration timeout = ADDITIONAL_WAIT.add(scenario.getExperimentDuration()); final String results_path = future_event_execution.get(timeout.getLength(), timeout.getTimeUnit()); LOGGER.info("successfully finished executing 
events on host {} - {}", host, results_path); final Path destination = Files.createTempDirectory(host.getName()); host.download(results_path, destination.toFile()); LOGGER.info("downloaded observations from host {} to {}", host.getName(), destination); final File zip = new File(destination.toFile(), FilenameUtils.getName(results_path)); final int host_index = getHostIndexByName(host.getName()); final Path local_observations = root_observations.resolve(String.valueOf(host_index)); Files.createDirectories(local_observations); try (final FileSystem fileSystem = FileSystemUtils.newZipFileSystem(zip.getAbsolutePath(), false)) { FileSystemUtils.copyRecursively(fileSystem.getPath("/"), local_observations); } FileUtils.deleteQuietly(destination.toFile()); } catch (InterruptedException | ExecutionException | TimeoutException | CancellationException e) { final Throwable cause = e.getCause(); LOGGER.error("Event execution on host {} failed due to {}", host, cause != null ? cause : e); LOGGER.error("Failure details", cause != null ? cause : e); error = e; break; } } } if (error != null) { Files.deleteIfExists(observations); throw error; } } @After public void tearDown() throws Exception { LOGGER.info("shutting down worker network..."); network.shutdown(); } private Integer getHostIndexByName(final String host_name) { LOGGER.info("looking up host {}", host_name); return ScenarioExecutionJob.getHostIndex(host_name); } private static List<String> getHostNames() { LOGGER.info("loading host names from {} system property", EXPERIMENT_HOST_NAMES_PROPERTY_KEY); LOGGER.info("given host names: {}", HOSTS_PROPERTY_VALUE); return new CopyOnWriteArrayList<>(HOSTS_PROPERTY_VALUE.trim() .split(",")); } static synchronized Path newObservationsPath(final Path repetitions) { return repetitions.resolve(DATE_FORMAT.format(new Date()) + ".zip"); } static void assureRepetitionsDirectoryExists(final Path repetitions) throws IOException { if (!Files.isDirectory(repetitions)) { Files.createDirectories(repetitions); } } }
Group_2/p2p_experiments/source/src/test/java/uk/ac/standrews/cs/emcsr2014/group_2/ChordExperiment.java
package uk.ac.standrews.cs.emcsr2014.group_2; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.Path; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import net.schmizz.sshj.userauth.keyprovider.OpenSSHKeyFile; import net.schmizz.sshj.userauth.method.AuthPublickey; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import uk.ac.standrews.cs.shabdiz.ApplicationDescriptor; import uk.ac.standrews.cs.shabdiz.ApplicationState; import uk.ac.standrews.cs.shabdiz.host.Host; import uk.ac.standrews.cs.shabdiz.host.SSHHost; import uk.ac.standrews.cs.shabdiz.job.Worker; import uk.ac.standrews.cs.shabdiz.job.WorkerManager; import uk.ac.standrews.cs.shabdiz.job.WorkerNetwork; import uk.ac.standrews.cs.shabdiz.util.Duration; import uk.ac.standrews.cs.trombone.core.ChordConfiguration; import uk.ac.standrews.cs.trombone.core.Key; import uk.ac.standrews.cs.trombone.evaluation.util.FileSystemUtils; import uk.ac.standrews.cs.trombone.evaluation.util.ScenarioUtils; import uk.ac.standrews.cs.trombone.event.Scenario; import uk.ac.standrews.cs.trombone.event.environment.Churn; import uk.ac.standrews.cs.trombone.event.environment.ExponentialIntervalGenerator; import uk.ac.standrews.cs.trombone.event.environment.RandomKeySupplier; import uk.ac.standrews.cs.trombone.event.environment.Workload; import uk.ac.standrews.cs.trombone.event.util.SequentialPortNumberSupplier; /** * @author Masih Hajiarabderkani ([email protected]) */ @RunWith(Parameterized.class) public class ChordExperiment { private static final Logger LOGGER = LoggerFactory.getLogger(ChordExperiment.class); static final Scenario scenario = new Scenario("chord_11", 1413); private static final SequentialPortNumberSupplier PORT_NUMBER_PROVIDER = new SequentialPortNumberSupplier(55000); private static final ExponentialIntervalGenerator THREE_MINUTES_EXPONENTIAL = new ExponentialIntervalGenerator(new Duration(3, TimeUnit.MINUTES), 7376); private static final RandomKeySupplier LOOKUP_TARGET_KEY_SUPPLIER = new RandomKeySupplier(889); private static final ExponentialIntervalGenerator ONE_SECOND_EXPONENTIAL = new ExponentialIntervalGenerator(new Duration(500, TimeUnit.MILLISECONDS), 192); private static final ChordConfiguration CHORD_CONFIGURATION = new ChordConfiguration(3, Key.TWO, 3, 5, TimeUnit.SECONDS, 100); private static final Workload WORKLOAD = new Workload(LOOKUP_TARGET_KEY_SUPPLIER, ONE_SECOND_EXPONENTIAL); private static final Churn CHURN = new Churn(THREE_MINUTES_EXPONENTIAL, THREE_MINUTES_EXPONENTIAL); private static final Duration WORKER_DEPLOYMENT_TIMEOUT = new Duration(5, TimeUnit.MINUTES); private static final OpenSSHKeyFile SSH_KEY_FILE; private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS"); private static final Duration ADDITIONAL_WAIT = new Duration(10, TimeUnit.MINUTES); private static 
final String EXPERIMENT_HOST_NAMES_PROPERTY_KEY = "experiment.host_names"; private static final String HOSTS_PROPERTY_VALUE = System.getProperty(EXPERIMENT_HOST_NAMES_PROPERTY_KEY); private static final List<String> AVAILABLE_HOST_NAMES = getHostNames(); static { SSH_KEY_FILE = new OpenSSHKeyFile(); SSH_KEY_FILE.init(new File(System.getProperty("user.home") + File.separator + ".ssh", "id_rsa")); // Example of using a specific private key (used by Amazon and Azure) // SSH_KEY_FILE.init(new File(System.getProperty("user.home") , "private_key.pem")); scenario.setLookupRetryCount(5); scenario.setExperimentDuration(new Duration(30, TimeUnit.MINUTES)); scenario.setObservationInterval(new Duration(10, TimeUnit.SECONDS)); scenario.setPeerKeyProvider(new RandomKeySupplier(78218713)); for (String host_name : AVAILABLE_HOST_NAMES) { scenario.addHost(host_name, 1, PORT_NUMBER_PROVIDER, CHURN, WORKLOAD, CHORD_CONFIGURATION); } } private static final int REPETITIONS = 5; @Parameterized.Parameters public static List<Object[]> data() { return Arrays.asList(new Object[REPETITIONS][0]); } private WorkerNetwork network; private WorkerManager manager; @Before public void setUp() throws Exception { LOGGER.info("starting experiment..."); network = new WorkerNetwork(56001); manager = network.getWorkerManager(); manager.setWorkerJVMArguments("-D" + EXPERIMENT_HOST_NAMES_PROPERTY_KEY + '=' + HOSTS_PROPERTY_VALUE + " -Xmx1G"); manager.setWorkerDeploymentTimeout(WORKER_DEPLOYMENT_TIMEOUT); network.addCurrentJVMClasspath(); network.setAutoDeployEnabled(false); LOGGER.info("preparing worker network of {} hosts...", AVAILABLE_HOST_NAMES.size()); for (String host_name : AVAILABLE_HOST_NAMES) { network.add(new SSHHost(host_name, "ubuntu", new AuthPublickey(SSH_KEY_FILE))); LOGGER.info("added host {}", host_name); } LOGGER.info("deploying workers..."); network.deployAll(); LOGGER.info("awaiting running state..."); network.awaitAnyOfStates(ApplicationState.RUNNING); LOGGER.info("worker network is up and running."); } @Test public void doExperiment() throws Exception { LOGGER.info("starting execution of scenario {}...", scenario.getName()); final Map<Host, Future<String>> host_event_executions = new HashMap<>(); for (ApplicationDescriptor descriptor : network) { final Worker worker = descriptor.getApplicationReference(); final Host host = descriptor.getHost(); final int host_index = getHostIndexByName(host.getName()); LOGGER.info("submitting job to {} indexed as {}", host, host_index); final Future<String> future_event_execution = worker.submit(new ScenarioExecutionJob()); host_event_executions.put(host, future_event_execution); } final Path repetitions = ScenarioUtils.getScenarioRepetitionsHome(scenario.getName()); assureRepetitionsDirectoryExists(repetitions); final Path observations = newObservationsPath(repetitions); LOGGER.info("collected observations will be stored at {}", observations.toAbsolutePath()); Exception error = null; try (FileSystem observations_fs = FileSystemUtils.newZipFileSystem(observations, true)) { final Path root_observations = observations_fs.getPath(observations_fs.getSeparator()); for (Map.Entry<Host, Future<String>> host_event_entry : host_event_executions.entrySet()) { final Host host = host_event_entry.getKey(); final Future<String> future_event_execution = host_event_entry.getValue(); try { final Duration timeout = ADDITIONAL_WAIT.add(scenario.getExperimentDuration()); final String results_path = future_event_execution.get(timeout.getLength(), timeout.getTimeUnit()); LOGGER.info("successfully 
finished executing events on host {} - {}", host, results_path); final Path destination = Files.createTempDirectory(host.getName()); host.download(results_path, destination.toFile()); LOGGER.info("downloaded observations from host {} to {}", host.getName(), destination); final File zip = new File(destination.toFile(), FilenameUtils.getName(results_path)); final int host_index = getHostIndexByName(host.getName()); final Path local_observations = root_observations.resolve(String.valueOf(host_index)); Files.createDirectories(local_observations); try (final FileSystem fileSystem = FileSystemUtils.newZipFileSystem(zip.getAbsolutePath(), false)) { FileSystemUtils.copyRecursively(fileSystem.getPath("/"), local_observations); } FileUtils.deleteQuietly(destination.toFile()); } catch (InterruptedException | ExecutionException | TimeoutException | CancellationException e) { final Throwable cause = e.getCause(); LOGGER.error("Event execution on host {} failed due to {}", host, cause != null ? cause : e); LOGGER.error("Failure details", cause != null ? cause : e); error = e; break; } } } if (error != null) { Files.deleteIfExists(observations); throw error; } } @After public void tearDown() throws Exception { LOGGER.info("shutting down worker network..."); network.shutdown(); } private Integer getHostIndexByName(final String host_name) { LOGGER.info("looking up host {}", host_name); return ScenarioExecutionJob.getHostIndex(host_name); } private static List<String> getHostNames() { LOGGER.info("loading host names from {} system property", EXPERIMENT_HOST_NAMES_PROPERTY_KEY); LOGGER.info("given host names: {}", HOSTS_PROPERTY_VALUE); return new CopyOnWriteArrayList<>(HOSTS_PROPERTY_VALUE.trim() .split(",")); } static synchronized Path newObservationsPath(final Path repetitions) { return repetitions.resolve(DATE_FORMAT.format(new Date()) + ".zip"); } static void assureRepetitionsDirectoryExists(final Path repetitions) throws IOException { if (!Files.isDirectory(repetitions)) { Files.createDirectories(repetitions); } } }
Update ChordExperiment.java
Group_2/p2p_experiments/source/src/test/java/uk/ac/standrews/cs/emcsr2014/group_2/ChordExperiment.java
Update ChordExperiment.java
Java
apache-2.0
c9ab734988c72a018dab540027300263083ad743
0
ahararwala/javaRefactoring101
src/refactor/learn/VideoClubApplication.java
package refactor.learn; public class VideoClubApplication { public static void main(String[] arg) { Movie m1 = new Movie("Sky Captain", 1); Movie m3 = new Movie("Revenant", 0); Movie m4 = new Movie("Skyfall", 2); Customer c1 = new Customer("Manuel"); Rental r1 = new Rental(m1, 5); Rental r2 = new Rental(m3, 1); Rental r3 = new Rental(m4, 10); c1.addRental(r1); c1.addRental(r2); c1.addRental(r3); System.out.println(c1.statement()); } }
ahararwala: Deleted main driver class, since tests have been added.
src/refactor/learn/VideoClubApplication.java
ahararwala: Deleted main driver class, since tests have been added.
Java
apache-2.0
1ac48e17e8a96d94e34ce6669e800538f63e6434
0
StuxSoftware/SimpleDev
Core/src/main/java/net/stuxcrystal/commandhandler/FallbackScheduler.java
package net.stuxcrystal.commandhandler; /** * Represents a fallback scheduler. */ public class FallbackScheduler { /** * The backend that issues the warning. */ private final CommandHandler backend; /** * Has the user already been warned? */ private boolean warned = false; /** * Creates a new fallback scheduler. * @param backend The actual backend. */ public FallbackScheduler(CommandHandler backend) { this.backend = backend; } /** * Schedules a new asynchronous task. * @param runnable The runnable to run. */ public void schedule(Runnable runnable) { if (!this.warned) { this.backend.getServerBackend().getLogger().warning( this.backend.getTranslationManager().translate( this.backend.getServerBackend().getConsole(), "internal.threading.no-scheduler" ) ); this.warned = true; } new Thread(runnable).start(); } }
Remove accidentally copied FallbackScheduler.
Core/src/main/java/net/stuxcrystal/commandhandler/FallbackScheduler.java
Remove accidentally copied FallbackScheduler.
Java
mit
4091309145b160bd050fe76312e58de8054909cc
0
TakayukiHoshi1984/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,Onuzimoyr/dAndroid,DeviceConnect/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,Onuzimoyr/dAndroid,ssdwa/android,TakayukiHoshi1984/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android
/* HostBatteryManager.java Copyright (c) 2014 NTT DOCOMO,INC. Released under the MIT license http://opensource.org/licenses/mit-license.php */ package org.deviceconnect.android.deviceplugin.host.manager; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.BatteryManager; /** * バッテリー関連の値の処理と保持. */ public class HostBatteryManager { /** バッテリーの状態. */ private int mStatusBattery; /** プラグの状態. */ private int mStatusPlugged; /** バッテリーのレベル. */ private int mValueLevel; /** バッテリーのスケール. */ private int mValueScale; /** バッテリーの状態 不明. */ public static final int BATTERY_STATUS_UNKNOWN = 1; /** バッテリーの状態 充電中. */ public static final int BATTERY_STATUS_CHARGING = 2; /** バッテリーの状態 放電中. */ public static final int BATTERY_STATUS_DISCHARGING = 3; /** バッテリーの状態 非充電中. */ public static final int BATTERY_STATUS_NOT_CHARGING = 4; /** バッテリーの状態 満杯. */ public static final int BATTERY_STATUS_FULL = 5; /** 充電中 AC. */ public static final int BATTERY_PLUGGED_AC = 1; /** 充電中 USB. */ public static final int BATTERY_PLUGGED_USB = 2; /** * バッテリーのIntentから情報を取得. * * @param context Context */ public void getBatteryInfo(final Context context) { IntentFilter ifilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED); Intent batteryStatus = null; int i = 0; do { batteryStatus = context.registerReceiver(null, ifilter); } while (i++ < 3 && batteryStatus == null); if (batteryStatus == null) { mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; mValueLevel = 0; mValueScale = 0; return; } // バッテリーの変化を取得 int status = batteryStatus.getIntExtra(BatteryManager.EXTRA_STATUS, -1); switch (status) { case BatteryManager.BATTERY_STATUS_UNKNOWN: mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; break; case BatteryManager.BATTERY_STATUS_CHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_CHARGING; break; case BatteryManager.BATTERY_STATUS_DISCHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_DISCHARGING; break; case BatteryManager.BATTERY_STATUS_NOT_CHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_NOT_CHARGING; break; case BatteryManager.BATTERY_STATUS_FULL: mStatusBattery = HostBatteryManager.BATTERY_STATUS_FULL; break; default: mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; break; } // プラグの状態を取得 int plugged = batteryStatus.getIntExtra("plugged", 0); switch (plugged) { case BatteryManager.BATTERY_PLUGGED_AC: mStatusPlugged = BATTERY_PLUGGED_AC; break; case BatteryManager.BATTERY_PLUGGED_USB: mStatusPlugged = BATTERY_PLUGGED_USB; break; default: break; } mValueLevel = batteryStatus.getIntExtra("level", 0); mValueScale = batteryStatus.getIntExtra("scale", 0); } /** * バッテリーのIntentを設定. 
* * @param intent Batteryの変化で取得できたIntent */ public void setBatteryRequest(final Intent intent) { String mAction = intent.getAction(); if (Intent.ACTION_BATTERY_CHANGED.equals(mAction) || Intent.ACTION_BATTERY_LOW.equals(mAction) || Intent.ACTION_BATTERY_OKAY.equals(mAction)) { // バッテリーの変化を取得 int status = intent.getIntExtra(BatteryManager.EXTRA_STATUS, -1); switch (status) { case BatteryManager.BATTERY_STATUS_UNKNOWN: mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; break; case BatteryManager.BATTERY_STATUS_CHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_CHARGING; break; case BatteryManager.BATTERY_STATUS_DISCHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_DISCHARGING; break; case BatteryManager.BATTERY_STATUS_NOT_CHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_NOT_CHARGING; break; case BatteryManager.BATTERY_STATUS_FULL: mStatusBattery = HostBatteryManager.BATTERY_STATUS_FULL; break; default: mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; break; } mValueLevel = intent.getIntExtra("level", 0); mValueScale = intent.getIntExtra("scale", 0); } else if (Intent.ACTION_POWER_CONNECTED.equals(mAction) || Intent.ACTION_POWER_DISCONNECTED.equals(mAction)) { // プラグの状態を取得 int plugged = intent.getIntExtra("plugged", 0); switch (plugged) { case BatteryManager.BATTERY_PLUGGED_AC: mStatusPlugged = BATTERY_PLUGGED_AC; break; case BatteryManager.BATTERY_PLUGGED_USB: mStatusPlugged = BATTERY_PLUGGED_USB; break; default: break; } } } /** * バッテリーの状態を取得. * * @return statusBattery バッテリーの状態 */ public int getBatteryStatus() { return mStatusBattery; } /** * プラグの状態を取得. * * @return statusPlugged プラグの状態 */ public int getStatusPlugged() { return mStatusPlugged; } /** * バッテリーレベルの取得. * * @return valueLevel バッテリーレベル */ public int getBatteryLevel() { return mValueLevel; } /** * スケールの取得. * * @return batteryStatus バッテリーの状態 */ public int getBatteryScale() { return mValueScale; } }
dConnectDevicePlugin/dConnectDeviceHost/src/org/deviceconnect/android/deviceplugin/host/manager/HostBatteryManager.java
/* HostBatteryManager.java Copyright (c) 2014 NTT DOCOMO,INC. Released under the MIT license http://opensource.org/licenses/mit-license.php */ package org.deviceconnect.android.deviceplugin.host.manager; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.BatteryManager; /** * バッテリー関連の値の処理と保持. */ public class HostBatteryManager { /** バッテリーの状態. */ private int mStatusBattery; /** プラグの状態. */ private int mStatusPlugged; /** バッテリーのレベル. */ private int mValueLevel; /** バッテリーのスケール. */ private int mValueScale; /** バッテリーの状態 不明. */ public static final int BATTERY_STATUS_UNKNOWN = 1; /** バッテリーの状態 充電中. */ public static final int BATTERY_STATUS_CHARGING = 2; /** バッテリーの状態 放電中. */ public static final int BATTERY_STATUS_DISCHARGING = 3; /** バッテリーの状態 非充電中. */ public static final int BATTERY_STATUS_NOT_CHARGING = 4; /** バッテリーの状態 満杯. */ public static final int BATTERY_STATUS_FULL = 5; /** 充電中 AC. */ public static final int BATTERY_PLUGGED_AC = 1; /** 充電中 USB. */ public static final int BATTERY_PLUGGED_USB = 2; /** * バッテリーのIntentから情報を取得. * * @param context Context */ public void getBatteryInfo(final Context context) { IntentFilter ifilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED); Intent batteryStatus = context.registerReceiver(null, ifilter); // バッテリーの変化を取得 int status = batteryStatus.getIntExtra(BatteryManager.EXTRA_STATUS, -1); switch (status) { case BatteryManager.BATTERY_STATUS_UNKNOWN: mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; break; case BatteryManager.BATTERY_STATUS_CHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_CHARGING; break; case BatteryManager.BATTERY_STATUS_DISCHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_DISCHARGING; break; case BatteryManager.BATTERY_STATUS_NOT_CHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_NOT_CHARGING; break; case BatteryManager.BATTERY_STATUS_FULL: mStatusBattery = HostBatteryManager.BATTERY_STATUS_FULL; break; default: mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; break; } // プラグの状態を取得 int plugged = batteryStatus.getIntExtra("plugged", 0); switch (plugged) { case BatteryManager.BATTERY_PLUGGED_AC: mStatusPlugged = BATTERY_PLUGGED_AC; break; case BatteryManager.BATTERY_PLUGGED_USB: mStatusPlugged = BATTERY_PLUGGED_USB; break; default: break; } mValueLevel = batteryStatus.getIntExtra("level", 0); mValueScale = batteryStatus.getIntExtra("scale", 0); } /** * バッテリーのIntentを設定. 
* * @param intent Batteryの変化で取得できたIntent */ public void setBatteryRequest(final Intent intent) { String mAction = intent.getAction(); if (Intent.ACTION_BATTERY_CHANGED.equals(mAction) || Intent.ACTION_BATTERY_LOW.equals(mAction) || Intent.ACTION_BATTERY_OKAY.equals(mAction)) { // バッテリーの変化を取得 int status = intent.getIntExtra(BatteryManager.EXTRA_STATUS, -1); switch (status) { case BatteryManager.BATTERY_STATUS_UNKNOWN: mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; break; case BatteryManager.BATTERY_STATUS_CHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_CHARGING; break; case BatteryManager.BATTERY_STATUS_DISCHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_DISCHARGING; break; case BatteryManager.BATTERY_STATUS_NOT_CHARGING: mStatusBattery = HostBatteryManager.BATTERY_STATUS_NOT_CHARGING; break; case BatteryManager.BATTERY_STATUS_FULL: mStatusBattery = HostBatteryManager.BATTERY_STATUS_FULL; break; default: mStatusBattery = HostBatteryManager.BATTERY_STATUS_UNKNOWN; break; } mValueLevel = intent.getIntExtra("level", 0); mValueScale = intent.getIntExtra("scale", 0); } else if (Intent.ACTION_POWER_CONNECTED.equals(mAction) || Intent.ACTION_POWER_DISCONNECTED.equals(mAction)) { // プラグの状態を取得 int plugged = intent.getIntExtra("plugged", 0); switch (plugged) { case BatteryManager.BATTERY_PLUGGED_AC: mStatusPlugged = BATTERY_PLUGGED_AC; break; case BatteryManager.BATTERY_PLUGGED_USB: mStatusPlugged = BATTERY_PLUGGED_USB; break; default: break; } } } /** * バッテリーの状態を取得. * * @return statusBattery バッテリーの状態 */ public int getBatteryStatus() { return mStatusBattery; } /** * プラグの状態を取得. * * @return statusPlugged プラグの状態 */ public int getStatusPlugged() { return mStatusPlugged; } /** * バッテリーレベルの取得. * * @return valueLevel バッテリーレベル */ public int getBatteryLevel() { return mValueLevel; } /** * スケールの取得. * * @return batteryStatus バッテリーの状態 */ public int getBatteryScale() { return mValueScale; } }
Modify host plug-in. Add retry processing for the case where the Intent is null.
dConnectDevicePlugin/dConnectDeviceHost/src/org/deviceconnect/android/deviceplugin/host/manager/HostBatteryManager.java
Modify host plug-in. Add retry processing for the case where the Intent is null.