lang:         stringclasses, 1 value
license:      stringclasses, 13 values
stderr:       stringlengths, 0-350
commit:       stringlengths, 40-40
returncode:   int64, 0-128
repos:        stringlengths, 7-45.1k
new_contents: stringlengths, 0-1.87M
new_file:     stringlengths, 6-292
old_contents: stringlengths, 0-1.87M
message:      stringlengths, 6-9.26k
old_file:     stringlengths, 6-292
subject:      stringlengths, 0-4.45k
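The columns above describe one commit-level code-change record per row: the repositories the commit appears in, the file path before and after the change, the full old and new file contents, and the commit message/subject. A minimal sketch of how a dataset with this schema could be loaded and inspected with the Hugging Face `datasets` library follows; the dataset path and split name are placeholders, not part of the original card.

```python
# Minimal sketch: load a commit-diff dataset with the columns listed above
# and print a short summary of each record. "user/commit-diffs" is a
# hypothetical identifier; substitute the real dataset path.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")  # placeholder path/split

for row in ds.select(range(3)):
    # Each row pairs the old and new contents of one file touched by a commit.
    print(row["commit"], row["lang"], row["license"])
    print("  file:", row["old_file"], "->", row["new_file"])
    print("  subject:", row["subject"][:80])
    print("  returncode:", row["returncode"], "stderr length:", len(row["stderr"]))
```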
Java
apache-2.0
3a5057ce3ffb36b9ccd0215e15ed2fe41f6a90e0
0
pinnamur/titanium_mobile,collinprice/titanium_mobile,pec1985/titanium_mobile,jvkops/titanium_mobile,jvkops/titanium_mobile,shopmium/titanium_mobile,KangaCoders/titanium_mobile,sriks/titanium_mobile,jvkops/titanium_mobile,rblalock/titanium_mobile,jhaynie/titanium_mobile,cheekiatng/titanium_mobile,falkolab/titanium_mobile,perdona/titanium_mobile,formalin14/titanium_mobile,bright-sparks/titanium_mobile,pinnamur/titanium_mobile,rblalock/titanium_mobile,rblalock/titanium_mobile,collinprice/titanium_mobile,sriks/titanium_mobile,indera/titanium_mobile,indera/titanium_mobile,collinprice/titanium_mobile,prop/titanium_mobile,mano-mykingdom/titanium_mobile,smit1625/titanium_mobile,smit1625/titanium_mobile,ashcoding/titanium_mobile,indera/titanium_mobile,openbaoz/titanium_mobile,AngelkPetkov/titanium_mobile,bhatfield/titanium_mobile,jvkops/titanium_mobile,pinnamur/titanium_mobile,pinnamur/titanium_mobile,bhatfield/titanium_mobile,taoger/titanium_mobile,openbaoz/titanium_mobile,smit1625/titanium_mobile,benbahrenburg/titanium_mobile,FokkeZB/titanium_mobile,linearhub/titanium_mobile,ashcoding/titanium_mobile,indera/titanium_mobile,cheekiatng/titanium_mobile,csg-coder/titanium_mobile,jhaynie/titanium_mobile,KangaCoders/titanium_mobile,pinnamur/titanium_mobile,ashcoding/titanium_mobile,FokkeZB/titanium_mobile,emilyvon/titanium_mobile,csg-coder/titanium_mobile,pinnamur/titanium_mobile,jhaynie/titanium_mobile,bright-sparks/titanium_mobile,bright-sparks/titanium_mobile,KoketsoMabuela92/titanium_mobile,emilyvon/titanium_mobile,ashcoding/titanium_mobile,linearhub/titanium_mobile,benbahrenburg/titanium_mobile,perdona/titanium_mobile,prop/titanium_mobile,sriks/titanium_mobile,AngelkPetkov/titanium_mobile,perdona/titanium_mobile,linearhub/titanium_mobile,cheekiatng/titanium_mobile,sriks/titanium_mobile,pec1985/titanium_mobile,bright-sparks/titanium_mobile,KoketsoMabuela92/titanium_mobile,smit1625/titanium_mobile,peymanmortazavi/titanium_mobile,mano-mykingdom/titanium_mobile,falkolab/titanium_mobile,collinprice/titanium_mobile,prop/titanium_mobile,csg-coder/titanium_mobile,smit1625/titanium_mobile,indera/titanium_mobile,indera/titanium_mobile,benbahrenburg/titanium_mobile,KoketsoMabuela92/titanium_mobile,formalin14/titanium_mobile,taoger/titanium_mobile,falkolab/titanium_mobile,emilyvon/titanium_mobile,shopmium/titanium_mobile,prop/titanium_mobile,formalin14/titanium_mobile,csg-coder/titanium_mobile,linearhub/titanium_mobile,mvitr/titanium_mobile,shopmium/titanium_mobile,peymanmortazavi/titanium_mobile,benbahrenburg/titanium_mobile,FokkeZB/titanium_mobile,smit1625/titanium_mobile,ashcoding/titanium_mobile,benbahrenburg/titanium_mobile,rblalock/titanium_mobile,pec1985/titanium_mobile,mano-mykingdom/titanium_mobile,emilyvon/titanium_mobile,mvitr/titanium_mobile,FokkeZB/titanium_mobile,bhatfield/titanium_mobile,pec1985/titanium_mobile,linearhub/titanium_mobile,ashcoding/titanium_mobile,bhatfield/titanium_mobile,AngelkPetkov/titanium_mobile,peymanmortazavi/titanium_mobile,formalin14/titanium_mobile,bright-sparks/titanium_mobile,pinnamur/titanium_mobile,rblalock/titanium_mobile,taoger/titanium_mobile,peymanmortazavi/titanium_mobile,FokkeZB/titanium_mobile,mvitr/titanium_mobile,csg-coder/titanium_mobile,shopmium/titanium_mobile,KoketsoMabuela92/titanium_mobile,prop/titanium_mobile,emilyvon/titanium_mobile,csg-coder/titanium_mobile,pec1985/titanium_mobile,peymanmortazavi/titanium_mobile,openbaoz/titanium_mobile,peymanmortazavi/titanium_mobile,sriks/titanium_mobile,peymanmortazavi/titanium_mobile,pinnamur/titanium_mobile,ta
oger/titanium_mobile,AngelkPetkov/titanium_mobile,linearhub/titanium_mobile,emilyvon/titanium_mobile,kopiro/titanium_mobile,KangaCoders/titanium_mobile,AngelkPetkov/titanium_mobile,pec1985/titanium_mobile,collinprice/titanium_mobile,prop/titanium_mobile,formalin14/titanium_mobile,smit1625/titanium_mobile,cheekiatng/titanium_mobile,jhaynie/titanium_mobile,AngelkPetkov/titanium_mobile,perdona/titanium_mobile,jhaynie/titanium_mobile,shopmium/titanium_mobile,bhatfield/titanium_mobile,perdona/titanium_mobile,taoger/titanium_mobile,perdona/titanium_mobile,jhaynie/titanium_mobile,benbahrenburg/titanium_mobile,rblalock/titanium_mobile,rblalock/titanium_mobile,ashcoding/titanium_mobile,kopiro/titanium_mobile,peymanmortazavi/titanium_mobile,pinnamur/titanium_mobile,KangaCoders/titanium_mobile,mano-mykingdom/titanium_mobile,bhatfield/titanium_mobile,benbahrenburg/titanium_mobile,collinprice/titanium_mobile,KoketsoMabuela92/titanium_mobile,indera/titanium_mobile,pec1985/titanium_mobile,KoketsoMabuela92/titanium_mobile,sriks/titanium_mobile,formalin14/titanium_mobile,rblalock/titanium_mobile,formalin14/titanium_mobile,ashcoding/titanium_mobile,KangaCoders/titanium_mobile,mano-mykingdom/titanium_mobile,falkolab/titanium_mobile,jvkops/titanium_mobile,openbaoz/titanium_mobile,taoger/titanium_mobile,falkolab/titanium_mobile,mvitr/titanium_mobile,jvkops/titanium_mobile,taoger/titanium_mobile,FokkeZB/titanium_mobile,emilyvon/titanium_mobile,collinprice/titanium_mobile,kopiro/titanium_mobile,falkolab/titanium_mobile,pec1985/titanium_mobile,FokkeZB/titanium_mobile,shopmium/titanium_mobile,shopmium/titanium_mobile,FokkeZB/titanium_mobile,sriks/titanium_mobile,collinprice/titanium_mobile,mvitr/titanium_mobile,jhaynie/titanium_mobile,kopiro/titanium_mobile,cheekiatng/titanium_mobile,perdona/titanium_mobile,AngelkPetkov/titanium_mobile,mvitr/titanium_mobile,mano-mykingdom/titanium_mobile,KangaCoders/titanium_mobile,linearhub/titanium_mobile,sriks/titanium_mobile,KangaCoders/titanium_mobile,csg-coder/titanium_mobile,mano-mykingdom/titanium_mobile,linearhub/titanium_mobile,smit1625/titanium_mobile,mano-mykingdom/titanium_mobile,jvkops/titanium_mobile,KangaCoders/titanium_mobile,openbaoz/titanium_mobile,formalin14/titanium_mobile,benbahrenburg/titanium_mobile,prop/titanium_mobile,KoketsoMabuela92/titanium_mobile,bright-sparks/titanium_mobile,pec1985/titanium_mobile,kopiro/titanium_mobile,cheekiatng/titanium_mobile,kopiro/titanium_mobile,csg-coder/titanium_mobile,prop/titanium_mobile,AngelkPetkov/titanium_mobile,mvitr/titanium_mobile,perdona/titanium_mobile,indera/titanium_mobile,bhatfield/titanium_mobile,cheekiatng/titanium_mobile,kopiro/titanium_mobile,falkolab/titanium_mobile,mvitr/titanium_mobile,jhaynie/titanium_mobile,openbaoz/titanium_mobile,bhatfield/titanium_mobile,falkolab/titanium_mobile,KoketsoMabuela92/titanium_mobile,openbaoz/titanium_mobile,shopmium/titanium_mobile,taoger/titanium_mobile,emilyvon/titanium_mobile,bright-sparks/titanium_mobile,bright-sparks/titanium_mobile,jvkops/titanium_mobile,openbaoz/titanium_mobile,kopiro/titanium_mobile,cheekiatng/titanium_mobile
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2013 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package ti.modules.titanium.ui.widget; import java.lang.ref.WeakReference; import org.appcelerator.kroll.common.Log; import org.appcelerator.titanium.proxy.TiViewProxy; import org.appcelerator.titanium.util.TiUIHelper; import android.content.Context; import android.graphics.Bitmap; import android.graphics.ColorFilter; import android.graphics.Matrix; import android.graphics.Rect; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.view.GestureDetector; import android.view.MotionEvent; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.ImageView.ScaleType; import android.widget.ZoomControls; public class TiImageView extends ViewGroup implements Handler.Callback, OnClickListener { private static final String TAG = "TiImageView"; private static final int CONTROL_TIMEOUT = 4000; private static final int MSG_HIDE_CONTROLS = 500; private Handler handler; private OnClickListener clickListener; private boolean enableScale; private boolean enableZoomControls; private GestureDetector gestureDetector; private ImageView imageView; private ZoomControls zoomControls; private float scaleFactor; private float scaleIncrement; private float scaleMin; private float scaleMax; private Matrix baseMatrix; private Matrix changeMatrix; // Flags to help determine whether width/height is defined, so we can scale appropriately private boolean viewWidthDefined; private boolean viewHeightDefined; private int orientation; private WeakReference<TiViewProxy> proxy; public TiImageView(Context context) { super(context); final TiImageView me = this; handler = new Handler(Looper.getMainLooper(), this); enableZoomControls = false; scaleFactor = 1.0f; scaleIncrement = 0.1f; scaleMin = 1.0f; scaleMax = 5.0f; orientation = 0; baseMatrix = new Matrix(); changeMatrix = new Matrix(); imageView = new ImageView(context); addView(imageView); setEnableScale(true); gestureDetector = new GestureDetector(getContext(), new GestureDetector.SimpleOnGestureListener() { @Override public boolean onDown(MotionEvent e) { if (zoomControls.getVisibility() == View.VISIBLE) { super.onDown(e); return true; } else { onClick(me); return false; } } @Override public boolean onScroll(MotionEvent e1, MotionEvent e2, float dx, float dy) { boolean retValue = false; // Allow scrolling only if the image is zoomed in if (zoomControls.getVisibility() == View.VISIBLE && scaleFactor > 1) { // check if image scroll beyond its borders if (!checkImageScrollBeyondBorders(dx, dy)) { changeMatrix.postTranslate(-dx, -dy); imageView.setImageMatrix(getViewMatrix()); requestLayout(); scheduleControlTimeout(); retValue = true; } } return retValue; } @Override public boolean onSingleTapConfirmed(MotionEvent e) { onClick(me); return super.onSingleTapConfirmed(e); } }); gestureDetector.setIsLongpressEnabled(false); zoomControls = new ZoomControls(context); addView(zoomControls); zoomControls.setVisibility(View.GONE); zoomControls.setZoomSpeed(75); zoomControls.setOnZoomInClickListener(new OnClickListener() { public void onClick(View v) { handleScaleUp(); } }); zoomControls.setOnZoomOutClickListener(new OnClickListener() { public void 
onClick(View v) { handleScaleDown(); } }); super.setOnClickListener(this); } /** * Constructs a new TiImageView object. * @param context the associated context. * @param proxy the associated proxy. */ public TiImageView(Context context, TiViewProxy proxy) { this(context); this.proxy = new WeakReference<TiViewProxy>(proxy); } public void setEnableScale(boolean enableScale) { this.enableScale = enableScale; updateScaleType(); } public void setEnableZoomControls(boolean enableZoomControls) { this.enableZoomControls = enableZoomControls; updateScaleType(); } public Drawable getImageDrawable() { return imageView.getDrawable(); } /** * Sets a Bitmap as the content of imageView * @param bitmap The bitmap to set. If it is null, it will clear the previous image. */ public void setImageBitmap(Bitmap bitmap) { imageView.setImageBitmap(bitmap); } public void setOnClickListener(OnClickListener clickListener) { this.clickListener = clickListener; } public boolean handleMessage(Message msg) { switch (msg.what) { case MSG_HIDE_CONTROLS: { handleHideControls(); return true; } } return false; } public void onClick(View view) { boolean sendClick = true; if (enableZoomControls) { if (zoomControls.getVisibility() != View.VISIBLE) { sendClick = false; manageControls(); zoomControls.setVisibility(View.VISIBLE); } scheduleControlTimeout(); } if (sendClick && clickListener != null) { clickListener.onClick(view); } } private void handleScaleUp() { if (scaleFactor < scaleMax) { onViewChanged(scaleIncrement); } } private void handleScaleDown() { if (scaleFactor > scaleMin) { onViewChanged(-scaleIncrement); } } private void handleHideControls() { zoomControls.setVisibility(View.GONE); } private void manageControls() { if (scaleFactor == scaleMax) { zoomControls.setIsZoomInEnabled(false); } else { zoomControls.setIsZoomInEnabled(true); } if (scaleFactor == scaleMin) { zoomControls.setIsZoomOutEnabled(false); } else { zoomControls.setIsZoomOutEnabled(true); } } private void onViewChanged(float dscale) { updateChangeMatrix(dscale); manageControls(); requestLayout(); scheduleControlTimeout(); } private void computeBaseMatrix() { Drawable d = imageView.getDrawable(); baseMatrix.reset(); if (d != null) { // The base matrix is the matrix that displays the entire image bitmap. // It orients the image when orientation is set and scales in X and Y independently, // so that src matches dst exactly. // This may change the aspect ratio of the src. Rect r = new Rect(); getDrawingRect(r); int intrinsicWidth = d.getIntrinsicWidth(); int intrinsicHeight = d.getIntrinsicHeight(); int dwidth = intrinsicWidth; int dheight = intrinsicHeight; if (orientation > 0) { baseMatrix.postRotate(orientation); if (orientation == 90 || orientation == 270) { dwidth = intrinsicHeight; dheight = intrinsicWidth; } } float vwidth = getWidth() - getPaddingLeft() - getPaddingRight(); float vheight = getHeight() - getPaddingTop() - getPaddingBottom(); RectF dRectF = null; RectF vRectF = new RectF(0, 0, vwidth, vheight); if (orientation == 0) { dRectF = new RectF(0, 0, dwidth, dheight); } else if (orientation == 90) { dRectF = new RectF(-dwidth, 0, 0, dheight); } else if (orientation == 180) { dRectF = new RectF(-dwidth, -dheight, 0, 0); } else if (orientation == 270) { dRectF = new RectF(0, -dheight, dwidth, 0); } else { Log.e(TAG, "Invalid value for orientation. 
Cannot compute the base matrix for the image."); return; } Matrix m = new Matrix(); Matrix.ScaleToFit scaleType; if (viewWidthDefined && viewHeightDefined) { scaleType = Matrix.ScaleToFit.FILL; } else { scaleType = Matrix.ScaleToFit.CENTER; } m.setRectToRect(dRectF, vRectF, scaleType); baseMatrix.postConcat(m); } } private void updateChangeMatrix(float dscale) { changeMatrix.reset(); scaleFactor += dscale; scaleFactor = Math.max(scaleFactor, scaleMin); scaleFactor = Math.min(scaleFactor, scaleMax); changeMatrix.postScale(scaleFactor, scaleFactor, getWidth() / 2, getHeight() / 2); } private Matrix getViewMatrix() { Matrix m = new Matrix(baseMatrix); m.postConcat(changeMatrix); return m; } private void scheduleControlTimeout() { handler.removeMessages(MSG_HIDE_CONTROLS); handler.sendEmptyMessageDelayed(MSG_HIDE_CONTROLS, CONTROL_TIMEOUT); } @Override public boolean onTouchEvent(MotionEvent ev) { boolean handled = false; if (enableZoomControls) { if (zoomControls.getVisibility() == View.VISIBLE) { zoomControls.onTouchEvent(ev); } handled = gestureDetector.onTouchEvent(ev); } if (!handled) { handled = super.onTouchEvent(ev); } return handled; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); int maxWidth = 0; int maxHeight = 0; // if (DBG) { // int w = MeasureSpec.getSize(widthMeasureSpec); // int wm = MeasureSpec.getMode(widthMeasureSpec); // int h = MeasureSpec.getSize(heightMeasureSpec); // int hm = MeasureSpec.getMode(heightMeasureSpec); // // Log.i(LCAT, "w: " + w + " wm: " + wm + " h: " + h + " hm: " + hm); // } // If height or width is not defined, we need to set the height/width properly // so that it doesn't get the content height/width if (!viewWidthDefined || !viewHeightDefined) { Drawable d = imageView.getDrawable(); float aspectRaio = 1; int w = MeasureSpec.getSize(widthMeasureSpec); int h = MeasureSpec.getSize(heightMeasureSpec); if (d != null) { int ih = d.getIntrinsicHeight(); int iw = d.getIntrinsicWidth(); if (ih != 0 && iw != 0) { aspectRaio = ih / iw; } } if (viewWidthDefined) { maxWidth = w; maxHeight = Math.round(w * aspectRaio); } if (viewHeightDefined) { maxHeight = h; maxWidth = Math.round(h / aspectRaio); } } // TODO padding and margins measureChild(imageView, widthMeasureSpec, heightMeasureSpec); maxWidth = Math.max(maxWidth, imageView.getMeasuredWidth()); maxHeight = Math.max(maxHeight, imageView.getMeasuredHeight()); // Allow for zoom controls. 
if (enableZoomControls) { measureChild(zoomControls, widthMeasureSpec, heightMeasureSpec); maxWidth = Math.max(maxWidth, zoomControls.getMeasuredWidth()); maxHeight = Math.max(maxHeight, zoomControls.getMeasuredHeight()); } setMeasuredDimension(resolveSize(maxWidth, widthMeasureSpec), resolveSize(maxHeight, heightMeasureSpec)); } @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { computeBaseMatrix(); imageView.setImageMatrix(getViewMatrix()); int parentLeft = 0; int parentRight = right - left; int parentTop = 0; int parentBottom = bottom - top; // imageView.layout(parentLeft, parentTop, imageView.getMeasuredWidth(), imageView.getMeasuredHeight()); imageView.layout(parentLeft, parentTop, parentRight, parentBottom); if (enableZoomControls && zoomControls.getVisibility() == View.VISIBLE) { int zoomWidth = zoomControls.getMeasuredWidth(); int zoomHeight = zoomControls.getMeasuredHeight(); zoomControls.layout(parentRight - zoomWidth, parentBottom - zoomHeight, parentRight, parentBottom); } TiViewProxy viewProxy = (proxy == null ? null : proxy.get()); TiUIHelper.firePostLayoutEvent(viewProxy); } public void setColorFilter(ColorFilter filter) { imageView.setColorFilter(filter); } private void updateScaleType() { if (orientation > 0 || enableZoomControls) { imageView.setScaleType(ScaleType.MATRIX); imageView.setAdjustViewBounds(false); } else { if (viewWidthDefined && viewHeightDefined) { imageView.setAdjustViewBounds(false); imageView.setScaleType(ScaleType.FIT_XY); } else if (!enableScale) { imageView.setAdjustViewBounds(false); imageView.setScaleType(ScaleType.CENTER); } else { imageView.setAdjustViewBounds(true); imageView.setScaleType(ScaleType.FIT_CENTER); } } requestLayout(); } public void setWidthDefined(boolean defined) { viewWidthDefined = defined; updateScaleType(); } public void setHeightDefined(boolean defined) { viewHeightDefined = defined; updateScaleType(); } public void setOrientation(int orientation) { this.orientation = orientation; updateScaleType(); } private boolean checkImageScrollBeyondBorders(float dx, float dy) { float[] matrixValues = new float[9]; Matrix m = new Matrix(changeMatrix); // Apply the translation m.postTranslate(-dx, -dy); m.getValues(matrixValues); // Image can move only the extra width or height that is available // after scaling from the original width or height float scaledAdditionalHeight = imageView.getHeight() * (matrixValues[4] - 1); float scaledAdditionalWidth = imageView.getWidth() * (matrixValues[0] - 1); if (matrixValues[5] > -scaledAdditionalHeight && matrixValues[5] < 0 && matrixValues[2] > -scaledAdditionalWidth && matrixValues[2] < 0) { return false; } return true; } }
android/modules/ui/src/java/ti/modules/titanium/ui/widget/TiImageView.java
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2013 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package ti.modules.titanium.ui.widget; import java.lang.ref.WeakReference; import org.appcelerator.kroll.common.Log; import org.appcelerator.titanium.proxy.TiViewProxy; import org.appcelerator.titanium.util.TiUIHelper; import android.content.Context; import android.graphics.Bitmap; import android.graphics.ColorFilter; import android.graphics.Matrix; import android.graphics.Rect; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.view.GestureDetector; import android.view.MotionEvent; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.ImageView.ScaleType; import android.widget.ZoomControls; public class TiImageView extends ViewGroup implements Handler.Callback, OnClickListener { private static final String TAG = "TiImageView"; private static final int CONTROL_TIMEOUT = 4000; private static final int MSG_HIDE_CONTROLS = 500; private Handler handler; private OnClickListener clickListener; private boolean enableScale; private boolean enableZoomControls; private GestureDetector gestureDetector; private ImageView imageView; private ZoomControls zoomControls; private float scaleFactor; private float scaleIncrement; private float scaleMin; private float scaleMax; private Matrix baseMatrix; private Matrix changeMatrix; // Flags to help determine whether width/height is defined, so we can scale appropriately private boolean viewWidthDefined; private boolean viewHeightDefined; private int orientation; private WeakReference<TiViewProxy> proxy; public TiImageView(Context context) { super(context); final TiImageView me = this; handler = new Handler(Looper.getMainLooper(), this); enableZoomControls = false; scaleFactor = 1.0f; scaleIncrement = 0.1f; scaleMin = 1.0f; scaleMax = 5.0f; orientation = 0; baseMatrix = new Matrix(); changeMatrix = new Matrix(); imageView = new ImageView(context); addView(imageView); setEnableScale(true); gestureDetector = new GestureDetector(getContext(), new GestureDetector.SimpleOnGestureListener() { @Override public boolean onDown(MotionEvent e) { if (zoomControls.getVisibility() == View.VISIBLE) { super.onDown(e); return true; } else { onClick(me); return false; } } @Override public boolean onScroll(MotionEvent e1, MotionEvent e2, float dx, float dy) { boolean retValue = false; // Allow scrolling only if the image is zoomed in if (zoomControls.getVisibility() == View.VISIBLE && scaleFactor > 1) { // check if image scroll beyond its borders if (!checkImageScrollBeyondBorders(dx, dy)) { changeMatrix.postTranslate(-dx, -dy); imageView.setImageMatrix(getViewMatrix()); requestLayout(); scheduleControlTimeout(); retValue = true; } } return retValue; } @Override public boolean onSingleTapConfirmed(MotionEvent e) { onClick(me); return super.onSingleTapConfirmed(e); } }); gestureDetector.setIsLongpressEnabled(false); zoomControls = new ZoomControls(context); addView(zoomControls); zoomControls.setVisibility(View.GONE); zoomControls.setZoomSpeed(75); zoomControls.setOnZoomInClickListener(new OnClickListener() { public void onClick(View v) { handleScaleUp(); } }); zoomControls.setOnZoomOutClickListener(new OnClickListener() { public void 
onClick(View v) { handleScaleDown(); } }); super.setOnClickListener(this); } /** * Constructs a new TiImageView object. * @param context the associated context. * @param proxy the associated proxy. */ public TiImageView(Context context, TiViewProxy proxy) { this(context); this.proxy = new WeakReference<TiViewProxy>(proxy); } public void setEnableScale(boolean enableScale) { this.enableScale = enableScale; updateScaleType(); } public void setEnableZoomControls(boolean enableZoomControls) { this.enableZoomControls = enableZoomControls; updateScaleType(); } public Drawable getImageDrawable() { return imageView.getDrawable(); } /** * Sets a Bitmap as the content of imageView * @param bitmap The bitmap to set. If it is null, it will clear the previous image. */ public void setImageBitmap(Bitmap bitmap) { imageView.setImageBitmap(bitmap); } public void setOnClickListener(OnClickListener clickListener) { this.clickListener = clickListener; } public boolean handleMessage(Message msg) { switch (msg.what) { case MSG_HIDE_CONTROLS: { handleHideControls(); return true; } } return false; } public void onClick(View view) { boolean sendClick = true; if (enableZoomControls) { if (zoomControls.getVisibility() != View.VISIBLE) { sendClick = false; manageControls(); zoomControls.setVisibility(View.VISIBLE); } scheduleControlTimeout(); } if (sendClick && clickListener != null) { clickListener.onClick(view); } } private void handleScaleUp() { if (scaleFactor < scaleMax) { onViewChanged(scaleIncrement); } } private void handleScaleDown() { if (scaleFactor > scaleMin) { onViewChanged(-scaleIncrement); } } private void handleHideControls() { zoomControls.setVisibility(View.GONE); } private void manageControls() { if (scaleFactor == scaleMax) { zoomControls.setIsZoomInEnabled(false); } else { zoomControls.setIsZoomInEnabled(true); } if (scaleFactor == scaleMin) { zoomControls.setIsZoomOutEnabled(false); } else { zoomControls.setIsZoomOutEnabled(true); } } private void onViewChanged(float dscale) { updateChangeMatrix(dscale); manageControls(); requestLayout(); scheduleControlTimeout(); } private void computeBaseMatrix() { Drawable d = imageView.getDrawable(); baseMatrix.reset(); if (d != null) { // The base matrix is the matrix that displays the entire image bitmap. // It orients the image when orientation is set and scales in X and Y independently, // so that src matches dst exactly. // This may change the aspect ratio of the src. Rect r = new Rect(); getDrawingRect(r); int intrinsicWidth = d.getIntrinsicWidth(); int intrinsicHeight = d.getIntrinsicHeight(); int dwidth = intrinsicWidth; int dheight = intrinsicHeight; if (orientation > 0) { baseMatrix.postRotate(orientation); if (orientation == 90 || orientation == 270) { dwidth = intrinsicHeight; dheight = intrinsicWidth; } } float vwidth = getWidth() - getPaddingLeft() - getPaddingRight(); float vheight = getHeight() - getPaddingTop() - getPaddingBottom(); RectF dRectF = null; RectF vRectF = new RectF(0, 0, vwidth, vheight); if (orientation == 0) { dRectF = new RectF(0, 0, dwidth, dheight); } else if (orientation == 90) { dRectF = new RectF(-dwidth, 0, 0, dheight); } else if (orientation == 180) { dRectF = new RectF(-dwidth, -dheight, 0, 0); } else if (orientation == 270) { dRectF = new RectF(0, -dheight, dwidth, 0); } else { Log.e(TAG, "Invalid value for orientation. 
Cannot compute the base matrix for the image."); return; } Matrix m = new Matrix(); Matrix.ScaleToFit scaleType; if (viewWidthDefined && viewHeightDefined) { scaleType = Matrix.ScaleToFit.FILL; } else { scaleType = Matrix.ScaleToFit.CENTER; } m.setRectToRect(dRectF, vRectF, scaleType); baseMatrix.postConcat(m); } } private void updateChangeMatrix(float dscale) { changeMatrix.reset(); scaleFactor += dscale; scaleFactor = Math.max(scaleFactor, scaleMin); scaleFactor = Math.min(scaleFactor, scaleMax); changeMatrix.postScale(scaleFactor, scaleFactor, getWidth() / 2, getHeight() / 2); } private Matrix getViewMatrix() { Matrix m = new Matrix(baseMatrix); m.postConcat(changeMatrix); return m; } private void scheduleControlTimeout() { handler.removeMessages(MSG_HIDE_CONTROLS); handler.sendEmptyMessageDelayed(MSG_HIDE_CONTROLS, CONTROL_TIMEOUT); } @Override public boolean onTouchEvent(MotionEvent ev) { boolean handled = false; if (enableZoomControls) { if (zoomControls.getVisibility() == View.VISIBLE) { zoomControls.onTouchEvent(ev); } handled = gestureDetector.onTouchEvent(ev); } if (!handled) { handled = super.onTouchEvent(ev); } return handled; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); int maxWidth = 0; int maxHeight = 0; // if (DBG) { // int w = MeasureSpec.getSize(widthMeasureSpec); // int wm = MeasureSpec.getMode(widthMeasureSpec); // int h = MeasureSpec.getSize(heightMeasureSpec); // int hm = MeasureSpec.getMode(heightMeasureSpec); // // Log.i(LCAT, "w: " + w + " wm: " + wm + " h: " + h + " hm: " + hm); // } // TODO padding and margins measureChild(imageView, widthMeasureSpec, heightMeasureSpec); maxWidth = Math.max(maxWidth, imageView.getMeasuredWidth()); maxHeight = Math.max(maxHeight, imageView.getMeasuredHeight()); // Allow for zoom controls. if (enableZoomControls) { measureChild(zoomControls, widthMeasureSpec, heightMeasureSpec); maxWidth = Math.max(maxWidth, zoomControls.getMeasuredWidth()); maxHeight = Math.max(maxHeight, zoomControls.getMeasuredHeight()); } setMeasuredDimension(resolveSize(maxWidth, widthMeasureSpec), resolveSize(maxHeight, heightMeasureSpec)); } @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { computeBaseMatrix(); imageView.setImageMatrix(getViewMatrix()); int parentLeft = 0; int parentRight = right - left; int parentTop = 0; int parentBottom = bottom - top; // imageView.layout(parentLeft, parentTop, imageView.getMeasuredWidth(), imageView.getMeasuredHeight()); imageView.layout(parentLeft, parentTop, parentRight, parentBottom); if (enableZoomControls && zoomControls.getVisibility() == View.VISIBLE) { int zoomWidth = zoomControls.getMeasuredWidth(); int zoomHeight = zoomControls.getMeasuredHeight(); zoomControls.layout(parentRight - zoomWidth, parentBottom - zoomHeight, parentRight, parentBottom); } TiViewProxy viewProxy = (proxy == null ? 
null : proxy.get()); TiUIHelper.firePostLayoutEvent(viewProxy); } public void setColorFilter(ColorFilter filter) { imageView.setColorFilter(filter); } private void updateScaleType() { if (orientation > 0 || enableZoomControls) { imageView.setScaleType(ScaleType.MATRIX); imageView.setAdjustViewBounds(false); } else { if (viewWidthDefined && viewHeightDefined) { imageView.setAdjustViewBounds(false); imageView.setScaleType(ScaleType.FIT_XY); } else if (!enableScale) { imageView.setAdjustViewBounds(false); imageView.setScaleType(ScaleType.CENTER); } else { imageView.setAdjustViewBounds(true); imageView.setScaleType(ScaleType.FIT_CENTER); } } requestLayout(); } public void setWidthDefined(boolean defined) { viewWidthDefined = defined; updateScaleType(); } public void setHeightDefined(boolean defined) { viewHeightDefined = defined; updateScaleType(); } public void setOrientation(int orientation) { this.orientation = orientation; updateScaleType(); } private boolean checkImageScrollBeyondBorders(float dx, float dy) { float[] matrixValues = new float[9]; Matrix m = new Matrix(changeMatrix); // Apply the translation m.postTranslate(-dx, -dy); m.getValues(matrixValues); // Image can move only the extra width or height that is available // after scaling from the original width or height float scaledAdditionalHeight = imageView.getHeight() * (matrixValues[4] - 1); float scaledAdditionalWidth = imageView.getWidth() * (matrixValues[0] - 1); if (matrixValues[5] > -scaledAdditionalHeight && matrixValues[5] < 0 && matrixValues[2] > -scaledAdditionalWidth && matrixValues[2] < 0) { return false; } return true; } }
TIMOB-14395-if height or width not defined, set those keeping aspect ratio
android/modules/ui/src/java/ti/modules/titanium/ui/widget/TiImageView.java
TIMOB-14395-if height or width not defined, set those keeping aspect ratio
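Each record pairs the pre- and post-commit contents of a single file (here, TiImageView.java before and after the TIMOB-14395 aspect-ratio change). A minimal sketch, assuming one record is available as a Python dict named `row`, of how the two versions could be turned back into a unified diff:

```python
# Minimal sketch: reconstruct a unified diff from one record's old/new contents.
# "row" is assumed to be a single record (dict) such as the TiImageView example above.
import difflib

def unified_diff(row: dict) -> str:
    return "".join(difflib.unified_diff(
        row["old_contents"].splitlines(keepends=True),
        row["new_contents"].splitlines(keepends=True),
        fromfile=row["old_file"],
        tofile=row["new_file"],
    ))

# print(unified_diff(row))  # e.g. surfaces the onMeasure() aspect-ratio change
```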
Java
apache-2.0
3c5e13105ed8830597f78b10c217d885f8e9e394
0
argv-minus-one/fop,Distrotech/fop,StrategyObject/fop,argv-minus-one/fop,StrategyObject/fop,StrategyObject/fop,Distrotech/fop,StrategyObject/fop,argv-minus-one/fop,Distrotech/fop,spepping/fop-cs,spepping/fop-cs,argv-minus-one/fop,Distrotech/fop,spepping/fop-cs,StrategyObject/fop,argv-minus-one/fop,spepping/fop-cs
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.plan; import java.io.IOException; import java.io.InputStream; import javax.xml.transform.ErrorListener; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.stream.StreamSource; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xmlgraphics.image.loader.Image; import org.apache.xmlgraphics.image.loader.ImageContext; import org.apache.xmlgraphics.image.loader.ImageInfo; import org.apache.xmlgraphics.image.loader.ImageSize; import org.apache.xmlgraphics.image.loader.impl.AbstractImagePreloader; import org.apache.xmlgraphics.image.loader.impl.ImageXMLDOM; import org.apache.xmlgraphics.image.loader.util.ImageUtil; import org.apache.fop.util.DefaultErrorListener; import org.apache.fop.util.UnclosableInputStream; /** * Image preloader for Plan images. 
*/ public class PreloaderPlan extends AbstractImagePreloader { /** Logger instance */ private static Log log = LogFactory.getLog(PreloaderPlan.class); /** {@inheritDoc} */ public ImageInfo preloadImage(String uri, Source src, ImageContext context) throws IOException { if (!ImageUtil.hasInputStream(src)) { //TODO Remove this and support DOMSource and possibly SAXSource return null; } ImageInfo info = getImage(uri, src, context); if (info != null) { ImageUtil.closeQuietly(src); //Image is fully read } return info; } private ImageInfo getImage(String uri, Source src, ImageContext context) throws IOException { InputStream in = new UnclosableInputStream(ImageUtil.needInputStream(src)); try { Document planDoc = getDocument(in); Element rootEl = planDoc.getDocumentElement(); if (!PlanElementMapping.NAMESPACE.equals( rootEl.getNamespaceURI())) { in.reset(); return null; } //Have to render the plan to know its size PlanRenderer pr = new PlanRenderer(); Document svgDoc = pr.createSVGDocument(planDoc); float width = pr.getWidth(); float height = pr.getHeight(); //Return converted SVG image ImageInfo info = new ImageInfo(uri, "image/svg+xml"); final ImageSize size = new ImageSize(); size.setSizeInMillipoints( Math.round(width * 1000), Math.round(height * 1000)); //Set the resolution to that of the FOUserAgent size.setResolution(context.getSourceResolution()); size.calcPixelsFromSize(); info.setSize(size); //The whole image had to be loaded for this, so keep it Image image = new ImageXMLDOM(info, svgDoc, svgDoc.getDocumentElement().getNamespaceURI()); info.getCustomObjects().put(ImageInfo.ORIGINAL_IMAGE, image); return info; } catch (TransformerException e) { try { in.reset(); } catch (IOException ioe) { // we're more interested in the original exception } log.debug("Error while trying to parsing a Plan file: " + e.getMessage()); return null; } } private Document getDocument(InputStream in) throws TransformerException { TransformerFactory tFactory = TransformerFactory.newInstance(); //Custom error listener to minimize output to console ErrorListener errorListener = new DefaultErrorListener(log); tFactory.setErrorListener(errorListener); Transformer transformer = tFactory.newTransformer(); transformer.setErrorListener(errorListener); Source source = new StreamSource(in); DOMResult res = new DOMResult(); transformer.transform(source, res); Document doc = (Document)res.getNode(); return doc; } }
examples/plan/src/org/apache/fop/plan/PreloaderPlan.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.plan; import java.io.IOException; import java.io.InputStream; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.stream.StreamSource; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xmlgraphics.image.loader.Image; import org.apache.xmlgraphics.image.loader.ImageContext; import org.apache.xmlgraphics.image.loader.ImageInfo; import org.apache.xmlgraphics.image.loader.ImageSize; import org.apache.xmlgraphics.image.loader.impl.AbstractImagePreloader; import org.apache.xmlgraphics.image.loader.impl.ImageXMLDOM; import org.apache.xmlgraphics.image.loader.util.ImageUtil; import org.apache.fop.util.UnclosableInputStream; /** * Image preloader for Plan images. */ public class PreloaderPlan extends AbstractImagePreloader { /** Logger instance */ private static Log log = LogFactory.getLog(PreloaderPlan.class); /** {@inheritDoc} */ public ImageInfo preloadImage(String uri, Source src, ImageContext context) throws IOException { if (!ImageUtil.hasInputStream(src)) { //TODO Remove this and support DOMSource and possibly SAXSource return null; } ImageInfo info = getImage(uri, src, context); if (info != null) { ImageUtil.closeQuietly(src); //Image is fully read } return info; } private ImageInfo getImage(String uri, Source src, ImageContext context) throws IOException { InputStream in = new UnclosableInputStream(ImageUtil.needInputStream(src)); try { TransformerFactory tFactory = TransformerFactory.newInstance(); Transformer transformer = tFactory.newTransformer(); Source source = new StreamSource(in); DOMResult res = new DOMResult(); transformer.transform(source, res); //Have to render the plan to know its size PlanRenderer pr = new PlanRenderer(); Document planDoc = (Document)res.getNode(); Element rootEl = planDoc.getDocumentElement(); if (!PlanElementMapping.NAMESPACE.equals(rootEl.getNamespaceURI())) { in.reset(); return null; } Document svgDoc = pr.createSVGDocument(planDoc); float width = pr.getWidth(); float height = pr.getHeight(); //Return converted SVG image ImageInfo info = new ImageInfo(uri, "image/svg+xml"); final ImageSize size = new ImageSize(); size.setSizeInMillipoints( Math.round(width * 1000), Math.round(height * 1000)); //Set the resolution to that of the FOUserAgent size.setResolution(context.getSourceResolution()); size.calcPixelsFromSize(); info.setSize(size); //The whole image had to be loaded for this, so keep it Image image = new ImageXMLDOM(info, svgDoc, 
svgDoc.getDocumentElement().getNamespaceURI()); info.getCustomObjects().put(ImageInfo.ORIGINAL_IMAGE, image); return info; } catch (TransformerException e) { try { in.reset(); } catch (IOException ioe) { // we're more interested in the original exception } log.debug("Error while trying to parsing a Plan file: " + e.getMessage()); return null; } } }
Added ErrorListener for plan extension's DOM loading because of bugs in some XSLT implementations. git-svn-id: c0267665a169db5dcedcb03e6f59a7ba0e44e6f2@744860 13f79535-47bb-0310-9956-ffa450edef68
examples/plan/src/org/apache/fop/plan/PreloaderPlan.java
Added ErrorListener for plan extension's DOM loading because of bugs in some XSLT implementations.
Java
apache-2.0
0b8461de3a8c6cd5228b9cb987d1b0c1f63de9db
0
darranl/directory-shared
/* * Copyright 2005 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ldap.common.codec.search; import org.apache.asn1.codec.DecoderException; import org.apache.asn1.ber.grammar.IGrammar; import org.apache.asn1.ber.grammar.AbstractGrammar; import org.apache.asn1.ber.grammar.GrammarTransition; import org.apache.asn1.ber.grammar.GrammarAction; import org.apache.asn1.ber.IAsn1Container; import org.apache.asn1.ber.tlv.UniversalTag; import org.apache.asn1.ber.tlv.TLV; import org.apache.asn1.ber.tlv.Value; import org.apache.asn1.util.BooleanDecoderException; import org.apache.asn1.util.BooleanDecoder; import org.apache.ldap.common.codec.AttributeValueAssertion; import org.apache.ldap.common.codec.LdapConstants; import org.apache.ldap.common.codec.LdapMessage; import org.apache.ldap.common.codec.LdapMessageContainer; import org.apache.ldap.common.codec.LdapStatesEnum; import org.apache.ldap.common.codec.util.LdapString; import org.apache.ldap.common.codec.util.LdapStringEncodingException; import org.apache.ldap.common.name.LdapDN; import org.apache.ldap.common.util.StringTools; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class implements the Filter grammar. All the actions are declared in this * class. As it is a singleton, these declaration are only done once. * * If an action is to be added or modified, this is where the work is to be done ! * * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class FilterGrammar extends AbstractGrammar implements IGrammar { //~ Static fields/initializers ----------------------------------------------------------------- /** The logger */ private static final Logger log = LoggerFactory.getLogger( FilterGrammar.class ); /** The instance of grammar. FilterGrammar is a singleton */ private static IGrammar instance = new FilterGrammar(); //~ Constructors ------------------------------------------------------------------------------- /** * Creates a new LdapResultGrammar object. */ private FilterGrammar() { name = FilterGrammar.class.getName(); statesEnum = LdapStatesEnum.getInstance(); // Create the transitions table super.transitions = new GrammarTransition[LdapStatesEnum.LAST_FILTER_STATE][256]; //============================================================================================ // Search Request And Filter // This is quite complicated, because we have a tree structure to build, // and we may have many elements on each node. For instance, considering the // search filter : // (& (| (a = b) (c = d)) (! (e = f)) (attr =* h)) // We will have to create an And filter with three children : // - an Or child, // - a Not child // - and a Present child. // The Or child will also have two children. // // We know when we have a children while decoding the PDU, because the length // of its parent has not yet reached its expected length. 
// // This search filter : // (&(|(objectclass=top)(ou=contacts))(!(objectclass=ttt))(objectclass=*top)) // is encoded like this : // +----------------+---------------+ // | ExpectedLength | CurrentLength | //+-----------------------------+----------------+---------------+ //|A0 52 | 82 | 0 | new level 1 //| A1 24 | 82 36 | 0 0 | new level 2 //| A3 12 | 82 36 18 | 0 0 0 | new level 3 //| 04 0B 'objectclass' | 82 36 18 | 0 0 13 | //| 04 03 'top' | 82 36 18 | 0 20 18 | //| | ^ ^ | //| | | | | //| | +---------------+ | //+-----------------------------* end level 3 -------------------* //| A3 0E | 82 36 14 | 0 0 0 | new level 3 //| 04 02 'ou' | 82 36 14 | 0 0 4 | //| 04 08 'contacts' | 82 36 14 | 38 36 14 | //| | ^ ^ ^ ^ | //| | | | | | | //| | | +-------------|--+ | //| | +----------------+ | //+-----------------------------* end level 3, end level 2 ------* //| A2 14 | 82 20 | 38 0 | new level 2 //| A3 12 | 82 20 18 | 38 0 0 | new level 3 //| 04 0B 'objectclass' | 82 20 18 | 38 0 13 | //| 04 03 'ttt' | 82 20 18 | 60 20 18 | //| | ^ ^ ^ ^ | //| | | | | | | //| | | +-------------|--+ | //| | +----------------+ | //+-----------------------------* end level 3, end level 2 ------* //| A4 14 | 82 20 | 60 0 | new level 2 //| 04 0B 'objectclass' | 82 20 | 60 13 | //| 30 05 | 82 20 | 60 13 | //| 82 03 'top' | 82 20 | 82 20 | //| | ^ ^ ^ ^ | //| | | | | | | //| | | +-------------|--+ | //| | +----------------+ | //+-----------------------------* end level 2, end level 1 ------* //+-----------------------------+----------------+---------------+ // // When the current length equals the expected length of the parent PDU, // then we are able to 'close' the parent : it has all its children. This // is propagated through all the tree, until either there are no more // parents, or the expected length of the parent is different from the // current length. // //============================================================================================ // Filter ::= CHOICE { // and [0] SET OF Filter, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.AND_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_AND_VALUE, null ); // Filter ::= CHOICE { // ... // or [1] SET OF Filter, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.OR_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_OR_VALUE, null ); // Filter ::= CHOICE { // ... // not [2] Filter, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.NOT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_NOT_VALUE, null ); // Filter ::= CHOICE { // ... // equalityMatch [3] AttributeValueAssertion, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.EQUALITY_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_EQUALITY_MATCH_VALUE, null ); // Filter ::= CHOICE { // ... // substrings [4] SubstringFilter, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.SUBSTRINGS_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_VALUE, null ); // Filter ::= CHOICE { // ... // greaterOrEqual [5] AttributeValueAssertion, (Tag) // ... 
// Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.GREATER_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_GREATER_OR_EQUAL_VALUE, null ); // Filter ::= CHOICE { // ... // lessOrEqual [6] AttributeValueAssertion, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.LESS_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_LESS_OR_EQUAL_VALUE, null ); // Filter ::= CHOICE { // ... // present [7] AttributeDescription, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.PRESENT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_PRESENT_VALUE, null ); // Filter ::= CHOICE { // ... // approxMatch [8] AttributeValueAssertion, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.APPROX_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_APPROX_MATCH_VALUE, null ); // Filter ::= CHOICE { // ... // extensibleMatch [9] ExtensibleMatchFilter } (Tag) // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.EXTENSIBLE_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_EXTENSIBLE_MATCH_VALUE, null ); // Filter ::= CHOICE { // and [0] SET OF Filter, (Value) // ... // We just have to switch to the initial state of Filter, because this is what // we will get ! super.transitions[LdapStatesEnum.FILTER_AND_VALUE][LdapConstants.AND_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_AND_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init And Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the SearchRequest Filter andFilter = new AndFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(andFilter); andFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(andFilter); andFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(andFilter); } }); // Filter ::= CHOICE { // ... // or [1] SET OF Filter, (Value) // ... // We just have to switch to the initial state of Filter, because this is what // we will get ! super.transitions[LdapStatesEnum.FILTER_OR_VALUE][LdapConstants.OR_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_OR_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init Or Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the SearchRequest Filter orFilter = new OrFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. 
((ConnectorFilter)currentFilter).addFilter(orFilter); orFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(orFilter); orFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(orFilter); } }); // Filter ::= CHOICE { // ... // not [2] Filter, (Value) // ... // We just have to switch to the initial state of Filter, because this is what // we will get ! super.transitions[LdapStatesEnum.FILTER_NOT_VALUE][LdapConstants.NOT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_NOT_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init Not Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the SearchRequest Filter notFilter = new NotFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(notFilter); notFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(notFilter); notFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(notFilter); } }); // Filter ::= CHOICE { // ... // equalityMatch [3] AttributeValueAssertion, (Value) // ... // We will create the filter container (as this is an equalityMatch filter, // we will create an AttributeValueAssertionFilter). super.transitions[LdapStatesEnum.FILTER_EQUALITY_MATCH_VALUE][LdapConstants.EQUALITY_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_EQUALITY_MATCH_VALUE, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, new GrammarAction( "Init Equality Match Filter" ) { public void action( IAsn1Container container ) throws DecoderException { compareFilterAction(container, LdapConstants.EQUALITY_MATCH_FILTER); } }); // Filter ::= CHOICE { // ... // greaterOrEqual [5] AttributeValueAssertion, (Value) // ... // We will create the filter container (as this is an GreaterOrEqual filter, // we will create an AttributeValueAssertionFilter). super.transitions[LdapStatesEnum.FILTER_GREATER_OR_EQUAL_VALUE][LdapConstants.GREATER_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_GREATER_OR_EQUAL_VALUE, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, new GrammarAction( "Init Greater Or Equal Filter" ) { public void action( IAsn1Container container ) throws DecoderException { compareFilterAction(container, LdapConstants.GREATER_OR_EQUAL_FILTER); } } ); // Filter ::= CHOICE { // ... // lessOrEqual [6] AttributeValueAssertion, (Value) // ... // We will create the filter container (as this is an lessOrEqual filter, // we will create an AttributeValueAssertionFilter). super.transitions[LdapStatesEnum.FILTER_LESS_OR_EQUAL_VALUE][LdapConstants.LESS_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_LESS_OR_EQUAL_VALUE, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, new GrammarAction( "Init Less Or Equal Filter" ) { public void action( IAsn1Container container ) throws DecoderException { compareFilterAction(container, LdapConstants.LESS_OR_EQUAL_FILTER ); } } ); // Filter ::= CHOICE { // ... // approxMatch [8] AttributeValueAssertion, (Value) // ... 
// We will create the filter container (as this is an approxMatch filter, // we will create an AttributeValueAssertionFilter). super.transitions[LdapStatesEnum.FILTER_APPROX_MATCH_VALUE][LdapConstants.APPROX_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_APPROX_MATCH_VALUE, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, new GrammarAction( "Init ApproxMatch Filter" ) { public void action( IAsn1Container container ) throws DecoderException { compareFilterAction(container, LdapConstants.APPROX_MATCH_FILTER ); } } ); // AttributeValueAssertion ::= SEQUENCE { // attributeDesc AttributeDescription, (TAG) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE, null); // AttributeValueAssertion ::= SEQUENCE { // attributeDesc AttributeDescription, (VALUE) // ... // We have to set the attribute description in the current filter. // It could be an equalityMatch, greaterOrEqual, lessOrEqual or an // approxMatch filter. super.transitions[LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE, LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG, new GrammarAction( "Init attributeDesc Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); AttributeValueAssertion assertion = new AttributeValueAssertion(); try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); assertion.setAttributeDesc( type ); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "The assertion description ({}) is invalid", msg ); throw new DecoderException( "Invalid assertion description " + msg + ", : " + lsee.getMessage() ); } AttributeValueAssertionFilter currentFilter = (AttributeValueAssertionFilter)searchRequest.getCurrentFilter(); currentFilter.setAssertion(assertion); } }); // AttributeValueAssertion ::= SEQUENCE { // ... // assertionValue AssertionValue } (TAG) // Nothing to do. super.transitions[LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG, LdapStatesEnum.FILTER_ASSERTION_VALUE_VALUE, null); // AttributeValueAssertion ::= SEQUENCE { // ... // assertionValue AssertionValue } (VALUE) // We have to set the attribute description in the current filter. // It could be an equalityMatch, greaterOrEqual, lessOrEqual or an // approxMatch filter. // Whgen finished, we will transit to the first state. super.transitions[LdapStatesEnum.FILTER_ASSERTION_VALUE_VALUE][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ASSERTION_VALUE_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init AssertionValue Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // The value can be null. 
Object assertionValue = StringTools.EMPTY_BYTES; if ( tlv.getLength().getLength() != 0 ) { assertionValue = tlv.getValue().getData(); } AttributeValueAssertionFilter currentFilter = (AttributeValueAssertionFilter)searchRequest.getCurrentFilter(); AttributeValueAssertion assertion = currentFilter.getAssertion(); if ( ldapMessageContainer.isBinary( assertion.getAttributeDesc() ) ) { assertion.setAssertionValue( assertionValue ); } else { assertion.setAssertionValue( StringTools.utf8ToString( (byte[])assertionValue ) ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } }); // AttributeValueAssertion ::= SEQUENCE { // attributeDesc AttributeDescription, (VALUE) // ... // We have to set the attribute description in the current filter. // It could be an equalityMatch, greaterOrEqual, lessOrEqual or an // approxMatch filter. super.transitions[LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE, LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG, new GrammarAction( "Init attributeDesc Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); AttributeValueAssertion assertion = new AttributeValueAssertion(); try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); assertion.setAttributeDesc( type ); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "The assertion value ({}) is invalid", msg ); throw new DecoderException( "Invalid assertion value " + msg + ", : " + lsee.getMessage() ); } AttributeValueAssertionFilter currentFilter = (AttributeValueAssertionFilter)searchRequest.getCurrentFilter(); currentFilter.setAssertion(assertion); } }); // AttributeValueAssertion ::= SEQUENCE { // ... // assertionValue AssertionValue } (TAG) // Nothing to do. super.transitions[LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG, LdapStatesEnum.FILTER_ASSERTION_VALUE_VALUE, null); // Filter ::= CHOICE { // ... // present [7] AttributeDescription, (Value) // ... super.transitions[LdapStatesEnum.FILTER_PRESENT_VALUE][LdapConstants.PRESENT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_PRESENT_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init present filter Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // We can allocate the Attribute Value Assertion PresentFilter presentFilter = new PresentFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(presentFilter); presentFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. 
//searchRequest.setCurrentFilter(presentFilter); presentFilter.setParent( searchRequest ); searchRequest.setFilter( presentFilter ); } String value = StringTools.utf8ToString( tlv.getValue().getData() ); if ( StringTools.isEmpty( value ) ) { presentFilter.setAttributeDescription( LdapString.EMPTY_STRING ); } else { // Store the value. try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); presentFilter.setAttributeDescription( type ); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "Present filter attribute description ({}) is invalid", msg ); throw new DecoderException( "Invalid present filter attribute description " + msg + ", : " + lsee.getMessage() ); } } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } } ); // Here we are dealing with substrings. LDAP grammar is not very explicit about // what is allowed (-- at least one must be present !!!), while RFC 2254 is // really clear. But LDAP grammar is the one to follow... // // substring ::= attr "=" [AttributeValue] any [AttributeValue] // any ::= "*" *(AttributeValue "*") // // Filter ::= CHOICE { // ... // substrings [4] SubstringFilter, (Value) // ... // Store the substring super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_VALUE][LdapConstants.SUBSTRINGS_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_TAG, new GrammarAction( "Init Substring Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // We can allocate the SearchRequest Filter substringFilter = new SubstringFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(substringFilter); substringFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(substringFilter); substringFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(substringFilter); // As this is a new Constructed object, we have to init its length int expectedLength = tlv.getLength().getLength(); substringFilter.setExpectedLength( expectedLength ); substringFilter.setCurrentLength( 0 ); } } ); // SubstringFilter ::= SEQUENCE { // type AttributeDescription, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_TAG][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_VALUE, null ); // SubstringFilter ::= SEQUENCE { // type AttributeDescription, (Value) // ... 
// super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_VALUE][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_TAG, new GrammarAction( "Store substring filter Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. SubstringFilter substringFilter = (SubstringFilter)searchRequest.getCurrentFilter(); try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); substringFilter.setType( type ); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "The substring filter type ({}) is invalid", msg ); throw new DecoderException( "Invalid substring filter type " + msg + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } } ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_TAG][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_VALUE, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { (Value) // ... // Nothing to do. Here, we may have three possibilities. We may have an "initial" value, // or an "any" value, or a "final" value. Any other option is an error. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_VALUE][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // initial [0] LDAPString, (Tag) // ... // We have an "initial" value. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.SEARCH_SUBSTRINGS_INITIAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_INITIAL_VALUE, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // initial [0] LDAPString, (Value) // ... // Store the initial value. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_INITIAL_VALUE][LdapConstants.SEARCH_SUBSTRINGS_INITIAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_INITIAL_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG, new GrammarAction( "Store substring filter initial Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. 
SubstringFilter substringFilter = (SubstringFilter)searchRequest.getCurrentFilter(); try { substringFilter.setInitialSubstrings(new LdapString(tlv.getValue().getData())); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "The substring filter initial ({}) is invalid" ); throw new DecoderException( "Invalid substring filter initial " + msg + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } }); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // any [1] LDAPString, (Tag) // ... // We have an 'any' value without an 'initial' value. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.SEARCH_SUBSTRINGS_ANY_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_VALUE, null); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // initial [0] LDAPString, // any [1] LDAPString, (Tag) // ... // We had an 'initial' substring, and now we have an 'any' substring. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG][LdapConstants.SEARCH_SUBSTRINGS_ANY_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_VALUE, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // any [1] LDAPString, (Value) // ... // Store the 'any' value. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_VALUE][LdapConstants.SEARCH_SUBSTRINGS_ANY_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG, new GrammarAction( "Store substring filter any Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. SubstringFilter substringFilter = (SubstringFilter)searchRequest.getCurrentFilter(); try { substringFilter.addAnySubstrings(new LdapString(tlv.getValue().getData())); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "The substring any filter ({}) is invalid", msg ); throw new DecoderException( "Invalid substring any filter " + msg + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } }); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // any [1] LDAPString, // final [2] LDAPString, (Tag) // } // // We have an 'final' value after an 'any' value. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG][LdapConstants.SEARCH_SUBSTRINGS_FINAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE, null); // SubstringFilter ::= SEQUENCE { // ... 
// -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // final [2] LDAPString, (Tag) // } // // We have an 'final' value only. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.SEARCH_SUBSTRINGS_FINAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE, null); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // initial [0] LDAPString, // final [2] LDAPString, (Tag) // } // We had an 'initial' substring, and now we have an 'final' substring. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG][LdapConstants.SEARCH_SUBSTRINGS_FINAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // final [2] LDAPString, (Value) // Store the initial value. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE][LdapConstants.SEARCH_SUBSTRINGS_FINAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE, LdapStatesEnum.END_STATE, new GrammarAction( "Store substring filter final Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. SubstringFilter substringFilter = (SubstringFilter)searchRequest.getCurrentFilter(); try { substringFilter.setFinalSubstrings(new LdapString(tlv.getValue().getData())); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "The substring final filter ({}) is invalid", msg ); throw new DecoderException( "Invalid substring final filter " + msg + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } }); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // } // // Has we must have at least an initial, any or final value, every other value is an error. 
super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.AND_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.OR_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.NOT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.EQUALITY_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.SUBSTRINGS_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.GREATER_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.LESS_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.PRESENT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); 
super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.APPROX_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.EXTENSIBLE_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); // Here we are dealing with extensible matches // // Filter ::= CHOICE { // ... // extensibleMatch [9] MatchingRuleAssertion} (Value) // // Nothing to do super.transitions[LdapStatesEnum.FILTER_EXTENSIBLE_MATCH_VALUE][LdapConstants.EXTENSIBLE_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_EXTENSIBLE_MATCH_VALUE, LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_TAG, new GrammarAction( "Init extensible match Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the ExtensibleMatch Filter Filter extensibleMatchFilter = new ExtensibleMatchFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(extensibleMatchFilter); extensibleMatchFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(extensibleMatchFilter); extensibleMatchFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(extensibleMatchFilter); // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } } ); // MatchingRuleAssertion ::= SEQUENCE { (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_TAG][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_TAG, LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { (Value) // ... // Nothing to do. Two cases next : we may have a matching rule or a type. // At least one of those two elements must be present. 
super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_VALUE][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_VALUE, LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG, null); // MatchingRuleAssertion ::= SEQUENCE { // matchingRule [1] MatchingRuleId OPTIONAL, (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG][LdapConstants.SEARCH_MATCHING_RULE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG, LdapStatesEnum.FILTER_MATCHING_RULE_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // matchingRule [1] MatchingRuleId OPTIONAL, (Value) // ... // Store the matching rule value. super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_VALUE][LdapConstants.SEARCH_MATCHING_RULE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_VALUE, LdapStatesEnum.FILTER_MATCHING_TYPE_OR_MATCH_VALUE_TAG, new GrammarAction( "Store matching rule Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. ExtensibleMatchFilter extensibleMatchFilter = (ExtensibleMatchFilter)searchRequest.getCurrentFilter(); try { extensibleMatchFilter.setMatchingRule(new LdapString(tlv.getValue().getData())); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "The matching rule ({}) is invalid", msg ); throw new DecoderException( "Invalid matching rule " + msg + ", : " + lsee.getMessage() ); } } }); // MatchingRuleAssertion ::= SEQUENCE { // (void) // type [2] AttributeDescription OPTIONAL, (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG][LdapConstants.SEARCH_TYPE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG, LdapStatesEnum.FILTER_MATCHING_TYPE_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // matchingRule [1] MatchingRuleId OPTIONAL, // type [2] AttributeDescription OPTIONAL, (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_TYPE_OR_MATCH_VALUE_TAG][LdapConstants.SEARCH_TYPE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG, LdapStatesEnum.FILTER_MATCHING_TYPE_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // ... // type [2] AttributeDescription OPTIONAL, (Length) // ... // Store the matching type value. super.transitions[LdapStatesEnum.FILTER_MATCHING_TYPE_VALUE][LdapConstants.SEARCH_TYPE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_TYPE_VALUE, LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_TAG, new GrammarAction( "Store matching type Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. 
ExtensibleMatchFilter extensibleMatchFilter = (ExtensibleMatchFilter)searchRequest.getCurrentFilter(); try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); extensibleMatchFilter.setType( type ); } catch ( LdapStringEncodingException lsee ) { String msg = StringTools.dumpBytes( tlv.getValue().getData() ); log.error( "The match filter ({}) is invalid", msg ); throw new DecoderException( "Invalid match filter " + msg + ", : " + lsee.getMessage() ); } } }); // MatchingRuleAssertion ::= SEQUENCE { // ... // matchValue [3] AssertionValue, (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_TAG][LdapConstants.SEARCH_MATCH_VALUE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_TAG, LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // ... // matchValue [3] AssertionValue, (Value) // ... // Store the matching type value. super.transitions[LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_VALUE][LdapConstants.SEARCH_MATCH_VALUE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_VALUE, LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_OR_END_TAG, new GrammarAction( "Store matching match value Value" ) { public void action( IAsn1Container container ) { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. ExtensibleMatchFilter extensibleMatchFilter = (ExtensibleMatchFilter)searchRequest.getCurrentFilter(); extensibleMatchFilter.setMatchValue( StringTools.utf8ToString( tlv.getValue().getData() ) ); } }); // MatchingRuleAssertion ::= SEQUENCE { // ... // dnAttributes [4] BOOLEAN DEFAULT FALSE } (Tag) // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_OR_END_TAG][LdapConstants.DN_ATTRIBUTES_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_OR_END_TAG, LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // ... // dnAttributes [4] BOOLEAN DEFAULT FALSE } (Length) // Store the matching type value. super.transitions[LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_VALUE][LdapConstants.DN_ATTRIBUTES_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_VALUE, LdapStatesEnum.END_STATE, new GrammarAction( "Store matching dnAttributes Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. ExtensibleMatchFilter extensibleMatchFilter = (ExtensibleMatchFilter)searchRequest.getCurrentFilter(); // We get the value. If it's a 0, it's a FALSE. If it's // a FF, it's a TRUE. Any other value should be an error, // but we could relax this constraint. So if we have something // which is not 0, it will be interpreted as TRUE, but we // will generate a warning. Value value = tlv.getValue(); try { extensibleMatchFilter.setDnAttributes( BooleanDecoder.parse( value ) ); } catch ( BooleanDecoderException bde ) { log.error("The DN attributes flag {} is invalid : {}. 
It should be 0 or 255", StringTools.dumpBytes( value.getData() ), bde.getMessage() ); throw new DecoderException( bde.getMessage() ); } if ( log.isDebugEnabled() ) { log.debug( "DN Attributes : {}", new Boolean( extensibleMatchFilter.isDnAttributes() ) ); } } }); } //~ Methods ------------------------------------------------------------------------------------ /** * This class is a singleton. * * @return An instance on this grammar */ public static IGrammar getInstance() { return instance; } /** * This method is used to clear the filter's stack for terminated elements. An element * is considered as terminated either if : * - it's a final element (ie an element which cannot contains a Filter) * - its current length equals its expected length. * * @param container The container being decoded */ private void unstackFilters( IAsn1Container container ) { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); // We know have to check if the parent has been completed if (tlv.getParent().getExpectedLength() == 0) { TLV parent = tlv.getParent(); // The parent has been completed, we have to switch it while ( (parent != null) && (parent.getExpectedLength() == 0) ) { parent = parent.getParent(); if ( ( currentFilter != null ) && ( currentFilter.getParent() instanceof Filter ) ) { currentFilter = (Filter)currentFilter.getParent(); } else { currentFilter = null; break; } } searchRequest.setCurrentFilter(currentFilter); } } /** * This method is used by each comparaison filters (=, <=, >= or ~=). * * @param container The LdapContainer * @throws DecoderException If any error occurs. */ private void compareFilterAction( IAsn1Container container , int filterType ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the Attribute Value Assertion Filter filter = new AttributeValueAssertionFilter( filterType ); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(filter); filter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. filter.setParent( searchRequest ); searchRequest.setFilter(filter); } searchRequest.setCurrentFilter(filter); } }
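// A minimal, self-contained sketch of the pattern the grammar actions above repeat:
// allocate a Filter, attach it to the current connector filter (or make it the root),
// make it the current filter, and pop completed connectors again once all of their
// children have been decoded, which is what unstackFilters() does. All names below are
// hypothetical simplifications, not the Apache Directory types, and an explicit child
// count stands in for the TLV expected-length bookkeeping so the sketch runs on its own.
import java.util.ArrayList;
import java.util.List;

class FilterTreeSketch
{
    static abstract class Node { Node parent; }

    static class Leaf extends Node
    {
        final String text;
        Leaf( String text ) { this.text = text; }
        public String toString() { return text; }
    }

    static class Composite extends Node
    {
        final char op;                        // '&', '|' or '!'
        final int expectedChildren;           // stands in for the parent TLV's expected length
        final List<Node> children = new ArrayList<Node>();

        Composite( char op, int expectedChildren ) { this.op = op; this.expectedChildren = expectedChildren; }

        boolean isComplete() { return children.size() == expectedChildren; }

        public String toString()
        {
            StringBuilder sb = new StringBuilder( "(" ).append( op );
            for ( Node child : children ) { sb.append( child ); }
            return sb.append( ')' ).toString();
        }
    }

    private Composite current;                // like SearchRequest.getCurrentFilter()
    private Node root;                        // like SearchRequest.getFilter()

    void push( Composite composite ) { attach( composite ); current = composite; }

    void add( Leaf leaf ) { attach( leaf ); unstack(); }

    private void attach( Node node )
    {
        if ( current == null ) { root = node; return; }
        node.parent = current;
        current.children.add( node );
    }

    // Pop every connector that has received all of its children, like unstackFilters().
    private void unstack()
    {
        while ( ( current != null ) && current.isComplete() )
        {
            current = ( Composite ) current.parent;
        }
    }

    public static void main( String[] args )
    {
        // Builds (&(|(a=b)(c=d))(!(e=f))(attr=*h)) in the order a decoder would see it.
        FilterTreeSketch tree = new FilterTreeSketch();
        tree.push( new Composite( '&', 3 ) );
        tree.push( new Composite( '|', 2 ) );
        tree.add( new Leaf( "(a=b)" ) );
        tree.add( new Leaf( "(c=d)" ) );
        tree.push( new Composite( '!', 1 ) );
        tree.add( new Leaf( "(e=f)" ) );
        tree.add( new Leaf( "(attr=*h)" ) );
        System.out.println( tree.root );      // (&(|(a=b)(c=d))(!(e=f))(attr=*h))
    }
}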
ldap/src/main/java/org/apache/ldap/common/codec/search/FilterGrammar.java
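// The grammar closes a constructed filter (and, or, not, substrings) only once the
// enclosing TLV's expected length has been fully consumed; that expected length comes from
// the BER definite-length field following every tag octet (LDAP forbids the indefinite
// form). The standalone sketch below uses hypothetical helper names, not the
// org.apache.asn1 TLV API, to show how such a length field is read: for instance an "and"
// filter starting with the octets 0xA0 0x52 announces 82 value octets.
class TlvLengthSketch
{
    /** Decoded length plus the number of octets the length field itself occupied. */
    static final class Length
    {
        final int value;
        final int fieldSize;
        Length( int value, int fieldSize ) { this.value = value; this.fieldSize = fieldSize; }
    }

    static Length decodeLength( byte[] buffer, int offset )
    {
        int first = buffer[offset] & 0xFF;

        if ( first <= 0x7F )
        {
            // Short form : bit 8 is 0 and the remaining bits are the length itself.
            return new Length( first, 1 );
        }

        // Long form : bit 8 is 1 and bits 7-1 give the number of following length octets.
        int octets = first & 0x7F;
        int value = 0;

        for ( int i = 1; i <= octets; i++ )
        {
            value = ( value << 8 ) | ( buffer[offset + i] & 0xFF );
        }

        return new Length( value, 1 + octets );
    }

    public static void main( String[] args )
    {
        // 0xA0 is the [0] constructed tag of the "and" choice, 0x52 = 82 value octets.
        byte[] pdu = { (byte) 0xA0, 0x52 };
        Length length = decodeLength( pdu, 1 );
        System.out.println( "expected value length : " + length.value );     // 82
        System.out.println( "length field octets   : " + length.fieldSize ); // 1
    }
}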
/* * Copyright 2005 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ldap.common.codec.search; import org.apache.asn1.codec.DecoderException; import org.apache.asn1.ber.grammar.IGrammar; import org.apache.asn1.ber.grammar.AbstractGrammar; import org.apache.asn1.ber.grammar.GrammarTransition; import org.apache.asn1.ber.grammar.GrammarAction; import org.apache.asn1.ber.IAsn1Container; import org.apache.asn1.ber.tlv.UniversalTag; import org.apache.asn1.ber.tlv.TLV; import org.apache.asn1.ber.tlv.Value; import org.apache.asn1.util.BooleanDecoderException; import org.apache.asn1.util.BooleanDecoder; import org.apache.ldap.common.codec.AttributeValueAssertion; import org.apache.ldap.common.codec.LdapConstants; import org.apache.ldap.common.codec.LdapMessage; import org.apache.ldap.common.codec.LdapMessageContainer; import org.apache.ldap.common.codec.LdapStatesEnum; import org.apache.ldap.common.codec.util.LdapString; import org.apache.ldap.common.codec.util.LdapStringEncodingException; import org.apache.ldap.common.name.LdapDN; import org.apache.ldap.common.util.StringTools; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class implements the Filter grammar. All the actions are declared in this * class. As it is a singleton, these declaration are only done once. * * If an action is to be added or modified, this is where the work is to be done ! * * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class FilterGrammar extends AbstractGrammar implements IGrammar { //~ Static fields/initializers ----------------------------------------------------------------- /** The logger */ private static final Logger log = LoggerFactory.getLogger( FilterGrammar.class ); /** The instance of grammar. FilterGrammar is a singleton */ private static IGrammar instance = new FilterGrammar(); //~ Constructors ------------------------------------------------------------------------------- /** * Creates a new LdapResultGrammar object. */ private FilterGrammar() { name = FilterGrammar.class.getName(); statesEnum = LdapStatesEnum.getInstance(); // Create the transitions table super.transitions = new GrammarTransition[LdapStatesEnum.LAST_FILTER_STATE][256]; //============================================================================================ // Search Request And Filter // This is quite complicated, because we have a tree structure to build, // and we may have many elements on each node. For instance, considering the // search filter : // (& (| (a = b) (c = d)) (! (e = f)) (attr =* h)) // We will have to create an And filter with three children : // - an Or child, // - a Not child // - and a Present child. // The Or child will also have two children. // // We know when we have a children while decoding the PDU, because the length // of its parent has not yet reached its expected length. 
// // This search filter : // (&(|(objectclass=top)(ou=contacts))(!(objectclass=ttt))(objectclass=*top)) // is encoded like this : // +----------------+---------------+ // | ExpectedLength | CurrentLength | //+-----------------------------+----------------+---------------+ //|A0 52 | 82 | 0 | new level 1 //| A1 24 | 82 36 | 0 0 | new level 2 //| A3 12 | 82 36 18 | 0 0 0 | new level 3 //| 04 0B 'objectclass' | 82 36 18 | 0 0 13 | //| 04 03 'top' | 82 36 18 | 0 20 18 | //| | ^ ^ | //| | | | | //| | +---------------+ | //+-----------------------------* end level 3 -------------------* //| A3 0E | 82 36 14 | 0 0 0 | new level 3 //| 04 02 'ou' | 82 36 14 | 0 0 4 | //| 04 08 'contacts' | 82 36 14 | 38 36 14 | //| | ^ ^ ^ ^ | //| | | | | | | //| | | +-------------|--+ | //| | +----------------+ | //+-----------------------------* end level 3, end level 2 ------* //| A2 14 | 82 20 | 38 0 | new level 2 //| A3 12 | 82 20 18 | 38 0 0 | new level 3 //| 04 0B 'objectclass' | 82 20 18 | 38 0 13 | //| 04 03 'ttt' | 82 20 18 | 60 20 18 | //| | ^ ^ ^ ^ | //| | | | | | | //| | | +-------------|--+ | //| | +----------------+ | //+-----------------------------* end level 3, end level 2 ------* //| A4 14 | 82 20 | 60 0 | new level 2 //| 04 0B 'objectclass' | 82 20 | 60 13 | //| 30 05 | 82 20 | 60 13 | //| 82 03 'top' | 82 20 | 82 20 | //| | ^ ^ ^ ^ | //| | | | | | | //| | | +-------------|--+ | //| | +----------------+ | //+-----------------------------* end level 2, end level 1 ------* //+-----------------------------+----------------+---------------+ // // When the current length equals the expected length of the parent PDU, // then we are able to 'close' the parent : it has all its children. This // is propagated through all the tree, until either there are no more // parents, or the expected length of the parent is different from the // current length. // //============================================================================================ // Filter ::= CHOICE { // and [0] SET OF Filter, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.AND_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_AND_VALUE, null ); // Filter ::= CHOICE { // ... // or [1] SET OF Filter, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.OR_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_OR_VALUE, null ); // Filter ::= CHOICE { // ... // not [2] Filter, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.NOT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_NOT_VALUE, null ); // Filter ::= CHOICE { // ... // equalityMatch [3] AttributeValueAssertion, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.EQUALITY_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_EQUALITY_MATCH_VALUE, null ); // Filter ::= CHOICE { // ... // substrings [4] SubstringFilter, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.SUBSTRINGS_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_VALUE, null ); // Filter ::= CHOICE { // ... // greaterOrEqual [5] AttributeValueAssertion, (Tag) // ... 
// Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.GREATER_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_GREATER_OR_EQUAL_VALUE, null ); // Filter ::= CHOICE { // ... // lessOrEqual [6] AttributeValueAssertion, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.LESS_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_LESS_OR_EQUAL_VALUE, null ); // Filter ::= CHOICE { // ... // present [7] AttributeDescription, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.PRESENT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_PRESENT_VALUE, null ); // Filter ::= CHOICE { // ... // approxMatch [8] AttributeValueAssertion, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.APPROX_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_APPROX_MATCH_VALUE, null ); // Filter ::= CHOICE { // ... // extensibleMatch [9] ExtensibleMatchFilter } (Tag) // Nothing to do super.transitions[LdapStatesEnum.FILTER_TAG][LdapConstants.EXTENSIBLE_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_TAG, LdapStatesEnum.FILTER_EXTENSIBLE_MATCH_VALUE, null ); // Filter ::= CHOICE { // and [0] SET OF Filter, (Value) // ... // We just have to switch to the initial state of Filter, because this is what // we will get ! super.transitions[LdapStatesEnum.FILTER_AND_VALUE][LdapConstants.AND_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_AND_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init And Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the SearchRequest Filter andFilter = new AndFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(andFilter); andFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(andFilter); andFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(andFilter); } }); // Filter ::= CHOICE { // ... // or [1] SET OF Filter, (Value) // ... // We just have to switch to the initial state of Filter, because this is what // we will get ! super.transitions[LdapStatesEnum.FILTER_OR_VALUE][LdapConstants.OR_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_OR_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init Or Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the SearchRequest Filter orFilter = new OrFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. 
((ConnectorFilter)currentFilter).addFilter(orFilter); orFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(orFilter); orFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(orFilter); } }); // Filter ::= CHOICE { // ... // not [2] Filter, (Value) // ... // We just have to switch to the initial state of Filter, because this is what // we will get ! super.transitions[LdapStatesEnum.FILTER_NOT_VALUE][LdapConstants.NOT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_NOT_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init Not Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the SearchRequest Filter notFilter = new NotFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(notFilter); notFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(notFilter); notFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(notFilter); } }); // Filter ::= CHOICE { // ... // equalityMatch [3] AttributeValueAssertion, (Value) // ... // We will create the filter container (as this is an equalityMatch filter, // we will create an AttributeValueAssertionFilter). super.transitions[LdapStatesEnum.FILTER_EQUALITY_MATCH_VALUE][LdapConstants.EQUALITY_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_EQUALITY_MATCH_VALUE, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, new GrammarAction( "Init Equality Match Filter" ) { public void action( IAsn1Container container ) throws DecoderException { compareFilterAction(container, LdapConstants.EQUALITY_MATCH_FILTER); } }); // Filter ::= CHOICE { // ... // greaterOrEqual [5] AttributeValueAssertion, (Value) // ... // We will create the filter container (as this is an GreaterOrEqual filter, // we will create an AttributeValueAssertionFilter). super.transitions[LdapStatesEnum.FILTER_GREATER_OR_EQUAL_VALUE][LdapConstants.GREATER_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_GREATER_OR_EQUAL_VALUE, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, new GrammarAction( "Init Greater Or Equal Filter" ) { public void action( IAsn1Container container ) throws DecoderException { compareFilterAction(container, LdapConstants.GREATER_OR_EQUAL_FILTER); } } ); // Filter ::= CHOICE { // ... // lessOrEqual [6] AttributeValueAssertion, (Value) // ... // We will create the filter container (as this is an lessOrEqual filter, // we will create an AttributeValueAssertionFilter). super.transitions[LdapStatesEnum.FILTER_LESS_OR_EQUAL_VALUE][LdapConstants.LESS_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_LESS_OR_EQUAL_VALUE, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, new GrammarAction( "Init Less Or Equal Filter" ) { public void action( IAsn1Container container ) throws DecoderException { compareFilterAction(container, LdapConstants.LESS_OR_EQUAL_FILTER ); } } ); // Filter ::= CHOICE { // ... // approxMatch [8] AttributeValueAssertion, (Value) // ... 
// We will create the filter container (as this is an approxMatch filter, // we will create an AttributeValueAssertionFilter). super.transitions[LdapStatesEnum.FILTER_APPROX_MATCH_VALUE][LdapConstants.APPROX_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_APPROX_MATCH_VALUE, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, new GrammarAction( "Init ApproxMatch Filter" ) { public void action( IAsn1Container container ) throws DecoderException { compareFilterAction(container, LdapConstants.APPROX_MATCH_FILTER ); } } ); // AttributeValueAssertion ::= SEQUENCE { // attributeDesc AttributeDescription, (TAG) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ATTRIBUTE_DESC_TAG, LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE, null); // AttributeValueAssertion ::= SEQUENCE { // attributeDesc AttributeDescription, (VALUE) // ... // We have to set the attribute description in the current filter. // It could be an equalityMatch, greaterOrEqual, lessOrEqual or an // approxMatch filter. super.transitions[LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE, LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG, new GrammarAction( "Init attributeDesc Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); AttributeValueAssertion assertion = new AttributeValueAssertion(); try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); assertion.setAttributeDesc( type ); } catch ( LdapStringEncodingException lsee ) { log.error( "The assertion description (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid assertion description " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } AttributeValueAssertionFilter currentFilter = (AttributeValueAssertionFilter)searchRequest.getCurrentFilter(); currentFilter.setAssertion(assertion); } }); // AttributeValueAssertion ::= SEQUENCE { // ... // assertionValue AssertionValue } (TAG) // Nothing to do. super.transitions[LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG, LdapStatesEnum.FILTER_ASSERTION_VALUE_VALUE, null); // AttributeValueAssertion ::= SEQUENCE { // ... // assertionValue AssertionValue } (VALUE) // We have to set the attribute description in the current filter. // It could be an equalityMatch, greaterOrEqual, lessOrEqual or an // approxMatch filter. // Whgen finished, we will transit to the first state. super.transitions[LdapStatesEnum.FILTER_ASSERTION_VALUE_VALUE][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ASSERTION_VALUE_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init AssertionValue Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // The value can be null. 
Object assertionValue = StringTools.EMPTY_BYTES; if ( tlv.getLength().getLength() != 0 ) { assertionValue = tlv.getValue().getData(); } AttributeValueAssertionFilter currentFilter = (AttributeValueAssertionFilter)searchRequest.getCurrentFilter(); AttributeValueAssertion assertion = currentFilter.getAssertion(); if ( ldapMessageContainer.isBinary( assertion.getAttributeDesc() ) ) { assertion.setAssertionValue( assertionValue ); } else { assertion.setAssertionValue( StringTools.utf8ToString( (byte[])assertionValue ) ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } }); // AttributeValueAssertion ::= SEQUENCE { // attributeDesc AttributeDescription, (VALUE) // ... // We have to set the attribute description in the current filter. // It could be an equalityMatch, greaterOrEqual, lessOrEqual or an // approxMatch filter. super.transitions[LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ATTRIBUTE_DESC_VALUE, LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG, new GrammarAction( "Init attributeDesc Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); AttributeValueAssertion assertion = new AttributeValueAssertion(); try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); assertion.setAttributeDesc( type ); } catch ( LdapStringEncodingException lsee ) { log.error( "The assertion value (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid assertion value " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } AttributeValueAssertionFilter currentFilter = (AttributeValueAssertionFilter)searchRequest.getCurrentFilter(); currentFilter.setAssertion(assertion); } }); // AttributeValueAssertion ::= SEQUENCE { // ... // assertionValue AssertionValue } (TAG) // Nothing to do. super.transitions[LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_ASSERTION_VALUE_TAG, LdapStatesEnum.FILTER_ASSERTION_VALUE_VALUE, null); // Filter ::= CHOICE { // ... // present [7] AttributeDescription, (Value) // ... super.transitions[LdapStatesEnum.FILTER_PRESENT_VALUE][LdapConstants.PRESENT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_PRESENT_VALUE, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Init present filter Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // We can allocate the Attribute Value Assertion PresentFilter presentFilter = new PresentFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(presentFilter); presentFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. 
//searchRequest.setCurrentFilter(presentFilter); presentFilter.setParent( searchRequest ); searchRequest.setFilter( presentFilter ); } // Store the value. try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); presentFilter.setAttributeDescription( type ); } catch ( LdapStringEncodingException lsee ) { log.error( "Present filter attribute description (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid present filter attribute description " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } } ); // Here we are dealing with substrings. LDAP grammar is not very explicit about // what is allowed (-- at least one must be present !!!), while RFC 2254 is // really clear. But LDAP grammar is the one to follow... // // substring ::= attr "=" [AttributeValue] any [AttributeValue] // any ::= "*" *(AttributeValue "*") // // Filter ::= CHOICE { // ... // substrings [4] SubstringFilter, (Value) // ... // Store the substring super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_VALUE][LdapConstants.SUBSTRINGS_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_TAG, new GrammarAction( "Init Substring Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // We can allocate the SearchRequest Filter substringFilter = new SubstringFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(substringFilter); substringFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(substringFilter); substringFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(substringFilter); // As this is a new Constructed object, we have to init its length int expectedLength = tlv.getLength().getLength(); substringFilter.setExpectedLength( expectedLength ); substringFilter.setCurrentLength( 0 ); } } ); // SubstringFilter ::= SEQUENCE { // type AttributeDescription, (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_TAG][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_VALUE, null ); // SubstringFilter ::= SEQUENCE { // type AttributeDescription, (Value) // ... // super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_VALUE][UniversalTag.OCTET_STRING_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_TYPE_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_TAG, new GrammarAction( "Store substring filter Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. 
SubstringFilter substringFilter = (SubstringFilter)searchRequest.getCurrentFilter(); try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); substringFilter.setType( type ); } catch ( LdapStringEncodingException lsee ) { log.error( "The substring filter type (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid substring filter type " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } } ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_TAG][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_VALUE, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { (Value) // ... // Nothing to do. Here, we may have three possibilities. We may have an "initial" value, // or an "any" value, or a "final" value. Any other option is an error. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_VALUE][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // initial [0] LDAPString, (Tag) // ... // We have an "initial" value. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.SEARCH_SUBSTRINGS_INITIAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_INITIAL_VALUE, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // initial [0] LDAPString, (Value) // ... // Store the initial value. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_INITIAL_VALUE][LdapConstants.SEARCH_SUBSTRINGS_INITIAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_INITIAL_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG, new GrammarAction( "Store substring filter initial Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. SubstringFilter substringFilter = (SubstringFilter)searchRequest.getCurrentFilter(); try { substringFilter.setInitialSubstrings(new LdapString(tlv.getValue().getData())); } catch ( LdapStringEncodingException lsee ) { log.error( "The substring filter initial (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid substring filter initial " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } }); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... 
// any [1] LDAPString, (Tag) // ... // We have an 'any' value without an 'initial' value. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.SEARCH_SUBSTRINGS_ANY_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_VALUE, null); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // initial [0] LDAPString, // any [1] LDAPString, (Tag) // ... // We had an 'initial' substring, and now we have an 'any' substring. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG][LdapConstants.SEARCH_SUBSTRINGS_ANY_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_VALUE, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // any [1] LDAPString, (Value) // ... // Store the 'any' value. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_VALUE][LdapConstants.SEARCH_SUBSTRINGS_ANY_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_VALUE, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG, new GrammarAction( "Store substring filter any Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. SubstringFilter substringFilter = (SubstringFilter)searchRequest.getCurrentFilter(); try { substringFilter.addAnySubstrings(new LdapString(tlv.getValue().getData())); } catch ( LdapStringEncodingException lsee ) { log.error( "The substring any filter (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid substring any filter " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } }); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // any [1] LDAPString, // final [2] LDAPString, (Tag) // } // // We have an 'final' value after an 'any' value. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG][LdapConstants.SEARCH_SUBSTRINGS_FINAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE, null); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // final [2] LDAPString, (Tag) // } // // We have an 'final' value only. Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.SEARCH_SUBSTRINGS_FINAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE, null); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // initial [0] LDAPString, // final [2] LDAPString, (Tag) // } // We had an 'initial' substring, and now we have an 'final' substring. 
Nothing to do. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG][LdapConstants.SEARCH_SUBSTRINGS_FINAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_ANY_OR_FINAL_TAG, LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE, null ); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // substrings SEQUENCE OF CHOICE { // ... // final [2] LDAPString, (Value) // Store the 'final' value. super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE][LdapConstants.SEARCH_SUBSTRINGS_FINAL_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_FINAL_VALUE, LdapStatesEnum.END_STATE, new GrammarAction( "Store substring filter final Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. SubstringFilter substringFilter = (SubstringFilter)searchRequest.getCurrentFilter(); try { substringFilter.setFinalSubstrings(new LdapString(tlv.getValue().getData())); } catch ( LdapStringEncodingException lsee ) { log.error( "The substring final filter (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid substring final filter " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } }); // SubstringFilter ::= SEQUENCE { // ... // -- at least one must be present // } // // As we must have at least an 'initial', 'any' or 'final' value, any other value is an error.
super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.AND_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.OR_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.NOT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.EQUALITY_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.SUBSTRINGS_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.GREATER_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.LESS_OR_EQUAL_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.PRESENT_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); 
super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.APPROX_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][LdapConstants.EXTENSIBLE_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); super.transitions[LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_SUBSTRINGS_SUBSTRINGS_CHOICE_TAG, LdapStatesEnum.FILTER_TAG, new GrammarAction( "Bad tag exception" ) { public void action( IAsn1Container container ) throws DecoderException { throw new DecoderException("At least an 'initial', 'any' or 'final' value must be found"); } }); // Here we are dealing with extensible matches // // Filter ::= CHOICE { // ... // extensibleMatch [9] MatchingRuleAssertion} (Value) // // Nothing to do super.transitions[LdapStatesEnum.FILTER_EXTENSIBLE_MATCH_VALUE][LdapConstants.EXTENSIBLE_MATCH_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_EXTENSIBLE_MATCH_VALUE, LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_TAG, new GrammarAction( "Init extensible match Filter" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the ExtensibleMatch Filter Filter extensibleMatchFilter = new ExtensibleMatchFilter(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(extensibleMatchFilter); extensibleMatchFilter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. searchRequest.setFilter(extensibleMatchFilter); extensibleMatchFilter.setParent( searchRequest ); } searchRequest.setCurrentFilter(extensibleMatchFilter); // We now have to get back to the nearest filter which is not terminal. unstackFilters( container ); } } ); // MatchingRuleAssertion ::= SEQUENCE { (Tag) // ... // Nothing to do super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_TAG][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_TAG, LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { (Value) // ... // Nothing to do. Two cases next : we may have a matching rule or a type. // At least one of those two elements must be present. 
super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_VALUE][UniversalTag.SEQUENCE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_ASSERTION_VALUE, LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG, null); // MatchingRuleAssertion ::= SEQUENCE { // matchingRule [1] MatchingRuleId OPTIONAL, (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG][LdapConstants.SEARCH_MATCHING_RULE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG, LdapStatesEnum.FILTER_MATCHING_RULE_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // matchingRule [1] MatchingRuleId OPTIONAL, (Value) // ... // Store the matching rule value. super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_VALUE][LdapConstants.SEARCH_MATCHING_RULE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_VALUE, LdapStatesEnum.FILTER_MATCHING_TYPE_OR_MATCH_VALUE_TAG, new GrammarAction( "Store matching rule Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. ExtensibleMatchFilter extensibleMatchFilter = (ExtensibleMatchFilter)searchRequest.getCurrentFilter(); try { extensibleMatchFilter.setMatchingRule(new LdapString(tlv.getValue().getData())); } catch ( LdapStringEncodingException lsee ) { log.error( "The matching rule (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid matching rule " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } } }); // MatchingRuleAssertion ::= SEQUENCE { // (void) // type [2] AttributeDescription OPTIONAL, (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG][LdapConstants.SEARCH_TYPE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG, LdapStatesEnum.FILTER_MATCHING_TYPE_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // matchingRule [1] MatchingRuleId OPTIONAL, // type [2] AttributeDescription OPTIONAL, (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_TYPE_OR_MATCH_VALUE_TAG][LdapConstants.SEARCH_TYPE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_RULE_OR_TYPE_TAG, LdapStatesEnum.FILTER_MATCHING_TYPE_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // ... // type [2] AttributeDescription OPTIONAL, (Length) // ... // Store the matching type value. super.transitions[LdapStatesEnum.FILTER_MATCHING_TYPE_VALUE][LdapConstants.SEARCH_TYPE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_TYPE_VALUE, LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_TAG, new GrammarAction( "Store matching type Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. 
ExtensibleMatchFilter extensibleMatchFilter = (ExtensibleMatchFilter)searchRequest.getCurrentFilter(); try { LdapString type = LdapDN.normalizeAttribute( tlv.getValue().getData() ); extensibleMatchFilter.setType( type ); } catch ( LdapStringEncodingException lsee ) { log.error( "The match filter (" + StringTools.dumpBytes( tlv.getValue().getData() ) + ") is invalid" ); throw new DecoderException( "Invalid match filter " + StringTools.dumpBytes( tlv.getValue().getData() ) + ", : " + lsee.getMessage() ); } } }); // MatchingRuleAssertion ::= SEQUENCE { // ... // matchValue [3] AssertionValue, (Tag) // ... // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_TAG][LdapConstants.SEARCH_MATCH_VALUE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_TAG, LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // ... // matchValue [3] AssertionValue, (Value) // ... // Store the matching type value. super.transitions[LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_VALUE][LdapConstants.SEARCH_MATCH_VALUE_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_MATCH_VALUE_VALUE, LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_OR_END_TAG, new GrammarAction( "Store matching match value Value" ) { public void action( IAsn1Container container ) { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. ExtensibleMatchFilter extensibleMatchFilter = (ExtensibleMatchFilter)searchRequest.getCurrentFilter(); extensibleMatchFilter.setMatchValue( StringTools.utf8ToString( tlv.getValue().getData() ) ); } }); // MatchingRuleAssertion ::= SEQUENCE { // ... // dnAttributes [4] BOOLEAN DEFAULT FALSE } (Tag) // Nothing to do. super.transitions[LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_OR_END_TAG][LdapConstants.DN_ATTRIBUTES_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_OR_END_TAG, LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_VALUE, null ); // MatchingRuleAssertion ::= SEQUENCE { // ... // dnAttributes [4] BOOLEAN DEFAULT FALSE } (Length) // Store the matching type value. super.transitions[LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_VALUE][LdapConstants.DN_ATTRIBUTES_FILTER_TAG] = new GrammarTransition( LdapStatesEnum.FILTER_MATCHING_DN_ATTRIBUTES_VALUE, LdapStatesEnum.END_STATE, new GrammarAction( "Store matching dnAttributes Value" ) { public void action( IAsn1Container container ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Store the value. ExtensibleMatchFilter extensibleMatchFilter = (ExtensibleMatchFilter)searchRequest.getCurrentFilter(); // We get the value. If it's a 0, it's a FALSE. If it's // a FF, it's a TRUE. Any other value should be an error, // but we could relax this constraint. So if we have something // which is not 0, it will be interpreted as TRUE, but we // will generate a warning. 
Value value = tlv.getValue(); try { extensibleMatchFilter.setDnAttributes( BooleanDecoder.parse( value ) ); } catch ( BooleanDecoderException bde ) { log.error("The DN attributes flag " + StringTools.dumpBytes( value.getData() ) + " is invalid : " + bde.getMessage() + ". It should be 0 or 255" ); throw new DecoderException( bde.getMessage() ); } if ( log.isDebugEnabled() ) { log.debug( "DN Attributes : " + extensibleMatchFilter.isDnAttributes() ); } } }); } //~ Methods ------------------------------------------------------------------------------------ /** * This class is a singleton. * * @return An instance of this grammar */ public static IGrammar getInstance() { return instance; } /** * This method is used to clear the filter's stack for terminated elements. An element * is considered terminated if either: * - it's a final element (i.e. an element which cannot contain a Filter) * - its current length equals its expected length. * * @param container The container being decoded */ private void unstackFilters( IAsn1Container container ) { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); TLV tlv = ldapMessageContainer.getCurrentTLV(); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); // We now have to check if the parent has been completed if (tlv.getParent().getExpectedLength() == 0) { TLV parent = tlv.getParent(); // The parent has been completed, we have to switch it while ( (parent != null) && (parent.getExpectedLength() == 0) ) { parent = parent.getParent(); if ( currentFilter.getParent() instanceof Filter ) { currentFilter = (Filter)currentFilter.getParent(); } else { currentFilter = null; break; } } searchRequest.setCurrentFilter(currentFilter); } } /** * This method is used by each comparison filter (=, <=, >= or ~=). * * @param container The LdapContainer * @throws DecoderException If any error occurs. */ private void compareFilterAction( IAsn1Container container , int filterType ) throws DecoderException { LdapMessageContainer ldapMessageContainer = ( LdapMessageContainer ) container; LdapMessage ldapMessage = ldapMessageContainer.getLdapMessage(); SearchRequest searchRequest = ldapMessage.getSearchRequest(); // We can allocate the Attribute Value Assertion Filter filter = new AttributeValueAssertionFilter( filterType ); // Get the parent, if any Filter currentFilter = searchRequest.getCurrentFilter(); if (currentFilter != null) { // Ok, we have a parent. The new Filter will be added to // this parent, then. ((ConnectorFilter)currentFilter).addFilter(filter); filter.setParent( currentFilter ); } else { // No parent. This Filter will become the root. filter.setParent( searchRequest ); searchRequest.setFilter(filter); } searchRequest.setCurrentFilter(filter); } }
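The dnAttributes action above describes a relaxed BER BOOLEAN convention: 0x00 decodes to FALSE, 0xFF to TRUE, and any other single byte is tolerated as TRUE with a warning. The sketch below only illustrates that described convention; it is not the project's BooleanDecoder (whose error message suggests it may in fact only accept 0 or 255), and the class and method names are hypothetical.

// Illustrative sketch of the relaxed BER BOOLEAN handling described in the
// dnAttributes comment above. Hypothetical names; not the real BooleanDecoder.
final class RelaxedBerBoolean
{
    static boolean parse( byte[] data )
    {
        if ( ( data == null ) || ( data.length != 1 ) )
        {
            throw new IllegalArgumentException( "A BER BOOLEAN value must be exactly one byte long" );
        }

        if ( data[0] == 0x00 )
        {
            return false; // canonical FALSE
        }

        if ( data[0] != ( byte ) 0xFF )
        {
            // Non-canonical encoding: strictly an error, relaxed here to TRUE with a warning.
            System.err.println( "WARNING: non-canonical BOOLEAN byte, interpreting it as TRUE" );
        }

        return true;
    }
}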
Fixed the logs git-svn-id: 77647bf8c03f680e1577cc294a519e21c03bf3cb@371154 13f79535-47bb-0310-9956-ffa450edef68
ldap/src/main/java/org/apache/ldap/common/codec/search/FilterGrammar.java
Fixed the logs
Java
apache-2.0
72c1497b386aaba3288df34c8e0fed0583ad1131
0
MatthewTamlin/Vertigo
package com.matthewtamlin.vertigo.example; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.view.View; import com.matthewtamlin.vertigo.library.SimpleVertigoCoordinator; import com.matthewtamlin.vertigo.library.VertigoCoordinator; import com.matthewtamlin.vertigo.library.VertigoView; /** * Displays three views as well as buttons for controlling the view positions. */ public class MainActivity extends AppCompatActivity { /** * Key for referring to view 1 in the coordinator. */ private static final String VIEW_1_KEY = "view 1"; /** * Key for referring to view 2 in the coordinator. */ private static final String VIEW_2_KEY = "view 2"; /** * Key for referring to view 3 in the coordinator. */ private static final String VIEW_3_KEY = "view_3"; /** * Coordinates the views by sliding them up and down. */ private VertigoCoordinator coordinator; @Override protected void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); setupCoordinationBehaviour(); } /** * Sets up the three views in the coordinator. */ private void setupCoordinationBehaviour() { coordinator = (SimpleVertigoCoordinator) findViewById(R.id.activity_main_coordinator); final VertigoView view1 = (VertigoView) findViewById(R.id.activity_main_view_1); final VertigoView view2 = (VertigoView) findViewById(R.id.activity_main_view_2); final VertigoView view3 = (VertigoView) findViewById(R.id.activity_main_view_3); view1.onStateChanged(VertigoView.State.INACTIVE); view2.onStateChanged(VertigoView.State.INACTIVE); view3.onStateChanged(VertigoView.State.ACTIVE); coordinator.registerViewForCoordination(view1, VIEW_1_KEY); coordinator.registerViewForCoordination(view2, VIEW_2_KEY); coordinator.registerViewForCoordination(view3, VIEW_3_KEY); } /** * On-click listener method for button 1. When called, view 1 is made active in the * coordinator. * * @param clickedView * the clicked view */ public void showView1(final View clickedView) { coordinator.makeViewActive(VIEW_1_KEY, true, null); } /** * On-click listener method for button 2. When called, view 2 is made active in the * coordinator. * * @param clickedView * the clicked view */ public void showView2(final View clickedView) { coordinator.makeViewActive(VIEW_2_KEY, true, null); } /** * On-click listener method for button 3. When called, view 3 is made active in the * coordinator. * * @param clickedView * the clicked view */ public void showView3(final View clickedView) { coordinator.makeViewActive(VIEW_3_KEY, true, null); } }
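The Javadoc above documents the three android:onClick handlers. The same behaviour could also be wired programmatically; the minimal sketch below assumes hypothetical button ids (activity_main_button_1, _2, _3) that are not defined in this file and simply delegates to the handlers shown above.

// Sketch of programmatic wiring equivalent to the android:onClick handlers above.
// The button ids are assumptions for illustration only.
private void setupButtonListeners() {
	findViewById(R.id.activity_main_button_1).setOnClickListener(new View.OnClickListener() {
		@Override
		public void onClick(final View clickedView) {
			showView1(clickedView);
		}
	});

	findViewById(R.id.activity_main_button_2).setOnClickListener(new View.OnClickListener() {
		@Override
		public void onClick(final View clickedView) {
			showView2(clickedView);
		}
	});

	findViewById(R.id.activity_main_button_3).setOnClickListener(new View.OnClickListener() {
		@Override
		public void onClick(final View clickedView) {
			showView3(clickedView);
		}
	});
}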
example/src/main/java/com/matthewtamlin/vertigo/example/MainActivity.java
package com.matthewtamlin.vertigo.example; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.view.View; import com.matthewtamlin.vertigo.library.SimpleVertigoCoordinator; import com.matthewtamlin.vertigo.library.VertigoCoordinator; import com.matthewtamlin.vertigo.library.VertigoView; public class MainActivity extends AppCompatActivity { private static final String VIEW_1_KEY = "view 1"; private static final String VIEW_2_KEY = "view 2"; private static final String VIEW_3_KEY = "view_3"; private VertigoCoordinator coordinator; @Override protected void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); setupCoordinationBehaviour(); } private void setupCoordinationBehaviour() { coordinator = (SimpleVertigoCoordinator) findViewById(R.id.activity_main_coordinator); final VertigoView view1 = (VertigoView) findViewById(R.id.activity_main_view_1); final VertigoView view2 = (VertigoView) findViewById(R.id.activity_main_view_2); final VertigoView view3 = (VertigoView) findViewById(R.id.activity_main_view_3); view1.onStateChanged(VertigoView.State.INACTIVE); view2.onStateChanged(VertigoView.State.INACTIVE); view3.onStateChanged(VertigoView.State.ACTIVE); coordinator.registerViewForCoordination(view1, VIEW_1_KEY); coordinator.registerViewForCoordination(view2, VIEW_2_KEY); coordinator.registerViewForCoordination(view3, VIEW_3_KEY); } public void showView1(final View clickedView) { coordinator.makeViewActive(VIEW_1_KEY, true, null); } public void showView2(final View clickedView) { coordinator.makeViewActive(VIEW_2_KEY, true, null); } public void showView3(final View clickedView) { coordinator.makeViewActive(VIEW_3_KEY, true, null); } }
Added Javadoc
example/src/main/java/com/matthewtamlin/vertigo/example/MainActivity.java
Added Javadoc
Java
apache-2.0
f10df5e199390b641acab1cb8d6cb8a3e4056981
0
jmt4/Selenium2,jmt4/Selenium2,jmt4/Selenium2,jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,yumingjuan/selenium,yumingjuan/selenium,yumingjuan/selenium,jmt4/Selenium2,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,yumingjuan/selenium,jmt4/Selenium2,yumingjuan/selenium,yumingjuan/selenium,yumingjuan/selenium
/* * Copyright 2006 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.thoughtworks.selenium; import com.google.common.collect.Lists; import org.openqa.selenium.net.Urls; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.Writer; import java.net.HttpURLConnection; import java.net.URL; import java.net.ConnectException; import java.text.NumberFormat; import java.text.ParseException; import java.util.Arrays; import java.util.List; /** * Sends commands and retrieves results via HTTP. * @author Ben Griffiths, Jez Humble */ public class HttpCommandProcessor implements CommandProcessor { private String pathToServlet; private String browserStartCommand; private String browserURL; private String sessionId; private String extensionJs; private String rcServerLocation; /** Specifies a server host/port, a command to launch the browser, and a starting URL for the browser. * * @param serverHost - the host name on which the Selenium Server resides * @param serverPort - the port on which the Selenium Server is listening * @param browserStartCommand - the command string used to launch the browser, e.g. "*firefox" or "c:\\program files\\internet explorer\\iexplore.exe" * @param browserURL - the starting URL including just a domain name. We'll start the browser pointing at the Selenium resources on this URL, * @param extensionJs - extension Javascript for this session * e.g. "http://www.google.com" would send the browser to "http://www.google.com/selenium-server/core/RemoteRunner.html" */ public HttpCommandProcessor(String serverHost, int serverPort, String browserStartCommand, String browserURL) { rcServerLocation = serverHost + ":"+ Integer.toString(serverPort); this.pathToServlet = "http://" + rcServerLocation + "/selenium-server/driver/"; this.browserStartCommand = browserStartCommand; this.browserURL = browserURL; this.extensionJs = ""; } /** Specifies the URL to the CommandBridge servlet, a command to launch the browser, and a starting URL for the browser. * * @param pathToServlet - the URL of the Selenium Server Driver, e.g. "http://localhost:4444/selenium-server/driver/" (don't forget the final slash!) * @param browserStartCommand - the command string used to launch the browser, e.g. "*firefox" or "c:\\program files\\internet explorer\\iexplore.exe" * @param browserURL - the starting URL including just a domain name. 
We'll start the browser pointing at the Selenium resources on this URL, * @param extensionJs - extension Javascript for this session */ public HttpCommandProcessor(String pathToServlet, String browserStartCommand, String browserURL) { this.pathToServlet = pathToServlet; this.browserStartCommand = browserStartCommand; this.browserURL = browserURL; this.extensionJs = ""; } public String getRemoteControlServerLocation() { return rcServerLocation; } public String doCommand(String commandName, String[] args) { DefaultRemoteCommand command = new DefaultRemoteCommand(commandName,args); String result = executeCommandOnServlet(command.getCommandURLString()); if (result == null) { throw new NullPointerException("Selenium Bug! result must not be null"); } if (!result.startsWith("OK")) { return throwAssertionFailureExceptionOrError(result); } return result; } protected String throwAssertionFailureExceptionOrError(String message) { throw new SeleniumException(message); } /** Sends the specified command string to the bridge servlet */ public String executeCommandOnServlet(String command) { InputStream is = null; try { return getCommandResponseAsString(command); } catch (IOException e) { if (e instanceof ConnectException) { throw new SeleniumException(e.getMessage(),e); } e.printStackTrace(); throw new UnsupportedOperationException("Catch body broken: IOException from " + command + " -> " + e, e); } } private String stringContentsOfInputStream(Reader rdr) throws IOException { StringBuffer sb = new StringBuffer(); int c; try { while ((c = rdr.read()) != -1) { sb.append((char) c); } return sb.toString(); } finally { rdr.close(); } } // for testing protected HttpURLConnection getHttpUrlConnection(URL urlForServlet) throws IOException { return (HttpURLConnection) urlForServlet.openConnection(); } // for testing protected Writer getOutputStreamWriter(HttpURLConnection conn) throws IOException { return new BufferedWriter(new OutputStreamWriter(conn.getOutputStream())); } // for testing protected Reader getInputStreamReader(HttpURLConnection conn) throws IOException { return new InputStreamReader(conn.getInputStream(), "UTF-8"); } // for testing protected int getResponseCode(HttpURLConnection conn) throws IOException { return conn.getResponseCode(); } protected String getCommandResponseAsString(String command) throws IOException { String responseString = null; int responsecode = HttpURLConnection.HTTP_MOVED_PERM; HttpURLConnection uc = null; Writer wr = null; Reader rdr = null; while (responsecode == HttpURLConnection.HTTP_MOVED_PERM) { URL result = new URL(pathToServlet); String body = buildCommandBody(command); try { uc = getHttpUrlConnection(result); uc.setRequestProperty("Content-Type", "application/x-www-form-urlencoded; charset=utf-8"); uc.setInstanceFollowRedirects(false); uc.setDoOutput(true); wr = getOutputStreamWriter(uc);; wr.write(body); wr.flush(); responsecode = getResponseCode(uc); if (responsecode == HttpURLConnection.HTTP_MOVED_PERM) { pathToServlet = uc.getRequestProperty("Location"); } else if (responsecode != HttpURLConnection.HTTP_OK) { throwAssertionFailureExceptionOrError(uc.getResponseMessage()); } else { rdr = getInputStreamReader(uc); responseString = stringContentsOfInputStream(rdr); } } finally { closeResources(uc, wr, rdr); } } return responseString; } protected void closeResources(HttpURLConnection conn, Writer wr, Reader rdr) { try { if (null != wr) { wr.close(); } } catch (IOException ioe) { // ignore } try { if (null != rdr) { rdr.close(); } } catch (IOException ioe) { // ignore 
} if (null != conn) { conn.disconnect(); } } private String buildCommandBody(String command) { StringBuffer sb = new StringBuffer(); sb.append(command); if (sessionId != null) { sb.append("&sessionId="); sb.append(Urls.urlEncode(sessionId)); } return sb.toString(); } /** * This should be invoked before start(). * * @param extensionJs the extra extension Javascript to include in this * browser session. */ public void setExtensionJs(String extensionJs) { this.extensionJs = extensionJs; } public void start() { String result = getString("getNewBrowserSession", new String[]{browserStartCommand, browserURL, extensionJs}); setSessionInProgress(result); } public void start(String optionsString) { String result = getString("getNewBrowserSession", new String[]{browserStartCommand, browserURL, extensionJs, optionsString}); setSessionInProgress(result); } /** * Wraps the version of start() that takes a String parameter, sending it * the result of calling toString() on optionsObject, which will likely be * a BrowserConfigurationOptions instance. * * @param optionsObject */ public void start(Object optionsObject) { start(optionsObject.toString()); } protected void setSessionInProgress(String result) { sessionId = result; } public void stop() { if (hasSessionInProgress()) { doCommand("testComplete", null); } setSessionInProgress(null); } public boolean hasSessionInProgress() { return null != sessionId; } public String getString(String commandName, String[] args) { String result = doCommand(commandName, args); if (result.length() >= "OK,".length()) { return result.substring("OK,".length()); } System.err.println("WARNING: getString(" + commandName + ") saw a bad result " + result); return ""; } public String[] getStringArray(String commandName, String[] args) { String result = getString(commandName, args); return parseCSV(result); } /** Convert backslash-escaped comma-delimited string into String array. As described in SRC-CDP * spec section 5.2.1.2, these strings are comma-delimited, but commas * can be escaped with a backslash "\". Backslashes can also be escaped * as a double-backslash. * @param input the unparsed string, e.g. 
"veni\, vidi\, vici,c:\\foo\\bar,c:\\I came\, I \\saw\\\, I conquered" * @return the string array resulting from parsing this string */ public static String[] parseCSV(String input) { List<String> output = Lists.newArrayList(); StringBuffer sb = new StringBuffer(); for(int i = 0; i < input.length(); i++) { char c = input.charAt(i); switch (c) { case ',': output.add(sb.toString()); sb = new StringBuffer(); continue; case '\\': i++; c = input.charAt(i); // fall through to: default: sb.append(c); } } output.add(sb.toString()); return output.toArray(new String[output.size()]); } public Number getNumber(String commandName, String[] args) { String result = getString(commandName, args); Number n; try { n = NumberFormat.getInstance().parse(result); } catch (ParseException e) { throw new RuntimeException(e); } if (n instanceof Long) { // SRC-315 we should return Integers if possible if (n.intValue() == n.longValue()) { return new Integer(n.intValue()); } } return n; } public Number[] getNumberArray(String commandName, String[] args) { String[] result = getStringArray(commandName, args); Number[] n = new Number[result.length]; for (int i = 0; i < result.length; i++) { try { n[i] = NumberFormat.getInstance().parse(result[i]); } catch (ParseException e) { throw new RuntimeException(e); } } return n; } public boolean getBoolean(String commandName, String[] args) { String result = getString(commandName, args); boolean b; if ("true".equals(result)) { b = true; return b; } if ("false".equals(result)) { b = false; return b; } throw new RuntimeException("result was neither 'true' nor 'false': " + result); } public boolean[] getBooleanArray(String commandName, String[] args) { String[] result = getStringArray(commandName, args); boolean[] b = new boolean[result.length]; for (int i = 0; i < result.length; i++) { if ("true".equals(result[i])) { b[i] = true; continue; } if ("false".equals(result[i])) { b[i] = false; continue; } throw new RuntimeException("result was neither 'true' nor 'false': " + Arrays.toString(result)); } return b; } }
java/client/src/com/thoughtworks/selenium/HttpCommandProcessor.java
/* * Copyright 2006 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.thoughtworks.selenium; import org.openqa.selenium.net.Urls; import org.testng.collections.Lists; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.Writer; import java.net.HttpURLConnection; import java.net.URL; import java.net.ConnectException; import java.text.NumberFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Sends commands and retrieves results via HTTP. * @author Ben Griffiths, Jez Humble */ public class HttpCommandProcessor implements CommandProcessor { private String pathToServlet; private String browserStartCommand; private String browserURL; private String sessionId; private String extensionJs; private String rcServerLocation; /** Specifies a server host/port, a command to launch the browser, and a starting URL for the browser. * * @param serverHost - the host name on which the Selenium Server resides * @param serverPort - the port on which the Selenium Server is listening * @param browserStartCommand - the command string used to launch the browser, e.g. "*firefox" or "c:\\program files\\internet explorer\\iexplore.exe" * @param browserURL - the starting URL including just a domain name. We'll start the browser pointing at the Selenium resources on this URL, * @param extensionJs - extension Javascript for this session * e.g. "http://www.google.com" would send the browser to "http://www.google.com/selenium-server/core/RemoteRunner.html" */ public HttpCommandProcessor(String serverHost, int serverPort, String browserStartCommand, String browserURL) { rcServerLocation = serverHost + ":"+ Integer.toString(serverPort); this.pathToServlet = "http://" + rcServerLocation + "/selenium-server/driver/"; this.browserStartCommand = browserStartCommand; this.browserURL = browserURL; this.extensionJs = ""; } /** Specifies the URL to the CommandBridge servlet, a command to launch the browser, and a starting URL for the browser. * * @param pathToServlet - the URL of the Selenium Server Driver, e.g. "http://localhost:4444/selenium-server/driver/" (don't forget the final slash!) * @param browserStartCommand - the command string used to launch the browser, e.g. "*firefox" or "c:\\program files\\internet explorer\\iexplore.exe" * @param browserURL - the starting URL including just a domain name. 
We'll start the browser pointing at the Selenium resources on this URL, * @param extensionJs - extension Javascript for this session */ public HttpCommandProcessor(String pathToServlet, String browserStartCommand, String browserURL) { this.pathToServlet = pathToServlet; this.browserStartCommand = browserStartCommand; this.browserURL = browserURL; this.extensionJs = ""; } public String getRemoteControlServerLocation() { return rcServerLocation; } public String doCommand(String commandName, String[] args) { DefaultRemoteCommand command = new DefaultRemoteCommand(commandName,args); String result = executeCommandOnServlet(command.getCommandURLString()); if (result == null) { throw new NullPointerException("Selenium Bug! result must not be null"); } if (!result.startsWith("OK")) { return throwAssertionFailureExceptionOrError(result); } return result; } protected String throwAssertionFailureExceptionOrError(String message) { throw new SeleniumException(message); } /** Sends the specified command string to the bridge servlet */ public String executeCommandOnServlet(String command) { InputStream is = null; try { return getCommandResponseAsString(command); } catch (IOException e) { if (e instanceof ConnectException) { throw new SeleniumException(e.getMessage(),e); } e.printStackTrace(); throw new UnsupportedOperationException("Catch body broken: IOException from " + command + " -> " + e, e); } } private String stringContentsOfInputStream(Reader rdr) throws IOException { StringBuffer sb = new StringBuffer(); int c; try { while ((c = rdr.read()) != -1) { sb.append((char) c); } return sb.toString(); } finally { rdr.close(); } } // for testing protected HttpURLConnection getHttpUrlConnection(URL urlForServlet) throws IOException { return (HttpURLConnection) urlForServlet.openConnection(); } // for testing protected Writer getOutputStreamWriter(HttpURLConnection conn) throws IOException { return new BufferedWriter(new OutputStreamWriter(conn.getOutputStream())); } // for testing protected Reader getInputStreamReader(HttpURLConnection conn) throws IOException { return new InputStreamReader(conn.getInputStream(), "UTF-8"); } // for testing protected int getResponseCode(HttpURLConnection conn) throws IOException { return conn.getResponseCode(); } protected String getCommandResponseAsString(String command) throws IOException { String responseString = null; int responsecode = HttpURLConnection.HTTP_MOVED_PERM; HttpURLConnection uc = null; Writer wr = null; Reader rdr = null; while (responsecode == HttpURLConnection.HTTP_MOVED_PERM) { URL result = new URL(pathToServlet); String body = buildCommandBody(command); try { uc = getHttpUrlConnection(result); uc.setRequestProperty("Content-Type", "application/x-www-form-urlencoded; charset=utf-8"); uc.setInstanceFollowRedirects(false); uc.setDoOutput(true); wr = getOutputStreamWriter(uc);; wr.write(body); wr.flush(); responsecode = getResponseCode(uc); if (responsecode == HttpURLConnection.HTTP_MOVED_PERM) { pathToServlet = uc.getRequestProperty("Location"); } else if (responsecode != HttpURLConnection.HTTP_OK) { throwAssertionFailureExceptionOrError(uc.getResponseMessage()); } else { rdr = getInputStreamReader(uc); responseString = stringContentsOfInputStream(rdr); } } finally { closeResources(uc, wr, rdr); } } return responseString; } protected void closeResources(HttpURLConnection conn, Writer wr, Reader rdr) { try { if (null != wr) { wr.close(); } } catch (IOException ioe) { // ignore } try { if (null != rdr) { rdr.close(); } } catch (IOException ioe) { // ignore 
} if (null != conn) { conn.disconnect(); } } private String buildCommandBody(String command) { StringBuffer sb = new StringBuffer(); sb.append(command); if (sessionId != null) { sb.append("&sessionId="); sb.append(Urls.urlEncode(sessionId)); } return sb.toString(); } /** * This should be invoked before start(). * * @param extensionJs the extra extension Javascript to include in this * browser session. */ public void setExtensionJs(String extensionJs) { this.extensionJs = extensionJs; } public void start() { String result = getString("getNewBrowserSession", new String[]{browserStartCommand, browserURL, extensionJs}); setSessionInProgress(result); } public void start(String optionsString) { String result = getString("getNewBrowserSession", new String[]{browserStartCommand, browserURL, extensionJs, optionsString}); setSessionInProgress(result); } /** * Wraps the version of start() that takes a String parameter, sending it * the result of calling toString() on optionsObject, which will likely be * a BrowserConfigurationOptions instance. * * @param optionsObject */ public void start(Object optionsObject) { start(optionsObject.toString()); } protected void setSessionInProgress(String result) { sessionId = result; } public void stop() { if (hasSessionInProgress()) { doCommand("testComplete", null); } setSessionInProgress(null); } public boolean hasSessionInProgress() { return null != sessionId; } public String getString(String commandName, String[] args) { String result = doCommand(commandName, args); if (result.length() >= "OK,".length()) { return result.substring("OK,".length()); } System.err.println("WARNING: getString(" + commandName + ") saw a bad result " + result); return ""; } public String[] getStringArray(String commandName, String[] args) { String result = getString(commandName, args); return parseCSV(result); } /** Convert backslash-escaped comma-delimited string into String array. As described in SRC-CDP * spec section 5.2.1.2, these strings are comma-delimited, but commas * can be escaped with a backslash "\". Backslashes can also be escaped * as a double-backslash. * @param input the unparsed string, e.g. 
"veni\, vidi\, vici,c:\\foo\\bar,c:\\I came\, I \\saw\\\, I conquered" * @return the string array resulting from parsing this string */ public static String[] parseCSV(String input) { List<String> output = Lists.newArrayList(); StringBuffer sb = new StringBuffer(); for(int i = 0; i < input.length(); i++) { char c = input.charAt(i); switch (c) { case ',': output.add(sb.toString()); sb = new StringBuffer(); continue; case '\\': i++; c = input.charAt(i); // fall through to: default: sb.append(c); } } output.add(sb.toString()); return output.toArray(new String[output.size()]); } public Number getNumber(String commandName, String[] args) { String result = getString(commandName, args); Number n; try { n = NumberFormat.getInstance().parse(result); } catch (ParseException e) { throw new RuntimeException(e); } if (n instanceof Long) { // SRC-315 we should return Integers if possible if (n.intValue() == n.longValue()) { return new Integer(n.intValue()); } } return n; } public Number[] getNumberArray(String commandName, String[] args) { String[] result = getStringArray(commandName, args); Number[] n = new Number[result.length]; for (int i = 0; i < result.length; i++) { try { n[i] = NumberFormat.getInstance().parse(result[i]); } catch (ParseException e) { throw new RuntimeException(e); } } return n; } public boolean getBoolean(String commandName, String[] args) { String result = getString(commandName, args); boolean b; if ("true".equals(result)) { b = true; return b; } if ("false".equals(result)) { b = false; return b; } throw new RuntimeException("result was neither 'true' nor 'false': " + result); } public boolean[] getBooleanArray(String commandName, String[] args) { String[] result = getStringArray(commandName, args); boolean[] b = new boolean[result.length]; for (int i = 0; i < result.length; i++) { if ("true".equals(result[i])) { b[i] = true; continue; } if ("false".equals(result[i])) { b[i] = false; continue; } throw new RuntimeException("result was neither 'true' nor 'false': " + Arrays.toString(result)); } return b; } }
SimonStewart: replacing a testng import with one from guava-libraries git-svn-id: aa1aa1384423cb28c2b1e29129bb3a91de1d9196@11659 07704840-8298-11de-bf8c-fd130f914ac9
java/client/src/com/thoughtworks/selenium/HttpCommandProcessor.java
SimonStewart: replacing a testng import with one from guava-libraries
Java
bsd-3-clause
7c3a03ad89eef190fb4487c210ce9803a2ddb3a3
0
NCIP/cagrid,NCIP/cagrid,NCIP/cagrid,NCIP/cagrid
package gov.nih.nci.cagrid.introduce.portal.modification.services.methods; import gov.nih.nci.cagrid.common.portal.ErrorDialog; import gov.nih.nci.cagrid.common.portal.PortalLookAndFeel; import gov.nih.nci.cagrid.common.portal.PortalUtils; import gov.nih.nci.cagrid.introduce.IntroduceConstants; import gov.nih.nci.cagrid.introduce.beans.method.MethodType; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeExceptions; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeExceptionsException; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeImportInformation; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeInputs; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeInputsInput; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeOutput; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeProviderInformation; import gov.nih.nci.cagrid.introduce.beans.method.MethodsType; import gov.nih.nci.cagrid.introduce.beans.namespace.NamespaceType; import gov.nih.nci.cagrid.introduce.beans.namespace.SchemaElementType; import gov.nih.nci.cagrid.introduce.beans.service.ServiceType; import gov.nih.nci.cagrid.introduce.common.CommonTools; import gov.nih.nci.cagrid.introduce.info.SpecificServiceInformation; import gov.nih.nci.cagrid.introduce.portal.common.IntroduceLookAndFeel; import gov.nih.nci.cagrid.introduce.portal.modification.security.MethodSecurityPanel; import gov.nih.nci.cagrid.introduce.portal.modification.types.NamespaceTypeTreeNode; import gov.nih.nci.cagrid.introduce.portal.modification.types.NamespacesJTree; import gov.nih.nci.cagrid.introduce.portal.modification.types.SchemaElementTypeTreeNode; import java.awt.FlowLayout; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import javax.swing.BorderFactory; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.JTabbedPane; import javax.swing.JTextField; import javax.swing.ScrollPaneConstants; import javax.swing.border.TitledBorder; import javax.xml.namespace.QName; import org.apache.axis.utils.JavaUtils; import org.projectmobius.portal.GridPortalBaseFrame; /** * MethodViewer * * @author <A HREF="MAILTO:[email protected]">Shannon Hastings </A> * @author <A HREF="MAILTO:[email protected]">Scott Oster </A> * @author <A HREF="MAILTO:[email protected]">Stephen Langella </A> */ public class MethodViewer extends GridPortalBaseFrame { public class ExceptionHolder implements Comparable { boolean isCreated; QName qname; public ExceptionHolder(QName qname, boolean isCreated) { this.qname = qname; this.isCreated = isCreated; } public int compareTo(Object arg0) { return this.qname.toString().compareTo(((ExceptionHolder) arg0).toString()); } public String toString() { return qname.toString(); } } private MethodType method; private JPanel mainPanel = null; private JScrollPane inputParamScrollPanel = null; private InputParametersTable inputParamTable = null; private JScrollPane outputTypejScrollPane = null; private OutputTypeTable 
outputTypeTable = null; private JPanel buttonPanel = null; private JButton doneButton = null; private JButton addInputParamButton = null; private JPanel namePanel = null; private JTextField nameField = null; private JButton removeButton = null; private JLabel methodLabel = null; private JPanel inputButtonPanel = null; private JButton cancelButton = null; private JPanel exceptionsPanel = null; private JScrollPane exceptionScrollPane = null; private JPanel exceptionInputPanel = null; private ExceptionsTable exceptionsTable = null; private JButton addExceptionButton = null; private JButton removeExceptionButton = null; private JTabbedPane tabbedPanel = null; private JPanel methodPanel = null; private JPanel securityContainerPanel = null; private SpecificServiceInformation info; private JTabbedPane configureTabbedPane = null; private JComboBox exceptionJComboBox = null; private JPanel inputNamespacesPanel = null; private JScrollPane inputNamespaceScrollPane = null; private NamespacesJTree inputNamespaceTypesJTree = null; private JPanel methodPropertiesPanel = null; private JPanel outputNamespacePanel = null; private JScrollPane outputNamespacesTypeScrollPane = null; private NamespacesJTree outputNamespacesJTree = null; private JPanel outputTypesTablePanel = null; private JPanel inputTypesTablePanel = null; private JPanel inputTableControlsPanel = null; private JLabel upLabel = null; private JLabel downLabel = null; private JButton clearOutputTypeButton = null; private JPanel exceptionsInputButtonPanel = null; private JPanel importInformationPanel = null; private JLabel serviceName = null; private JLabel wsdlFileLabel = null; private JLabel namespaceLabel = null; private JTextField namespaceTextField = null; private JTextField serviceNameTextField = null; private JTextField wsdlFileTextField = null; private JCheckBox isImportedCheckBox = null; private JLabel packageNameLabel = null; private JTextField packageNameTextField = null; private JCheckBox isProvidedCheckBox = null; private JScrollPane servicesTypeScrollPane = null; private ServicesTable servicesTypeTable = null; private JTextField providerClassnameTextField = null; private JPanel providerInformationPanel = null; private JLabel providerClassnameLabel = null; private JSplitPane inputParamsSplitPane = null; private JSplitPane outputTypeSplitPane = null; private JPanel messagePanel = null; private JLabel inputLabel = null; private JLabel outputLabel = null; private JTextField inputMessageNamespaceTextField = null; private JTextField outputMessageNamespaceTextField = null; private JCheckBox messagesCheckBox = null; private JTextField inputMessageNameTextField = null; private JTextField outputMessageNameTextField = null; private JPanel createFaultPanel = null; private JTextField newFaultNameTextField = null; private JButton createFaultButton = null; private JLabel faultTypeNameLabel = null; private JLabel existingExceptionLabel = null; private JSplitPane exceptionsPanelSplitPane = null; private JScrollPane exceptionNamespacesScrollPane = null; private NamespacesJTree namespacesJTree = null; private JPanel faultsFromTypesPanel = null; private JButton addFaultFromTypeButton = null; private JPanel removeFaultPanel = null; public MethodViewer(MethodType method, SpecificServiceInformation info) { this.info = info; this.method = method; this.setTitle("Modify Method"); initialize(); } private void initialize() { this.setContentPane(getMainPanel()); this.setTitle("Build/Modify Operation"); this.setSize(new java.awt.Dimension(683, 469)); 
this.setContentPane(getMainPanel()); this.setFrameIcon(IntroduceLookAndFeel.getModifyIcon()); } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getMainPanel() { if (mainPanel == null) { GridBagConstraints gridBagConstraints20 = new GridBagConstraints(); gridBagConstraints20.gridx = 0; gridBagConstraints20.weighty = 0.0D; gridBagConstraints20.weightx = 1.0D; gridBagConstraints20.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints20.gridy = 0; GridBagConstraints gridBagConstraints9 = new GridBagConstraints(); gridBagConstraints9.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints9.weighty = 1.0; gridBagConstraints9.gridx = 0; gridBagConstraints9.gridy = 1; gridBagConstraints9.weightx = 1.0; GridBagConstraints gridBagConstraints10 = new GridBagConstraints(); mainPanel = new JPanel(); mainPanel.setLayout(new GridBagLayout()); gridBagConstraints10.gridx = 0; gridBagConstraints10.gridy = 4; gridBagConstraints10.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints10.weightx = 0.0D; gridBagConstraints10.weighty = 0.0D; gridBagConstraints10.fill = java.awt.GridBagConstraints.BOTH; mainPanel.add(getButtonPanel(), gridBagConstraints10); mainPanel.add(getTabbedPanel(), gridBagConstraints9); mainPanel.add(getMethodPropertiesPanel(), gridBagConstraints20); } return mainPanel; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getInputParamScrollPanel() { if (inputParamScrollPanel == null) { inputParamScrollPanel = new JScrollPane(); inputParamScrollPanel.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED); inputParamScrollPanel.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); inputParamScrollPanel.setViewportView(getInputParamTable()); } return inputParamScrollPanel; } /** * This method initializes jTable * * @return javax.swing.JTable */ private InputParametersTable getInputParamTable() { if (inputParamTable == null) { inputParamTable = new InputParametersTable(this.method); } return inputParamTable; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getOutputTypejScrollPane() { if (outputTypejScrollPane == null) { outputTypejScrollPane = new JScrollPane(); outputTypejScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED); outputTypejScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); outputTypejScrollPane.setViewportView(getOutputTypeTable()); } return outputTypejScrollPane; } /** * This method initializes jTable * * @return javax.swing.JTable */ private OutputTypeTable getOutputTypeTable() { if (outputTypeTable == null) { outputTypeTable = new OutputTypeTable(this.method); } return outputTypeTable; } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getButtonPanel() { if (buttonPanel == null) { buttonPanel = new JPanel(); buttonPanel.setLayout(new FlowLayout()); buttonPanel.add(getDoneButton(), null); buttonPanel.add(getCancelButton(), null); } return buttonPanel; } /** * This method initializes jButton * * @return javax.swing.JButton */ public JButton getDoneButton() { if (doneButton == null) { doneButton = new JButton(IntroduceLookAndFeel.getDoneIcon()); doneButton.setText("Done"); doneButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { // First process the inputs boolean valid = true; String message = ""; 
List usedNames = new ArrayList(); MethodsType methodsType = info.getService().getMethods(); if (methodsType != null) { MethodType methods[] = methodsType.getMethod(); if (methods != null) { for (int j = 0; j < methods.length; j++) { MethodType tmethod = methods[j]; if (!usedNames.contains(tmethod.getName())) { usedNames.add(tmethod.getName()); } else { valid = false; message = "Method name is not unique: " + tmethod.getName(); } } } } if (!method.getName().equals(getNameField().getText())) { if (usedNames.contains(getNameField().getText())) { valid = false; message = "Method name is not unique: " + getNameField().getText(); } } try { method.setName(getNameField().getText()); method.setMethodSecurity(((MethodSecurityPanel) securityContainerPanel).getMethodSecurity()); // process the inputs MethodTypeInputs inputs = new MethodTypeInputs(); MethodTypeInputsInput[] inputsA = new MethodTypeInputsInput[getInputParamTable().getRowCount()]; usedNames = new ArrayList(); for (int i = 0; i < getInputParamTable().getRowCount(); i++) { MethodTypeInputsInput input = getInputParamTable().getRowData(i); // validate the input param if (usedNames.contains(input.getName())) { valid = false; message = "Method " + method.getName() + " contains more than one parameter named " + input.getName(); } usedNames.add(input.getName()); if (!JavaUtils.isJavaId(input.getName())) { valid = false; message = "Parameter name must be a valid java identifier: Method: " + method.getName() + " param: " + input.getName(); } inputsA[i] = input; } inputs.setInput(inputsA); method.setInputs(inputs); // process exceptions MethodTypeExceptions exceptions = new MethodTypeExceptions(); MethodTypeExceptionsException[] exceptionsA = new MethodTypeExceptionsException[getExceptionsTable() .getRowCount()]; for (int i = 0; i < getExceptionsTable().getRowCount(); i++) { MethodTypeExceptionsException exception = getExceptionsTable().getRowData(i); exceptionsA[i] = exception; } exceptions.setException(exceptionsA); method.setExceptions(exceptions); // now process the output MethodTypeOutput output = getOutputTypeTable().getRowData(0); method.setOutput(output); if (getIsImportedCheckBox().isSelected() && getIsProvidedCheckBox().isSelected()) { method.setIsProvided(true); MethodTypeProviderInformation pi = new MethodTypeProviderInformation(); pi.setProviderClass(getProviderClassnameTextField().getText()); method.setProviderInformation(pi); } else { method.setIsProvided(false); } if (getIsImportedCheckBox().isSelected()) { // validate the import // make sure there are no collision problems with // namespaces or packages.....
for (int i = 0; i < info.getNamespaces().getNamespace().length; i++) { NamespaceType nsType = info.getNamespaces().getNamespace(i); if (nsType.getNamespace().equals(getNamespaceTextField().getText()) && !nsType.getPackageName().equals(getPackageNameTextField().getText())) { valid = false; message = "Service Namespace is already being used and Package Name does not match : " + getPackageNameTextField().getText() + " != " + nsType.getPackageName(); } } // process the import information method.setIsImported(true); if (getIsImportedCheckBox().isSelected()) { MethodTypeImportInformation importInfo = new MethodTypeImportInformation(); importInfo.setNamespace(getNamespaceTextField().getText()); importInfo.setPortTypeName(getServiceNameTextField().getText()); importInfo.setPackageName(getPackageNameTextField().getText()); importInfo.setWsdlFile(getWsdlFileTextField().getText()); if (!getInputMessageNamespaceTextField().getText().equals("") && !getInputMessageNameTextField().getText().equals("")) { importInfo.setInputMessage(new QName(getInputMessageNamespaceTextField().getText(), getInputMessageNameTextField().getText())); } if (!getOutputMessageNamespaceTextField().getText().equals("") && !getOutputMessageNameTextField().getText().equals("")) { importInfo.setOutputMessage(new QName(getOutputMessageNamespaceTextField() .getText(), getOutputMessageNameTextField().getText())); } method.setImportInformation(importInfo); } } else { method.setIsImported(false); } } catch (Exception ex) { ex.printStackTrace(); // PortalUtils.showErrorMessage(ex); ErrorDialog.showErrorDialog(ex); } if (!valid) { JOptionPane.showMessageDialog(MethodViewer.this, message); } else { dispose(); } } }); } return doneButton; } /** * This method initializes jButton * * @return javax.swing.JButton */ private JButton getAddInputParamButton() { if (addInputParamButton == null) { addInputParamButton = new JButton(PortalLookAndFeel.getAddIcon()); addInputParamButton.setText("Add"); addInputParamButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { if (getInputNamespaceTypesJTree().getCurrentNode() instanceof SchemaElementTypeTreeNode) { NamespaceType nt = ((NamespaceType) ((NamespaceTypeTreeNode) getInputNamespaceTypesJTree() .getCurrentNode().getParent()).getUserObject()); SchemaElementType st = ((SchemaElementType) ((SchemaElementTypeTreeNode) getInputNamespaceTypesJTree() .getCurrentNode()).getUserObject()); MethodTypeInputsInput input = new MethodTypeInputsInput(); input.setQName(new QName(nt.getNamespace(), st.getType())); input.setIsArray(false); input.setName(JavaUtils.xmlNameToJava(st.getType())); getInputParamTable().addRow(input); } else { JOptionPane.showMessageDialog(MethodViewer.this, "Please select a type to add"); } } }); } return addInputParamButton; } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getNamePanel() { if (namePanel == null) { GridBagConstraints gridBagConstraints110 = new GridBagConstraints(); gridBagConstraints110.gridx = 2; gridBagConstraints110.insets = new java.awt.Insets(0, 30, 0, 0); gridBagConstraints110.gridy = 0; methodLabel = new JLabel(); methodLabel.setText("Method Name"); GridBagConstraints gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST; gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints.gridheight = 2; gridBagConstraints.gridy = 0; GridBagConstraints 
gridBagConstraints2 = new GridBagConstraints(); namePanel = new JPanel(); namePanel.setLayout(new GridBagLayout()); namePanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Method Properties", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); gridBagConstraints2.gridx = 1; gridBagConstraints2.gridheight = 2; gridBagConstraints2.gridwidth = 1; gridBagConstraints2.anchor = java.awt.GridBagConstraints.CENTER; gridBagConstraints2.gridy = 0; gridBagConstraints2.weightx = 1.0D; gridBagConstraints2.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints2.insets = new java.awt.Insets(2, 2, 2, 2); namePanel.add(getNameField(), gridBagConstraints2); namePanel.add(methodLabel, gridBagConstraints); namePanel.add(getIsImportedCheckBox(), gridBagConstraints110); } return namePanel; } /** * This method initializes jTextField * * @return javax.swing.JTextField */ private JTextField getNameField() { if (nameField == null) { nameField = new JTextField(); nameField.setText(method.getName()); // nameField.setText(methodsTable.getSelectedMethodType().getName()); } return nameField; } /** * This method initializes jButton * * @return javax.swing.JButton */ private JButton getRemoveButton() { if (removeButton == null) { removeButton = new JButton(PortalLookAndFeel.getRemoveIcon()); removeButton.setText("Remove"); removeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { try { getInputParamTable().removeSelectedRow(); } catch (Exception ex) { PortalUtils.showErrorMessage("Please select an input parameter to Remove"); } } }); } return removeButton; } /** * This method initializes inputButtonPanel * * @return javax.swing.JPanel */ private JPanel getInputButtonPanel() { if (inputButtonPanel == null) { inputButtonPanel = new JPanel(); inputButtonPanel.add(getAddInputParamButton(), null); inputButtonPanel.add(getRemoveButton(), null); } return inputButtonPanel; } /** * This method initializes cancelButton * * @return javax.swing.JButton */ private JButton getCancelButton() { if (cancelButton == null) { cancelButton = new JButton(); cancelButton.setText("Cancel"); cancelButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { dispose(); } }); cancelButton.setIcon(PortalLookAndFeel.getCloseIcon()); } return cancelButton; } /** * This method initializes exceptionsPanel * * @return javax.swing.JPanel */ private JPanel
getExceptionsPanel() { if (exceptionsPanel == null) { GridBagConstraints gridBagConstraints49 = new GridBagConstraints(); gridBagConstraints49.gridx = 1; gridBagConstraints49.gridheight = 3; gridBagConstraints49.gridy = 1; GridBagConstraints gridBagConstraints46 = new GridBagConstraints(); gridBagConstraints46.gridx = 0; gridBagConstraints46.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints46.weightx = 1.0D; gridBagConstraints46.gridy = 1; GridBagConstraints gridBagConstraints11 = new GridBagConstraints(); gridBagConstraints11.fill = GridBagConstraints.BOTH; gridBagConstraints11.gridy = 2; gridBagConstraints11.weightx = 1.0D; gridBagConstraints11.gridx = 0; GridBagConstraints gridBagConstraints12 = new GridBagConstraints(); gridBagConstraints12.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints12.gridy = 3; gridBagConstraints12.weightx = 1.0D; gridBagConstraints12.gridx = 0; GridBagConstraints gridBagConstraints3 = new GridBagConstraints(); gridBagConstraints3.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints3.weighty = 1.0; gridBagConstraints3.gridx = 0; gridBagConstraints3.gridy = 0; gridBagConstraints3.gridwidth = 2; gridBagConstraints3.weightx = 1.0; exceptionsPanel = new JPanel(); exceptionsPanel.setLayout(new GridBagLayout()); exceptionsPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Faults", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); exceptionsPanel.add(getExceptionsPanelSplitPane(), gridBagConstraints3); exceptionsPanel.add(getCreateFaultPanel(), gridBagConstraints12); exceptionsPanel.add(getExceptionInputPanel(), gridBagConstraints11); exceptionsPanel.add(getFaultsFromTypesPanel(), gridBagConstraints46); exceptionsPanel.add(getRemoveFaultPanel(), gridBagConstraints49); } return exceptionsPanel; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getExceptionScrollPane() { if (exceptionScrollPane == null) { exceptionScrollPane = new JScrollPane(); exceptionScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); exceptionScrollPane.setViewportView(getExceptionsTable()); } return exceptionScrollPane; } /** * This method initializes exceptionInputPanel * * @return javax.swing.JPanel */ private JPanel getExceptionInputPanel() { if (exceptionInputPanel == null) { GridBagConstraints gridBagConstraints4 = new GridBagConstraints(); gridBagConstraints4.insets = new Insets(2, 2, 2, 2); gridBagConstraints4.gridy = 0; gridBagConstraints4.gridx = 2; GridBagConstraints gridBagConstraints51 = new GridBagConstraints(); gridBagConstraints51.gridx = 0; gridBagConstraints51.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints51.insets = new java.awt.Insets(2, 2, 2, 10); gridBagConstraints51.gridy = 0; existingExceptionLabel = new JLabel(); existingExceptionLabel.setText("Used Faults:"); GridBagConstraints gridBagConstraints27 = new GridBagConstraints(); gridBagConstraints27.gridx = 2; gridBagConstraints27.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints27.gridheight = 2; gridBagConstraints27.gridy = 1; GridBagConstraints gridBagConstraints15 = new GridBagConstraints(); gridBagConstraints15.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints15.gridx = 1; gridBagConstraints15.gridy = 0; gridBagConstraints15.weightx = 1.0; gridBagConstraints15.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints15.gridheight = 1; 
exceptionInputPanel = new JPanel(); exceptionInputPanel.setLayout(new GridBagLayout()); exceptionInputPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Choose Used Fault", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); exceptionInputPanel.add(getExceptionJComboBox(), gridBagConstraints15); exceptionInputPanel.add(getExceptionsInputButtonPanel(), gridBagConstraints27); exceptionInputPanel.add(existingExceptionLabel, gridBagConstraints51); exceptionInputPanel.add(getAddExceptionButton(), gridBagConstraints4); } return exceptionInputPanel; } /** * This method initializes faultsTable * * @return javax.swing.JTable */ private ExceptionsTable getExceptionsTable() { if (exceptionsTable == null) { exceptionsTable = new ExceptionsTable(this.method, this.info.getService()); } return exceptionsTable; } /** * This method initializes addExceptionButton * * @return javax.swing.JButton */ private JButton getAddExceptionButton() { if (addExceptionButton == null) { addExceptionButton = new JButton(PortalLookAndFeel.getAddIcon()); addExceptionButton.setText("Add Used Fault"); addExceptionButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { ExceptionHolder exceptionHolder = null; if (getExceptionJComboBox().getSelectedItem() != null) { exceptionHolder = (ExceptionHolder) getExceptionJComboBox().getSelectedItem(); } if (exceptionHolder != null) { // parse qname string into qname for (int i = 0; i < getExceptionsTable().getRowCount(); i++) { MethodTypeExceptionsException exception = null; try { exception = getExceptionsTable().getRowData(i); } catch (Exception e1) { e1.printStackTrace(); } if (exception != null && exception.getQname() != null && exception.getQname().equals(exceptionHolder.qname)) { JOptionPane.showMessageDialog(MethodViewer.this, "Exception (" + exceptionHolder + ") already thrown by method."); return; } } getExceptionsTable().addRow(exceptionHolder.qname, exceptionHolder.isCreated); } else { JOptionPane.showMessageDialog(MethodViewer.this, "Please select an exception first!"); } } }); } return addExceptionButton; } /** * This method initializes removeExceptionButton * * @return javax.swing.JButton */ private JButton getRemoveExceptionButton() { if (removeExceptionButton == null) { removeExceptionButton = new JButton(PortalLookAndFeel.getRemoveIcon()); removeExceptionButton.setText("Remove"); removeExceptionButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { try { getExceptionsTable().removeSelectedRow(); } catch (Exception ex) { PortalUtils.showErrorMessage("Please select an exception to Remove"); } } }); } return removeExceptionButton; } /** * This method initializes tabbedPanel * * @return javax.swing.JTabbedPane */ private JTabbedPane getTabbedPanel() { if (tabbedPanel == null) { tabbedPanel = new JTabbedPane(); tabbedPanel.addTab("Method Signature", null, getMethodPanel(), null); tabbedPanel.addTab("Security", null, getSecurityContainerPanel(), null); tabbedPanel.addTab("Import Information", null, getImportInformationPanel(), null); } return tabbedPanel; } /** * This method initializes methodPanel * * @return javax.swing.JPanel */ private JPanel getMethodPanel() { if (methodPanel == null) { GridBagConstraints gridBagConstraints1 = new GridBagConstraints(); gridBagConstraints1.fill = GridBagConstraints.BOTH; 
gridBagConstraints1.gridx = -1; gridBagConstraints1.gridy = -1; gridBagConstraints1.weightx = 1.0; gridBagConstraints1.weighty = 1.0; gridBagConstraints1.insets = new Insets(2, 2, 2, 2); methodPanel = new JPanel(); methodPanel.setLayout(new GridBagLayout()); methodPanel.add(getConfigureTabbedPane(), gridBagConstraints1); } return methodPanel; } /** * This method initializes securityContainerPanel * * @return javax.swing.JPanel */ private JPanel getSecurityContainerPanel() { if (securityContainerPanel == null) { securityContainerPanel = new MethodSecurityPanel(info.getService().getServiceSecurity(), this.method .getMethodSecurity()); securityContainerPanel.setBorder(BorderFactory.createTitledBorder(null, "Method Level Security Configuration", TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); } return securityContainerPanel; } /** * This method initializes configureTabbedPane * * @return javax.swing.JTabbedPane */ private JTabbedPane getConfigureTabbedPane() { if (configureTabbedPane == null) { configureTabbedPane = new JTabbedPane(); configureTabbedPane.addTab("Inputs", null, getInputParamsSplitPane(), null); configureTabbedPane.addTab("Output", null, getOutputTypeSplitPane(), null); configureTabbedPane.addTab("Faults", null, getExceptionsPanel(), null); } return configureTabbedPane; } /** * This method initializes exceptionEditText * * @return javax.swing.JTextField */ private JComboBox getExceptionJComboBox() { if (exceptionJComboBox == null) { exceptionJComboBox = new JComboBox(); // populate with currently used exception names ServiceType[] service = this.info.getServices().getService(); SortedSet exceptionNameSet = new TreeSet(); for (int namespaceI = 0; namespaceI < info.getNamespaces().getNamespace().length; namespaceI++) { NamespaceType namespace = info.getNamespaces().getNamespace(namespaceI); } if (service != null) { for (int i = 0; i < service.length; i++) { MethodsType methodsType = service[i].getMethods(); if (methodsType != null) { MethodType methods[] = methodsType.getMethod(); if (methods != null) { for (int j = 0; j < methods.length; j++) { MethodTypeExceptions exceptionsType = methods[j].getExceptions(); if (exceptionsType != null) { MethodTypeExceptionsException[] exceptions = exceptionsType.getException(); if (exceptions != null) { for (int e = 0; e < exceptions.length; e++) { if (exceptions[e].getQname() != null) { exceptionNameSet .add(new ExceptionHolder(exceptions[e].getQname(), true)); } else { exceptionNameSet.add(new ExceptionHolder(new QName(info.getService() .getNamespace() + "/types", exceptions[e].getName()), false)); } } } } } } } } } for (Iterator iter = exceptionNameSet.iterator(); iter.hasNext();) { exceptionJComboBox.addItem(iter.next()); } } return exceptionJComboBox; } /** * This method initializes namespacesPanel * * @return javax.swing.JPanel */ private JPanel getInputNamespacesPanel() { if (inputNamespacesPanel == null) { GridBagConstraints gridBagConstraints19 = new GridBagConstraints(); gridBagConstraints19.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints19.weighty = 1.0; gridBagConstraints19.gridx = 0; gridBagConstraints19.gridy = 0; gridBagConstraints19.insets = new java.awt.Insets(0, 0, 0, 0); gridBagConstraints19.weightx = 1.0; inputNamespacesPanel = new JPanel(); inputNamespacesPanel.setLayout(new GridBagLayout()); inputNamespacesPanel.add(getInputNamespaceScrollPane(), gridBagConstraints19); } return inputNamespacesPanel; } /** * This method initializes 
namespaceScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getInputNamespaceScrollPane() { if (inputNamespaceScrollPane == null) { inputNamespaceScrollPane = new JScrollPane(); inputNamespaceScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); inputNamespaceScrollPane.setViewportView(getInputNamespaceTypesJTree()); } return inputNamespaceScrollPane; } /** * This method initializes namespaceTypesJTree * * @return javax.swing.JTree */ private NamespacesJTree getInputNamespaceTypesJTree() { if (inputNamespaceTypesJTree == null) { inputNamespaceTypesJTree = new NamespacesJTree(info.getNamespaces(), true); inputNamespaceTypesJTree.addMouseListener(new MouseListener() { public void mouseReleased(MouseEvent e) { // TODO Auto-generated method stub } public void mousePressed(MouseEvent e) { // TODO Auto-generated method stub } public void mouseExited(MouseEvent e) { // TODO Auto-generated method stub } public void mouseEntered(MouseEvent e) { // TODO Auto-generated method stub } public void mouseClicked(MouseEvent e) { if (e.getClickCount() == 2) { if (getInputNamespaceTypesJTree().getCurrentNode() instanceof SchemaElementTypeTreeNode) { NamespaceType nt = ((NamespaceType) ((NamespaceTypeTreeNode) getInputNamespaceTypesJTree() .getCurrentNode().getParent()).getUserObject()); SchemaElementType st = ((SchemaElementType) ((SchemaElementTypeTreeNode) getInputNamespaceTypesJTree() .getCurrentNode()).getUserObject()); MethodTypeInputsInput input = new MethodTypeInputsInput(); input.setQName(new QName(nt.getNamespace(), st.getType())); input.setIsArray(false); input.setName(JavaUtils.xmlNameToJava(st.getType())); getInputParamTable().addRow(input); } } } }); } return inputNamespaceTypesJTree; } /** * This method initializes methodPropertiesPanel * * @return javax.swing.JPanel */ private JPanel getMethodPropertiesPanel() { if (methodPropertiesPanel == null) { GridBagConstraints gridBagConstraints13 = new GridBagConstraints(); gridBagConstraints13.fill = GridBagConstraints.BOTH; gridBagConstraints13.gridx = 0; gridBagConstraints13.gridy = 0; gridBagConstraints13.weightx = 1.0D; gridBagConstraints13.weighty = 0.0D; gridBagConstraints13.insets = new Insets(2, 2, 2, 2); methodPropertiesPanel = new JPanel(); methodPropertiesPanel.setLayout(new GridBagLayout()); methodPropertiesPanel.add(getNamePanel(), gridBagConstraints13); } return methodPropertiesPanel; } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getOutputNamespacePanel() { if (outputNamespacePanel == null) { GridBagConstraints gridBagConstraints29 = new GridBagConstraints(); gridBagConstraints29.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints29.weighty = 1.0; gridBagConstraints29.gridx = 0; gridBagConstraints29.gridy = 3; gridBagConstraints29.weightx = 1.0; GridBagConstraints gridBagConstraints6 = new GridBagConstraints(); gridBagConstraints6.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints6.gridy = 0; gridBagConstraints6.gridx = 0; gridBagConstraints6.gridheight = 3; gridBagConstraints6.weighty = 1.0; gridBagConstraints6.weightx = 1.0; outputNamespacePanel = new JPanel(); outputNamespacePanel.setLayout(new GridBagLayout()); outputNamespacePanel.add(getOutputNamespacesTypeScrollPane(), gridBagConstraints6); outputNamespacePanel.add(getServicesTypeScrollPane(), gridBagConstraints29); } return outputNamespacePanel; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane 
getOutputNamespacesTypeScrollPane() { if (outputNamespacesTypeScrollPane == null) { outputNamespacesTypeScrollPane = new JScrollPane(); outputNamespacesTypeScrollPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Data Types", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); outputNamespacesTypeScrollPane.setViewportView(getOutputNamespacesJTree()); } return outputNamespacesTypeScrollPane; } /** * This method initializes outputNamespacesJTree * * @return javax.swing.JTree */ private NamespacesJTree getOutputNamespacesJTree() { if (outputNamespacesJTree == null) { outputNamespacesJTree = new NamespacesJTree(info.getNamespaces(), true); outputNamespacesJTree.addMouseListener(new MouseListener() { public void mouseClicked(MouseEvent e) { // TODO Auto-generated method stub if (e.getClickCount() == 2) { if (getOutputNamespacesJTree().getCurrentNode() instanceof SchemaElementTypeTreeNode) { NamespaceType nt = ((NamespaceType) ((NamespaceTypeTreeNode) getOutputNamespacesJTree() .getCurrentNode().getParent()).getUserObject()); SchemaElementType st = ((SchemaElementType) ((SchemaElementTypeTreeNode) getOutputNamespacesJTree() .getCurrentNode()).getUserObject()); MethodTypeOutput output = new MethodTypeOutput(); output.setQName(new QName(nt.getNamespace(), st.getType())); output.setIsArray(false); try { getOutputTypeTable().modifyRow(0, output); } catch (Exception ex) { ex.printStackTrace(); } } } } public void mouseEntered(MouseEvent e) { // TODO Auto-generated method stub } public void mouseExited(MouseEvent e) { // TODO Auto-generated method stub } public void mousePressed(MouseEvent e) { // TODO Auto-generated method stub } public void mouseReleased(MouseEvent e) { // TODO Auto-generated method stub } }); } return outputNamespacesJTree; } /** * This method initializes outputTypesTablePanel * * @return javax.swing.JPanel */ private JPanel getOutputTypesTablePanel() { if (outputTypesTablePanel == null) { GridBagConstraints gridBagConstraints26 = new GridBagConstraints(); gridBagConstraints26.gridx = 0; gridBagConstraints26.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints26.gridy = 0; GridBagConstraints gridBagConstraints22 = new GridBagConstraints(); gridBagConstraints22.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints22.gridx = 0; gridBagConstraints22.gridy = 1; gridBagConstraints22.weightx = 1.0; gridBagConstraints22.weighty = 1.0; gridBagConstraints22.gridwidth = 3; gridBagConstraints22.insets = new java.awt.Insets(2, 2, 2, 2); outputTypesTablePanel = new JPanel(); outputTypesTablePanel.setLayout(new GridBagLayout()); outputTypesTablePanel.add(getOutputTypejScrollPane(), gridBagConstraints22); outputTypesTablePanel.add(getClearOutputTypeButton(), gridBagConstraints26); } return outputTypesTablePanel; } /** * This method initializes inputTypesTablePanel * * @return javax.swing.JPanel */ private JPanel getInputTypesTablePanel() { if (inputTypesTablePanel == null) { GridBagConstraints gridBagConstraints18 = new GridBagConstraints(); gridBagConstraints18.gridx = 1; gridBagConstraints18.fill = java.awt.GridBagConstraints.VERTICAL; gridBagConstraints18.gridy = 0; GridBagConstraints gridBagConstraints14 = new GridBagConstraints(); gridBagConstraints14.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints14.gridwidth = 2; gridBagConstraints14.gridx = 0; gridBagConstraints14.gridy = 1; gridBagConstraints14.weightx = 0.0D; gridBagConstraints14.weighty = 0.0D; 
gridBagConstraints14.insets = new Insets(2, 2, 2, 2); GridBagConstraints gridBagConstraints21 = new GridBagConstraints(); gridBagConstraints21.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints21.gridx = 0; gridBagConstraints21.gridy = 0; gridBagConstraints21.weightx = 1.0; gridBagConstraints21.weighty = 1.0; gridBagConstraints21.insets = new java.awt.Insets(0, 0, 0, 0); inputTypesTablePanel = new JPanel(); inputTypesTablePanel.setLayout(new GridBagLayout()); inputTypesTablePanel.add(getInputParamScrollPanel(), gridBagConstraints21); inputTypesTablePanel.add(getInputButtonPanel(), gridBagConstraints14); inputTypesTablePanel.add(getInputTableControlsPanel(), gridBagConstraints18); } return inputTypesTablePanel; } /** * This method initializes inputTableControlsPanel1 * * @return javax.swing.JPanel */ private JPanel getInputTableControlsPanel() { if (inputTableControlsPanel == null) { GridBagConstraints gridBagConstraints25 = new GridBagConstraints(); gridBagConstraints25.gridx = 0; gridBagConstraints25.gridy = 0; GridBagConstraints gridBagConstraints24 = new GridBagConstraints(); gridBagConstraints24.gridx = 0; gridBagConstraints24.gridy = 1; downLabel = new JLabel(); downLabel.setText(""); downLabel.setIcon(IntroduceLookAndFeel.getDownIcon()); downLabel.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { super.mouseClicked(e); try { getInputParamTable().moveSelectedRowDown(); } catch (Exception e1) { e1.printStackTrace(); } } }); upLabel = new JLabel(); upLabel.setText(""); upLabel.setIcon(IntroduceLookAndFeel.getUpIcon()); upLabel.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { super.mouseClicked(e); try { getInputParamTable().moveSelectedRowUp(); } catch (Exception e1) { e1.printStackTrace(); } } }); inputTableControlsPanel = new JPanel(); inputTableControlsPanel.setLayout(new GridBagLayout()); inputTableControlsPanel.add(upLabel, gridBagConstraints25); inputTableControlsPanel.add(downLabel, gridBagConstraints24); } return inputTableControlsPanel; } /** * This method initializes clearOutputTypeButton * * @return javax.swing.JButton */ private JButton getClearOutputTypeButton() { if (clearOutputTypeButton == null) { clearOutputTypeButton = new JButton(); clearOutputTypeButton.setText("Clear Output Type"); clearOutputTypeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { MethodTypeOutput output = new MethodTypeOutput(); output.setQName(new QName("", "void")); try { getOutputTypeTable().modifyRow(0, output); } catch (Exception e1) { e1.printStackTrace(); } } }); } return clearOutputTypeButton; } /** * This method initializes exceptionsInputButtonPanel * * @return javax.swing.JPanel */ private JPanel getExceptionsInputButtonPanel() { if (exceptionsInputButtonPanel == null) { exceptionsInputButtonPanel = new JPanel(); exceptionsInputButtonPanel.setLayout(new GridBagLayout()); } return exceptionsInputButtonPanel; } /** * This method initializes importInformationPanel * * @return javax.swing.JPanel */ private JPanel getImportInformationPanel() { if (importInformationPanel == null) { GridBagConstraints gridBagConstraints7 = new GridBagConstraints(); gridBagConstraints7.gridy = 5; gridBagConstraints7.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints7.gridwidth = 2; GridBagConstraints gridBagConstraints36 = new GridBagConstraints(); gridBagConstraints36.gridx = 0; gridBagConstraints36.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints36.gridwidth = 2; gridBagConstraints36.gridy = 6;
GridBagConstraints gridBagConstraints23 = new GridBagConstraints(); gridBagConstraints23.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints23.gridy = 0; gridBagConstraints23.gridx = 1; GridBagConstraints gridBagConstraints41 = new GridBagConstraints(); gridBagConstraints41.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints41.gridy = 1; gridBagConstraints41.weightx = 1.0; gridBagConstraints41.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints41.gridx = 1; GridBagConstraints gridBagConstraints40 = new GridBagConstraints(); gridBagConstraints40.gridx = 0; gridBagConstraints40.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints40.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints40.gridy = 1; packageNameLabel = new JLabel(); packageNameLabel.setText("Package Name"); GridBagConstraints gridBagConstraints35 = new GridBagConstraints(); gridBagConstraints35.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints35.gridy = 4; gridBagConstraints35.weightx = 1.0; gridBagConstraints35.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints35.gridx = 1; GridBagConstraints gridBagConstraints33 = new GridBagConstraints(); gridBagConstraints33.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints33.gridy = 2; gridBagConstraints33.weightx = 1.0; gridBagConstraints33.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints33.gridx = 1; GridBagConstraints gridBagConstraints32 = new GridBagConstraints(); gridBagConstraints32.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints32.gridy = 0; gridBagConstraints32.weightx = 1.0; gridBagConstraints32.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints32.gridx = 1; GridBagConstraints gridBagConstraints31 = new GridBagConstraints(); gridBagConstraints31.gridx = 0; gridBagConstraints31.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints31.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints31.gridy = 0; namespaceLabel = new JLabel(); namespaceLabel.setText("Namespace"); GridBagConstraints gridBagConstraints30 = new GridBagConstraints(); gridBagConstraints30.gridx = 0; gridBagConstraints30.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints30.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints30.gridy = 4; wsdlFileLabel = new JLabel(); wsdlFileLabel.setText("WSDL File"); GridBagConstraints gridBagConstraints28 = new GridBagConstraints(); gridBagConstraints28.gridx = 0; gridBagConstraints28.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints28.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints28.gridy = 2; serviceName = new JLabel(); serviceName.setText("PortType"); importInformationPanel = new JPanel(); importInformationPanel.setLayout(new GridBagLayout()); importInformationPanel.add(serviceName, gridBagConstraints28); importInformationPanel.add(wsdlFileLabel, gridBagConstraints30); importInformationPanel.add(namespaceLabel, gridBagConstraints31); importInformationPanel.add(getNamespaceTextField(), gridBagConstraints32); importInformationPanel.add(getServiceNameTextField(), gridBagConstraints33); importInformationPanel.add(getWsdlFileTextField(), gridBagConstraints35); importInformationPanel.add(packageNameLabel, gridBagConstraints40); importInformationPanel.add(getPackageNameTextField(), gridBagConstraints41); importInformationPanel.add(getMessagePanel(), gridBagConstraints7); importInformationPanel.add(getProviderInformationPanel(), gridBagConstraints36); } return importInformationPanel; }
/** * This method initializes namespaceTextField * * @return javax.swing.JTextField */ private JTextField getNamespaceTextField() { if (namespaceTextField == null) { namespaceTextField = new JTextField(); if (method.getImportInformation() != null) { namespaceTextField.setText(method.getImportInformation().getNamespace()); } } return namespaceTextField; } /** * This method initializes serviceNameTextField * * @return javax.swing.JTextField */ private JTextField getServiceNameTextField() { if (serviceNameTextField == null) { serviceNameTextField = new JTextField(); if (method.getImportInformation() != null) { serviceNameTextField.setText(method.getImportInformation().getPortTypeName()); } } return serviceNameTextField; } /** * This method initializes wsdlFileTextField * * @return javax.swing.JTextField */ private JTextField getWsdlFileTextField() { if (wsdlFileTextField == null) { wsdlFileTextField = new JTextField(); if (method.getImportInformation() != null) { wsdlFileTextField.setText(method.getImportInformation().getWsdlFile()); } } return wsdlFileTextField; } /** * This method initializes isImportedCheckBox * * @return javax.swing.JCheckBox */ private JCheckBox getIsImportedCheckBox() { if (isImportedCheckBox == null) { isImportedCheckBox = new JCheckBox(); isImportedCheckBox.setText("imported"); isImportedCheckBox.setSelected(method.isIsImported()); if (isImportedCheckBox.isSelected()) { getTabbedPanel().setEnabledAt(2, true); } else { getTabbedPanel().setEnabledAt(2, false); if (getTabbedPanel().getSelectedIndex() == 2) { getTabbedPanel().setSelectedIndex(0); } } isImportedCheckBox.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (isImportedCheckBox.isSelected()) { getTabbedPanel().setEnabledAt(2, true); } else { getTabbedPanel().setEnabledAt(2, false); if (getTabbedPanel().getSelectedIndex() == 2) { getTabbedPanel().setSelectedIndex(0); } } } }); } return isImportedCheckBox; } /** * This method initializes packageNameTextField * * @return javax.swing.JTextField */ private JTextField getPackageNameTextField() { if (packageNameTextField == null) { packageNameTextField = new JTextField(); if (method.getImportInformation() != null) { packageNameTextField.setText(method.getImportInformation().getPackageName()); } } return packageNameTextField; } /** * This method initializes isProvidedCheckBox * * @return javax.swing.JCheckBox */ private JCheckBox getIsProvidedCheckBox() { if (isProvidedCheckBox == null) { isProvidedCheckBox = new JCheckBox(); isProvidedCheckBox.setText("Provided"); getProviderClassnameTextField().setEnabled(false); getProviderClassnameTextField().setEditable(false); isProvidedCheckBox.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (isProvidedCheckBox.isSelected()) { getProviderClassnameTextField().setEnabled(true); getProviderClassnameTextField().setEditable(true); } else { getProviderClassnameTextField().setEnabled(false); getProviderClassnameTextField().setEditable(false); } } }); } isProvidedCheckBox.setSelected(method.isIsProvided()); return isProvidedCheckBox; } /** * This method initializes servicesTypeScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getServicesTypeScrollPane() { if (servicesTypeScrollPane == null) { servicesTypeScrollPane = new JScrollPane(); servicesTypeScrollPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Client Handle Types", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, 
javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); servicesTypeScrollPane.setViewportView(getServicesTypeTable()); } return servicesTypeScrollPane; } /** * This method initializes servicesTypeTable * * @return javax.swing.JTable */ private ServicesTable getServicesTypeTable() { if (servicesTypeTable == null) { servicesTypeTable = new ServicesTable(info.getServices()); servicesTypeTable.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { super.mouseClicked(e); if (e.getClickCount() == 2) { // set the epr type as this outputType MethodTypeOutput output = new MethodTypeOutput(); try { output.setQName(new QName(getServicesTypeTable().getSelectedRowData().getNamespace() + "/types", getServicesTypeTable().getSelectedRowData().getName() + "Reference")); output.setIsArray(false); output.setIsClientHandle(new Boolean(true)); output.setClientHandleClass(getServicesTypeTable().getSelectedRowData().getPackageName() + "." + "client" + "." + getServicesTypeTable().getSelectedRowData().getName() + "Client"); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } try { getOutputTypeTable().modifyRow(0, output); } catch (Exception ex) { ex.printStackTrace(); } } } }); } return servicesTypeTable; } /** * This method initializes providerClassnameTextField * * @return javax.swing.JTextField */ private JTextField getProviderClassnameTextField() { if (providerClassnameTextField == null) { providerClassnameTextField = new JTextField(); if (method.getProviderInformation() != null && method.getProviderInformation().getProviderClass() != null) { providerClassnameTextField.setText(method.getProviderInformation().getProviderClass()); } } return providerClassnameTextField; } /** * This method initializes providerInformationPanel * * @return javax.swing.JPanel */ private JPanel getProviderInformationPanel() { if (providerInformationPanel == null) { GridBagConstraints gridBagConstraints34 = new GridBagConstraints(); gridBagConstraints34.gridx = 0; gridBagConstraints34.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints34.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints34.anchor = java.awt.GridBagConstraints.WEST; gridBagConstraints34.gridy = 1; GridBagConstraints gridBagConstraints37 = new GridBagConstraints(); gridBagConstraints37.gridx = 1; gridBagConstraints37.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints37.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints37.weightx = 1.0D; gridBagConstraints37.gridy = 1; GridBagConstraints gridBagConstraints38 = new GridBagConstraints(); gridBagConstraints38.gridx = 0; gridBagConstraints38.gridwidth = 2; gridBagConstraints38.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints38.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints38.anchor = java.awt.GridBagConstraints.CENTER; gridBagConstraints38.weightx = 0.0D; gridBagConstraints38.gridy = 0; providerClassnameLabel = new JLabel(); providerClassnameLabel.setText("Provider Classname"); providerInformationPanel = new JPanel(); providerInformationPanel.setLayout(new GridBagLayout()); providerInformationPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Provider Information", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); providerInformationPanel.add(getIsProvidedCheckBox(), gridBagConstraints38); 
providerInformationPanel.add(providerClassnameLabel, gridBagConstraints34); providerInformationPanel.add(getProviderClassnameTextField(), gridBagConstraints37); } return providerInformationPanel; } /** * This method initializes jSplitPane * * @return javax.swing.JSplitPane */ private JSplitPane getInputParamsSplitPane() { if (inputParamsSplitPane == null) { inputParamsSplitPane = new JSplitPane(); inputParamsSplitPane.setSize(new java.awt.Dimension(173, 68)); inputParamsSplitPane.setOneTouchExpandable(true); inputParamsSplitPane.setLeftComponent(getInputNamespacesPanel()); inputParamsSplitPane.setRightComponent(getInputTypesTablePanel()); inputParamsSplitPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Input Parameters", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); } return inputParamsSplitPane; } /** * This method initializes jSplitPane * * @return javax.swing.JSplitPane */ private JSplitPane getOutputTypeSplitPane() { if (outputTypeSplitPane == null) { outputTypeSplitPane = new JSplitPane(); outputTypeSplitPane.setSize(new java.awt.Dimension(192, 90)); outputTypeSplitPane.setOneTouchExpandable(true); outputTypeSplitPane.setLeftComponent(getOutputNamespacePanel()); outputTypeSplitPane.setRightComponent(getOutputTypesTablePanel()); outputTypeSplitPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Output Type", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); } return outputTypeSplitPane; } /** * This method initializes messagePanel * * @return javax.swing.JPanel */ private JPanel getMessagePanel() { if (messagePanel == null) { GridBagConstraints gridBagConstraints44 = new GridBagConstraints(); gridBagConstraints44.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints44.gridy = 2; gridBagConstraints44.weightx = 1.0; gridBagConstraints44.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints44.gridx = 2; GridBagConstraints gridBagConstraints43 = new GridBagConstraints(); gridBagConstraints43.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints43.gridy = 1; gridBagConstraints43.weightx = 1.0; gridBagConstraints43.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints43.gridx = 2; GridBagConstraints gridBagConstraints42 = new GridBagConstraints(); gridBagConstraints42.gridx = 0; gridBagConstraints42.gridy = 0; GridBagConstraints gridBagConstraints39 = new GridBagConstraints(); gridBagConstraints39.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints39.gridy = 2; gridBagConstraints39.weightx = 1.0; gridBagConstraints39.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints39.gridx = 1; GridBagConstraints gridBagConstraints17 = new GridBagConstraints(); gridBagConstraints17.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints17.gridy = 1; gridBagConstraints17.weightx = 1.0; gridBagConstraints17.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints17.gridx = 1; GridBagConstraints gridBagConstraints16 = new GridBagConstraints(); gridBagConstraints16.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints16.gridy = 2; gridBagConstraints16.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints16.gridx = 0; GridBagConstraints gridBagConstraints8 = new GridBagConstraints(); gridBagConstraints8.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints8.gridy = 1; 
gridBagConstraints8.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints8.gridx = 0; outputLabel = new JLabel(); outputLabel.setText("Output Message QName"); inputLabel = new JLabel(); inputLabel.setText("Input Message QName"); messagePanel = new JPanel(); messagePanel.setLayout(new GridBagLayout()); messagePanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Messages", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); messagePanel.add(inputLabel, gridBagConstraints8); messagePanel.add(outputLabel, gridBagConstraints16); messagePanel.add(getInputMessageNamespaceTextField(), gridBagConstraints17); messagePanel.add(getOutputMessageNamespaceTextField(), gridBagConstraints39); messagePanel.add(getMessagesCheckBox(), gridBagConstraints42); messagePanel.add(getInputMessageNameTextField(), gridBagConstraints43); messagePanel.add(getOutputMessageNameTextField(), gridBagConstraints44); } return messagePanel; } /** * This method initializes inputMessageTextField * * @return javax.swing.JTextField */ private JTextField getInputMessageNamespaceTextField() { if (inputMessageNamespaceTextField == null) { inputMessageNamespaceTextField = new JTextField(); if (method.isIsImported()) { inputMessageNamespaceTextField.setText(method.getImportInformation().getInputMessage() .getNamespaceURI()); } } return inputMessageNamespaceTextField; } /** * This method initializes ouputMessageTextField * * @return javax.swing.JTextField */ private JTextField getOutputMessageNamespaceTextField() { if (outputMessageNamespaceTextField == null) { outputMessageNamespaceTextField = new JTextField(); if (method.isIsImported()) { outputMessageNamespaceTextField.setText(method.getImportInformation().getOutputMessage() .getNamespaceURI()); } } return outputMessageNamespaceTextField; } /** * This method initializes messagesCheckBox * * @return javax.swing.JCheckBox */ private JCheckBox getMessagesCheckBox() { if (messagesCheckBox == null) { messagesCheckBox = new JCheckBox(); messagesCheckBox.setText("customize message imports"); messagesCheckBox.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent e) { if (messagesCheckBox.isSelected()) { getInputMessageNamespaceTextField().setEnabled(true); getInputMessageNamespaceTextField().setEditable(true); getInputMessageNameTextField().setEnabled(true); getInputMessageNameTextField().setEditable(true); getOutputMessageNamespaceTextField().setEnabled(true); getOutputMessageNamespaceTextField().setEditable(true); getOutputMessageNameTextField().setEnabled(true); getOutputMessageNameTextField().setEditable(true); } else { getInputMessageNamespaceTextField().setEnabled(false); getInputMessageNamespaceTextField().setEditable(false); getInputMessageNameTextField().setEnabled(false); getInputMessageNameTextField().setEditable(false); getOutputMessageNamespaceTextField().setEnabled(false); getOutputMessageNamespaceTextField().setEditable(false); getOutputMessageNameTextField().setEnabled(false); getOutputMessageNameTextField().setEditable(false); } } }); getInputMessageNamespaceTextField().setEnabled(false); getInputMessageNamespaceTextField().setEditable(false); getInputMessageNameTextField().setEnabled(false); getInputMessageNameTextField().setEditable(false); getOutputMessageNamespaceTextField().setEnabled(false); getOutputMessageNamespaceTextField().setEditable(false); 
getOutputMessageNameTextField().setEnabled(false); getOutputMessageNameTextField().setEditable(false); } return messagesCheckBox; } /** * This method initializes inputMessageNameTextField * * @return javax.swing.JTextField */ private JTextField getInputMessageNameTextField() { if (inputMessageNameTextField == null) { inputMessageNameTextField = new JTextField(); if (method.isIsImported()) { inputMessageNameTextField.setText(method.getImportInformation().getInputMessage().getLocalPart()); } } return inputMessageNameTextField; } /** * This method initializes outputMessageNameTextField * * @return javax.swing.JTextField */ private JTextField getOutputMessageNameTextField() { if (outputMessageNameTextField == null) { outputMessageNameTextField = new JTextField(); if (method.isIsImported()) { outputMessageNameTextField.setText(method.getImportInformation().getOutputMessage().getLocalPart()); } } return outputMessageNameTextField; } /** * This method initializes createFaultPanel * * @return javax.swing.JPanel */ private JPanel getCreateFaultPanel() { if (createFaultPanel == null) { GridBagConstraints gridBagConstraints48 = new GridBagConstraints(); gridBagConstraints48.gridx = 1; gridBagConstraints48.gridy = 0; faultTypeNameLabel = new JLabel(); faultTypeNameLabel.setText("Fault Type Name:"); GridBagConstraints gridBagConstraints47 = new GridBagConstraints(); gridBagConstraints47.gridx = 3; gridBagConstraints47.gridy = 0; GridBagConstraints gridBagConstraints45 = new GridBagConstraints(); gridBagConstraints45.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints45.gridx = 2; gridBagConstraints45.gridy = 0; gridBagConstraints45.weightx = 1.0; gridBagConstraints45.insets = new java.awt.Insets(5, 5, 5, 5); createFaultPanel = new JPanel(); createFaultPanel.setLayout(new GridBagLayout()); createFaultPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Create New Service Faults", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); createFaultPanel.add(getNewFaultNameTextField(), gridBagConstraints45); createFaultPanel.add(getCreateFaultButton(), gridBagConstraints47); createFaultPanel.add(faultTypeNameLabel, gridBagConstraints48); } return createFaultPanel; } /** * This method initializes newFaultNameTextField * * @return javax.swing.JTextField */ private JTextField getNewFaultNameTextField() { if (newFaultNameTextField == null) { newFaultNameTextField = new JTextField(); } return newFaultNameTextField; } /** * This method initializes createFaultButton * * @return javax.swing.JButton */ private JButton getCreateFaultButton() { if (createFaultButton == null) { createFaultButton = new JButton(IntroduceLookAndFeel.getAddIcon()); createFaultButton.setText("Add New Fault"); createFaultButton .setToolTipText("Creates a new fault under this service's types namespace and adds it to the list of available faults."); createFaultButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { QName exceptionQName = null; if (CommonTools.isValidServiceName(getNewFaultNameTextField().getText())) { exceptionQName = new QName(info.getService().getNamespace() + "/types", getNewFaultNameTextField().getText()); } else { JOptionPane.showMessageDialog(MethodViewer.this, "Invalid Exception Name(" + getNewFaultNameTextField().getText() + "): Exception must be a valid java identifier."); return; } ExceptionHolder holder = new
ExceptionHolder(exceptionQName, false); getExceptionJComboBox().addItem(holder); getExceptionsTable().addRow(holder.qname, holder.isCreated); } }); } return createFaultButton; } /** * This method initializes exceptionsPanelSplitPane * * @return javax.swing.JSplitPane */ private JSplitPane getExceptionsPanelSplitPane() { if (exceptionsPanelSplitPane == null) { exceptionsPanelSplitPane = new JSplitPane(); exceptionsPanelSplitPane.setRightComponent(getExceptionScrollPane()); exceptionsPanelSplitPane.setLeftComponent(getExceptionNamespacesScrollPane()); exceptionsPanelSplitPane.setDividerLocation(0.40); } return exceptionsPanelSplitPane; } /** * This method initializes exceptionNamespacesScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getExceptionNamespacesScrollPane() { if (exceptionNamespacesScrollPane == null) { exceptionNamespacesScrollPane = new JScrollPane(); exceptionNamespacesScrollPane.setViewportView(getNamespacesJTree()); } return exceptionNamespacesScrollPane; } /** * This method initializes namespacesJTree * * @return javax.swing.JTree */ private NamespacesJTree getNamespacesJTree() { if (namespacesJTree == null) { namespacesJTree = new NamespacesJTree(info.getNamespaces(), false); } return namespacesJTree; } /** * This method initializes faultsFromTypesPanel * * @return javax.swing.JPanel */ private JPanel getFaultsFromTypesPanel() { if (faultsFromTypesPanel == null) { faultsFromTypesPanel = new JPanel(); faultsFromTypesPanel.setLayout(new GridBagLayout()); faultsFromTypesPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Add Fault From Types", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); faultsFromTypesPanel.add(getAddFaultFromTypeButton(), new GridBagConstraints()); } return faultsFromTypesPanel; } /** * This method initializes addFaultFromTypeButton * * @return javax.swing.JButton */ private JButton getAddFaultFromTypeButton() { if (addFaultFromTypeButton == null) { addFaultFromTypeButton = new JButton(IntroduceLookAndFeel.getAddIcon()); addFaultFromTypeButton.setText("Add From Type"); addFaultFromTypeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { if (getNamespacesJTree().getCurrentNode() instanceof SchemaElementTypeTreeNode) { NamespaceType nt = ((NamespaceType) ((NamespaceTypeTreeNode) getNamespacesJTree() .getCurrentNode().getParent()).getUserObject()); SchemaElementType st = ((SchemaElementType) ((SchemaElementTypeTreeNode) getNamespacesJTree() .getCurrentNode()).getUserObject()); if (CommonTools.validateIsFaultType(nt, st, new File(info.getBaseDirectory().getAbsolutePath() + File.separator + "schema" + File.separator + info.getServices().getService(0).getName()))) { QName qname = new QName(nt.getNamespace(), st.getType()); ExceptionHolder holder = new ExceptionHolder(qname, true); getExceptionJComboBox().addItem(holder); getExceptionsTable().addRow(holder.qname, holder.isCreated); } else { JOptionPane.showMessageDialog(MethodViewer.this, "Type does not appear to extend from {" + IntroduceConstants.BASEFAULTS_NAMESPACE + "}BaseFaultType"); } } else { JOptionPane.showMessageDialog(MethodViewer.this, "Please select a type to add"); } } }); } return addFaultFromTypeButton; } /** * This method initializes removeFaultPanel * * @return javax.swing.JPanel */ private JPanel getRemoveFaultPanel() { if (removeFaultPanel == null) { GridBagConstraints 
gridBagConstraints5 = new GridBagConstraints(); gridBagConstraints5.insets = new Insets(2, 2, 2, 2); gridBagConstraints5.gridx = -1; gridBagConstraints5.gridy = -1; gridBagConstraints5.gridheight = 2; removeFaultPanel = new JPanel(); removeFaultPanel.setLayout(new GridBagLayout()); removeFaultPanel.add(getRemoveExceptionButton(), gridBagConstraints5); } return removeFaultPanel; } } // @jve:decl-index=0:visual-constraint="4,12"
cagrid-1-0/caGrid/projects/introduce/src/java/Portal/gov/nih/nci/cagrid/introduce/portal/modification/services/methods/MethodViewer.java
package gov.nih.nci.cagrid.introduce.portal.modification.services.methods; import gov.nih.nci.cagrid.common.portal.ErrorDialog; import gov.nih.nci.cagrid.common.portal.PortalLookAndFeel; import gov.nih.nci.cagrid.common.portal.PortalUtils; import gov.nih.nci.cagrid.introduce.IntroduceConstants; import gov.nih.nci.cagrid.introduce.beans.method.MethodType; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeExceptions; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeExceptionsException; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeImportInformation; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeInputs; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeInputsInput; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeOutput; import gov.nih.nci.cagrid.introduce.beans.method.MethodTypeProviderInformation; import gov.nih.nci.cagrid.introduce.beans.method.MethodsType; import gov.nih.nci.cagrid.introduce.beans.namespace.NamespaceType; import gov.nih.nci.cagrid.introduce.beans.namespace.SchemaElementType; import gov.nih.nci.cagrid.introduce.beans.service.ServiceType; import gov.nih.nci.cagrid.introduce.common.CommonTools; import gov.nih.nci.cagrid.introduce.info.SpecificServiceInformation; import gov.nih.nci.cagrid.introduce.portal.common.IntroduceLookAndFeel; import gov.nih.nci.cagrid.introduce.portal.modification.security.MethodSecurityPanel; import gov.nih.nci.cagrid.introduce.portal.modification.services.ModifyService; import gov.nih.nci.cagrid.introduce.portal.modification.types.NamespaceTypeTreeNode; import gov.nih.nci.cagrid.introduce.portal.modification.types.NamespacesJTree; import gov.nih.nci.cagrid.introduce.portal.modification.types.SchemaElementTypeTreeNode; import java.awt.FlowLayout; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import javax.swing.BorderFactory; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.JTabbedPane; import javax.swing.JTextField; import javax.swing.ScrollPaneConstants; import javax.swing.border.TitledBorder; import javax.xml.namespace.QName; import org.apache.axis.utils.JavaUtils; import org.projectmobius.portal.GridPortalBaseFrame; /** * MethodViewer * * @author <A HREF="MAILTO:[email protected]">Shannon Hastings </A> * @author <A HREF="MAILTO:[email protected]">Scott Oster </A> * @author <A HREF="MAILTO:[email protected]">Stephen Langella </A> */ public class MethodViewer extends GridPortalBaseFrame { public class ExceptionHolder implements Comparable { boolean isCreated; QName qname; public ExceptionHolder(QName qname, boolean isCreated) { this.qname = qname; this.isCreated = isCreated; } public int compareTo(Object arg0) { return this.qname.toString().compareTo(((QName) arg0).toString()); } public String toString() { return qname.toString(); } } private MethodType method; private JPanel mainPanel = null; private JScrollPane inputParamScrollPanel = null; private InputParametersTable inputParamTable = null; private 
JScrollPane outputTypejScrollPane = null; private OutputTypeTable outputTypeTable = null; private JPanel buttonPanel = null; private JButton doneButton = null; private JButton addInputParamButton = null; private JPanel namePanel = null; private JTextField nameField = null; private JButton removeButton = null; private JLabel methodLabel = null; private JPanel inputButtonPanel = null; private JButton cancelButton = null; private JPanel exceptionsPanel = null; private JScrollPane exceptionScrollPane = null; private JPanel exceptionInputPanel = null; private ExceptionsTable exceptionsTable = null; private JButton addExceptionButton = null; private JButton removeExceptionButton = null; private JTabbedPane tabbedPanel = null; private JPanel methodPanel = null; private JPanel securityContainerPanel = null; private SpecificServiceInformation info; private JTabbedPane configureTabbedPane = null; private JComboBox exceptionJComboBox = null; private JPanel inputNamespacesPanel = null; private JScrollPane inputNamespaceScrollPane = null; private NamespacesJTree inputNamespaceTypesJTree = null; private JPanel methodPropertiesPanel = null; private JPanel outputNamespacePanel = null; private JScrollPane outputNamespacesTypeScrollPane = null; private NamespacesJTree outputNamespacesJTree = null; private JPanel outputTypesTablePanel = null; private JPanel inputTypesTablePanel = null; private JPanel inputTableControlsPanel = null; private JLabel upLabel = null; private JLabel downLabel = null; private JButton clearOutputTypeButton = null; private JPanel exceptionsInputButtonPanel = null; private JPanel importInformationPanel = null; private JLabel serviceName = null; private JLabel wsdlFileLabel = null; private JLabel namespaceLabel = null; private JTextField namespaceTextField = null; private JTextField serviceNameTextField = null; private JTextField wsdlFileTextField = null; private JCheckBox isImportedCheckBox = null; private JLabel packageNameLabel = null; private JTextField packageNameTextField = null; private JCheckBox isProvidedCheckBox = null; private JScrollPane servicesTypeScrollPane = null; private ServicesTable servicesTypeTable = null; private JTextField providerClassnameTextField = null; private JPanel providerInformationPanel = null; private JLabel providerClassnameLabel = null; private JSplitPane inputParamsSplitPane = null; private JSplitPane outputTypeSplitPane = null; private JPanel messagePanel = null; private JLabel inputLabel = null; private JLabel outputLabel = null; private JTextField inputMessageNamespaceTextField = null; private JTextField outputMessageNamespaceTextField = null; private JCheckBox messagesCheckBox = null; private JTextField inputMessageNameTextField = null; private JTextField outputMessageNameTextField = null; private JPanel createFaultPanel = null; private JTextField newFaultNameTextField = null; private JButton createFaultButton = null; private JLabel faultTypeNameLabel = null; private JLabel existingExceptionLabel = null; private JSplitPane exceptionsPanelSplitPane = null; private JScrollPane exceptionNamespacesScrollPane = null; private NamespacesJTree namespacesJTree = null; private JPanel faultsFromTypesPanel = null; private JButton addFaultFromTypeButton = null; private JPanel removeFaultPanel = null; public MethodViewer(MethodType method, SpecificServiceInformation info) { this.info = info; this.method = method; this.setTitle("Modify Method"); initialize(); } private void initialize() { this.setContentPane(getMainPanel()); this.setTitle("Build/Modify 
Operation"); this.setSize(new java.awt.Dimension(683, 469)); this.setContentPane(getMainPanel()); this.setFrameIcon(IntroduceLookAndFeel.getModifyIcon()); } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getMainPanel() { if (mainPanel == null) { GridBagConstraints gridBagConstraints20 = new GridBagConstraints(); gridBagConstraints20.gridx = 0; gridBagConstraints20.weighty = 0.0D; gridBagConstraints20.weightx = 1.0D; gridBagConstraints20.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints20.gridy = 0; GridBagConstraints gridBagConstraints9 = new GridBagConstraints(); gridBagConstraints9.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints9.weighty = 1.0; gridBagConstraints9.gridx = 0; gridBagConstraints9.gridy = 1; gridBagConstraints9.weightx = 1.0; GridBagConstraints gridBagConstraints10 = new GridBagConstraints(); mainPanel = new JPanel(); mainPanel.setLayout(new GridBagLayout()); gridBagConstraints10.gridx = 0; gridBagConstraints10.gridy = 4; gridBagConstraints10.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints10.weightx = 0.0D; gridBagConstraints10.weighty = 0.0D; gridBagConstraints10.fill = java.awt.GridBagConstraints.BOTH; mainPanel.add(getButtonPanel(), gridBagConstraints10); mainPanel.add(getTabbedPanel(), gridBagConstraints9); mainPanel.add(getMethodPropertiesPanel(), gridBagConstraints20); } return mainPanel; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getInputParamScrollPanel() { if (inputParamScrollPanel == null) { inputParamScrollPanel = new JScrollPane(); inputParamScrollPanel.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED); inputParamScrollPanel.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); inputParamScrollPanel.setViewportView(getInputParamTable()); } return inputParamScrollPanel; } /** * This method initializes jTable * * @return javax.swing.JTable */ private InputParametersTable getInputParamTable() { if (inputParamTable == null) { inputParamTable = new InputParametersTable(this.method); } return inputParamTable; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getOutputTypejScrollPane() { if (outputTypejScrollPane == null) { outputTypejScrollPane = new JScrollPane(); outputTypejScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED); outputTypejScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); outputTypejScrollPane.setViewportView(getOutputTypeTable()); } return outputTypejScrollPane; } /** * This method initializes jTable * * @return javax.swing.JTable */ private OutputTypeTable getOutputTypeTable() { if (outputTypeTable == null) { outputTypeTable = new OutputTypeTable(this.method); } return outputTypeTable; } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getButtonPanel() { if (buttonPanel == null) { buttonPanel = new JPanel(); buttonPanel.setLayout(new FlowLayout()); buttonPanel.add(getDoneButton(), null); buttonPanel.add(getCancelButton(), null); } return buttonPanel; } /** * This method initializes jButton * * @return javax.swing.JButton */ public JButton getDoneButton() { if (doneButton == null) { doneButton = new JButton(IntroduceLookAndFeel.getDoneIcon()); doneButton.setText("Done"); doneButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { // First 
process the inputs boolean valid = true; String message = ""; List usedNames = new ArrayList(); MethodsType methodsType = info.getService().getMethods(); if (methodsType != null) { MethodType methods[] = methodsType.getMethod(); if (methods != null) { for (int j = 0; j < methods.length; j++) { MethodType tmethod = methods[j]; if (!usedNames.contains(tmethod.getName())) { usedNames.add(tmethod.getName()); } else { valid = false; message = "Method name is not unique: " + tmethod.getName(); } } } } if (!method.getName().equals(getNameField().getText())) { if (usedNames.contains(getNameField().getText())) { valid = false; message = "Method name is not unique: " + getNameField().getText(); } } try { method.setName(getNameField().getText()); method.setMethodSecurity(((MethodSecurityPanel) securityContainerPanel).getMethodSecurity()); // process the inputs MethodTypeInputs inputs = new MethodTypeInputs(); MethodTypeInputsInput[] inputsA = new MethodTypeInputsInput[getInputParamTable().getRowCount()]; usedNames = new ArrayList(); for (int i = 0; i < getInputParamTable().getRowCount(); i++) { MethodTypeInputsInput input = getInputParamTable().getRowData(i); // validate the input param if (usedNames.contains(input.getName())) { valid = false; message = "Method " + method.getName() + " contains more than one parameter named " + input.getName(); } usedNames.add(input.getName()); if (!JavaUtils.isJavaId(input.getName())) { valid = false; message = "Parameter name must be a valid java identifier: Method: " + method.getName() + " param: " + input.getName(); } inputsA[i] = input; } inputs.setInput(inputsA); method.setInputs(inputs); // process exceptions MethodTypeExceptions exceptions = new MethodTypeExceptions(); MethodTypeExceptionsException[] exceptionsA = new MethodTypeExceptionsException[getExceptionsTable() .getRowCount()]; for (int i = 0; i < getExceptionsTable().getRowCount(); i++) { MethodTypeExceptionsException exception = getExceptionsTable().getRowData(i); exceptionsA[i] = exception; } exceptions.setException(exceptionsA); method.setExceptions(exceptions); // now process the output MethodTypeOutput output = getOutputTypeTable().getRowData(0); method.setOutput(output); if (getIsImportedCheckBox().isSelected() && getIsProvidedCheckBox().isSelected()) { method.setIsProvided(true); MethodTypeProviderInformation pi = new MethodTypeProviderInformation(); pi.setProviderClass(getProviderClassnameTextField().getText()); method.setProviderInformation(pi); } else { method.setIsProvided(false); } if (getIsImportedCheckBox().isSelected()) { // validate the import // make sure there are no collision problems with // namespaces or packages.....
for (int i = 0; i < info.getNamespaces().getNamespace().length; i++) { NamespaceType nsType = info.getNamespaces().getNamespace(i); if (nsType.getNamespace().equals(getNamespaceTextField().getText()) && !nsType.getPackageName().equals(getPackageNameTextField().getText())) { valid = false; message = "Service Namespace is already being used and Package Name does not match : " + getPackageNameTextField().getText() + " != " + nsType.getPackageName(); } } // process the import information method.setIsImported(true); if (getIsImportedCheckBox().isSelected()) { MethodTypeImportInformation importInfo = new MethodTypeImportInformation(); importInfo.setNamespace(getNamespaceTextField().getText()); importInfo.setPortTypeName(getServiceNameTextField().getText()); importInfo.setPackageName(getPackageNameTextField().getText()); importInfo.setWsdlFile(getWsdlFileTextField().getText()); if (!getInputMessageNamespaceTextField().getText().equals("") && !getInputMessageNameTextField().getText().equals("")) { importInfo.setInputMessage(new QName(getInputMessageNamespaceTextField().getText(), getInputMessageNameTextField().getText())); } if (!getOutputMessageNamespaceTextField().getText().equals("") && !getOutputMessageNameTextField().getText().equals("")) { importInfo.setOutputMessage(new QName(getOutputMessageNamespaceTextField() .getText(), getOutputMessageNameTextField().getText())); } method.setImportInformation(importInfo); } } else { method.setIsImported(false); } } catch (Exception ex) { ex.printStackTrace(); // PortalUtils.showErrorMessage(ex); ErrorDialog.showErrorDialog(ex); } if (!valid) { JOptionPane.showMessageDialog(MethodViewer.this, message); } else { dispose(); } } }); } return doneButton; } /** * This method initializes jButton * * @return javax.swing.JButton */ private JButton getAddInputParamButton() { if (addInputParamButton == null) { addInputParamButton = new JButton(PortalLookAndFeel.getAddIcon()); addInputParamButton.setText("Add"); addInputParamButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { if (getInputNamespaceTypesJTree().getCurrentNode() instanceof SchemaElementTypeTreeNode) { NamespaceType nt = ((NamespaceType) ((NamespaceTypeTreeNode) getInputNamespaceTypesJTree() .getCurrentNode().getParent()).getUserObject()); SchemaElementType st = ((SchemaElementType) ((SchemaElementTypeTreeNode) getInputNamespaceTypesJTree() .getCurrentNode()).getUserObject()); MethodTypeInputsInput input = new MethodTypeInputsInput(); input.setQName(new QName(nt.getNamespace(), st.getType())); input.setIsArray(false); input.setName(JavaUtils.xmlNameToJava(st.getType())); getInputParamTable().addRow(input); } else { JOptionPane.showMessageDialog(MethodViewer.this, "Please select a type to add"); } } }); } return addInputParamButton; } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getNamePanel() { if (namePanel == null) { GridBagConstraints gridBagConstraints110 = new GridBagConstraints(); gridBagConstraints110.gridx = 2; gridBagConstraints110.insets = new java.awt.Insets(0, 30, 0, 0); gridBagConstraints110.gridy = 0; methodLabel = new JLabel(); methodLabel.setText("Method Name"); GridBagConstraints gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST; gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints.gridheight = 2; gridBagConstraints.gridy = 0; GridBagConstraints 
gridBagConstraints2 = new GridBagConstraints(); namePanel = new JPanel(); namePanel.setLayout(new GridBagLayout()); namePanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Method Properties", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); gridBagConstraints2.gridx = 1; gridBagConstraints2.gridheight = 2; gridBagConstraints2.gridwidth = 1; gridBagConstraints2.anchor = java.awt.GridBagConstraints.CENTER; gridBagConstraints2.gridy = 0; gridBagConstraints2.weightx = 1.0D; gridBagConstraints2.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints2.insets = new java.awt.Insets(2, 2, 2, 2); namePanel.add(getNameField(), gridBagConstraints2); namePanel.add(methodLabel, gridBagConstraints); namePanel.add(getIsImportedCheckBox(), gridBagConstraints110); } return namePanel; } /** * This method initializes jTextField * * @return javax.swing.JTextField */ private JTextField getNameField() { if (nameField == null) { nameField = new JTextField(); nameField.setText(method.getName()); // nameField.setText(methodsTable.getSelectedMethodType().getName()); } return nameField; } /** * This method initializes jButton * * @return javax.swing.JButton */ private JButton getRemoveButton() { if (removeButton == null) { removeButton = new JButton(PortalLookAndFeel.getRemoveIcon()); removeButton.setText("Remove"); removeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { try { getInputParamTable().removeSelectedRow(); } catch (Exception ex) { PortalUtils.showErrorMessage("Please select an input parameter to Remove"); } } }); } return removeButton; } /** * This method initializes inputButtonPanel * * @return javax.swing.JPanel */ private JPanel getInputButtonPanel() { if (inputButtonPanel == null) { inputButtonPanel = new JPanel(); inputButtonPanel.add(getAddInputParamButton(), null); inputButtonPanel.add(getRemoveButton(), null); } return inputButtonPanel; } /** * This method initializes cancelButton * * @return javax.swing.JButton */ private JButton getCancelButton() { if (cancelButton == null) { cancelButton = new JButton(); cancelButton.setText("Cancel"); cancelButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { dispose(); } }); cancelButton.setIcon(PortalLookAndFeel.getCloseIcon()); } return cancelButton; } /** * This method initializes exceptionsPanel * * @return javax.swing.JPanel */ private JPanel 
getExceptionsPanel() { if (exceptionsPanel == null) { GridBagConstraints gridBagConstraints49 = new GridBagConstraints(); gridBagConstraints49.gridx = 1; gridBagConstraints49.gridheight = 3; gridBagConstraints49.gridy = 1; GridBagConstraints gridBagConstraints46 = new GridBagConstraints(); gridBagConstraints46.gridx = 0; gridBagConstraints46.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints46.weightx = 1.0D; gridBagConstraints46.gridy = 1; GridBagConstraints gridBagConstraints11 = new GridBagConstraints(); gridBagConstraints11.fill = GridBagConstraints.BOTH; gridBagConstraints11.gridy = 2; gridBagConstraints11.weightx = 1.0D; gridBagConstraints11.gridx = 0; GridBagConstraints gridBagConstraints12 = new GridBagConstraints(); gridBagConstraints12.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints12.gridy = 3; gridBagConstraints12.weightx = 1.0D; gridBagConstraints12.gridx = 0; GridBagConstraints gridBagConstraints3 = new GridBagConstraints(); gridBagConstraints3.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints3.weighty = 1.0; gridBagConstraints3.gridx = 0; gridBagConstraints3.gridy = 0; gridBagConstraints3.gridwidth = 2; gridBagConstraints3.weightx = 1.0; exceptionsPanel = new JPanel(); exceptionsPanel.setLayout(new GridBagLayout()); exceptionsPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Faults", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); exceptionsPanel.add(getExceptionsPanelSplitPane(), gridBagConstraints3); exceptionsPanel.add(getCreateFaultPanel(), gridBagConstraints12); exceptionsPanel.add(getExceptionInputPanel(), gridBagConstraints11); exceptionsPanel.add(getFaultsFromTypesPanel(), gridBagConstraints46); exceptionsPanel.add(getRemoveFaultPanel(), gridBagConstraints49); } return exceptionsPanel; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getExceptionScrollPane() { if (exceptionScrollPane == null) { exceptionScrollPane = new JScrollPane(); exceptionScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); exceptionScrollPane.setViewportView(getExceptionsTable()); } return exceptionScrollPane; } /** * This method initializes exceptionInputPanel * * @return javax.swing.JPanel */ private JPanel getExceptionInputPanel() { if (exceptionInputPanel == null) { GridBagConstraints gridBagConstraints4 = new GridBagConstraints(); gridBagConstraints4.insets = new Insets(2, 2, 2, 2); gridBagConstraints4.gridy = 0; gridBagConstraints4.gridx = 2; GridBagConstraints gridBagConstraints51 = new GridBagConstraints(); gridBagConstraints51.gridx = 0; gridBagConstraints51.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints51.insets = new java.awt.Insets(2, 2, 2, 10); gridBagConstraints51.gridy = 0; existingExceptionLabel = new JLabel(); existingExceptionLabel.setText("Used Faults:"); GridBagConstraints gridBagConstraints27 = new GridBagConstraints(); gridBagConstraints27.gridx = 2; gridBagConstraints27.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints27.gridheight = 2; gridBagConstraints27.gridy = 1; GridBagConstraints gridBagConstraints15 = new GridBagConstraints(); gridBagConstraints15.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints15.gridx = 1; gridBagConstraints15.gridy = 0; gridBagConstraints15.weightx = 1.0; gridBagConstraints15.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints15.gridheight = 1; 
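// Assemble the "Choose Used Fault" row: the label, the combo box of known faults, the Add button, and the nested button panel.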
exceptionInputPanel = new JPanel(); exceptionInputPanel.setLayout(new GridBagLayout()); exceptionInputPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Choose Used Fault", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); exceptionInputPanel.add(getExceptionJComboBox(), gridBagConstraints15); exceptionInputPanel.add(getExceptionsInputButtonPanel(), gridBagConstraints27); exceptionInputPanel.add(existingExceptionLabel, gridBagConstraints51); exceptionInputPanel.add(getAddExceptionButton(), gridBagConstraints4); } return exceptionInputPanel; } /** * This method initializes faultsTable * * @return javax.swing.JTable */ private ExceptionsTable getExceptionsTable() { if (exceptionsTable == null) { exceptionsTable = new ExceptionsTable(this.method, this.info.getService()); } return exceptionsTable; } /** * This method initializes addExceptionButton * * @return javax.swing.JButton */ private JButton getAddExceptionButton() { if (addExceptionButton == null) { addExceptionButton = new JButton(PortalLookAndFeel.getAddIcon()); addExceptionButton.setText("Add Used Fault"); addExceptionButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { ExceptionHolder exceptionHolder = null; if (getExceptionJComboBox().getSelectedItem() != null) { exceptionHolder = (ExceptionHolder) getExceptionJComboBox().getSelectedItem(); } if (exceptionHolder != null) { // parse qname string into qname for (int i = 0; i < getExceptionsTable().getRowCount(); i++) { MethodTypeExceptionsException exception = null; try { exception = getExceptionsTable().getRowData(i); } catch (Exception e1) { e1.printStackTrace(); } if (exception != null && exception.getQname() != null && exception.getQname().equals(exceptionHolder.qname)) { JOptionPane.showMessageDialog(MethodViewer.this, "Exception (" + exceptionHolder + ") already thrown by method."); return; } } getExceptionsTable().addRow(exceptionHolder.qname, exceptionHolder.isCreated); } else { JOptionPane.showMessageDialog(MethodViewer.this, "Please select an exception first!"); } } }); } return addExceptionButton; } /** * This method initializes removeExceptionButton * * @return javax.swing.JButton */ private JButton getRemoveExceptionButton() { if (removeExceptionButton == null) { removeExceptionButton = new JButton(PortalLookAndFeel.getRemoveIcon()); removeExceptionButton.setText("Remove"); removeExceptionButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { try { getExceptionsTable().removeSelectedRow(); } catch (Exception ex) { PortalUtils.showErrorMessage("Please select an exception to Remove"); } } }); } return removeExceptionButton; } /** * This method initializes tabbedPanel * * @return javax.swing.JTabbedPane */ private JTabbedPane getTabbedPanel() { if (tabbedPanel == null) { tabbedPanel = new JTabbedPane(); tabbedPanel.addTab("Method Signature", null, getMethodPanel(), null); tabbedPanel.addTab("Security", null, getSecurityContainerPanel(), null); tabbedPanel.addTab("Import Information", null, getImportInformationPanel(), null); } return tabbedPanel; } /** * This method initializes methodPanel * * @return javax.swing.JPanel */ private JPanel getMethodPanel() { if (methodPanel == null) { GridBagConstraints gridBagConstraints1 = new GridBagConstraints(); gridBagConstraints1.fill = GridBagConstraints.BOTH; 
gridBagConstraints1.gridx = -1; gridBagConstraints1.gridy = -1; gridBagConstraints1.weightx = 1.0; gridBagConstraints1.weighty = 1.0; gridBagConstraints1.insets = new Insets(2, 2, 2, 2); methodPanel = new JPanel(); methodPanel.setLayout(new GridBagLayout()); methodPanel.add(getConfigureTabbedPane(), gridBagConstraints1); } return methodPanel; } /** * This method initializes securityContainerPanel * * @return javax.swing.JPanel */ private JPanel getSecurityContainerPanel() { if (securityContainerPanel == null) { securityContainerPanel = new MethodSecurityPanel(info.getService().getServiceSecurity(), this.method .getMethodSecurity()); securityContainerPanel.setBorder(BorderFactory.createTitledBorder(null, "Method Level Security Configuration", TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); } return securityContainerPanel; } /** * This method initializes configureTabbedPane * * @return javax.swing.JTabbedPane */ private JTabbedPane getConfigureTabbedPane() { if (configureTabbedPane == null) { configureTabbedPane = new JTabbedPane(); configureTabbedPane.addTab("Inputs", null, getInputParamsSplitPane(), null); configureTabbedPane.addTab("Output", null, getOutputTypeSplitPane(), null); configureTabbedPane.addTab("Faults", null, getExceptionsPanel(), null); } return configureTabbedPane; } /** * This method initializes exceptionEditText * * @return javax.swing.JTextField */ private JComboBox getExceptionJComboBox() { if (exceptionJComboBox == null) { exceptionJComboBox = new JComboBox(); // populate with currently used exception names ServiceType[] service = this.info.getServices().getService(); SortedSet exceptionNameSet = new TreeSet(); for (int namespaceI = 0; namespaceI < info.getNamespaces().getNamespace().length; namespaceI++) { NamespaceType namespace = info.getNamespaces().getNamespace(namespaceI); } if (service != null) { for (int i = 0; i < service.length; i++) { MethodsType methodsType = service[i].getMethods(); if (methodsType != null) { MethodType methods[] = methodsType.getMethod(); if (methods != null) { for (int j = 0; j < methods.length; j++) { MethodTypeExceptions exceptionsType = methods[j].getExceptions(); if (exceptionsType != null) { MethodTypeExceptionsException[] exceptions = exceptionsType.getException(); if (exceptions != null) { for (int e = 0; e < exceptions.length; e++) { if (exceptions[e].getQname() != null) { exceptionNameSet .add(new ExceptionHolder(exceptions[e].getQname(), true)); } else { exceptionNameSet.add(new ExceptionHolder(new QName(info.getService() .getNamespace() + "/types", exceptions[e].getName()), false)); } } } } } } } } } for (Iterator iter = exceptionNameSet.iterator(); iter.hasNext();) { exceptionJComboBox.addItem(iter.next()); } } return exceptionJComboBox; } /** * This method initializes namespacesPanel * * @return javax.swing.JPanel */ private JPanel getInputNamespacesPanel() { if (inputNamespacesPanel == null) { GridBagConstraints gridBagConstraints19 = new GridBagConstraints(); gridBagConstraints19.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints19.weighty = 1.0; gridBagConstraints19.gridx = 0; gridBagConstraints19.gridy = 0; gridBagConstraints19.insets = new java.awt.Insets(0, 0, 0, 0); gridBagConstraints19.weightx = 1.0; inputNamespacesPanel = new JPanel(); inputNamespacesPanel.setLayout(new GridBagLayout()); inputNamespacesPanel.add(getInputNamespaceScrollPane(), gridBagConstraints19); } return inputNamespacesPanel; } /** * This method initializes 
namespaceScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getInputNamespaceScrollPane() { if (inputNamespaceScrollPane == null) { inputNamespaceScrollPane = new JScrollPane(); inputNamespaceScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); inputNamespaceScrollPane.setViewportView(getInputNamespaceTypesJTree()); } return inputNamespaceScrollPane; } /** * This method initializes namespaceTypesJTree * * @return javax.swing.JTree */ private NamespacesJTree getInputNamespaceTypesJTree() { if (inputNamespaceTypesJTree == null) { inputNamespaceTypesJTree = new NamespacesJTree(info.getNamespaces(), true); inputNamespaceTypesJTree.addMouseListener(new MouseListener() { public void mouseReleased(MouseEvent e) { // TODO Auto-generated method stub } public void mousePressed(MouseEvent e) { // TODO Auto-generated method stub } public void mouseExited(MouseEvent e) { // TODO Auto-generated method stub } public void mouseEntered(MouseEvent e) { // TODO Auto-generated method stub } public void mouseClicked(MouseEvent e) { if (e.getClickCount() == 2) { if (getInputNamespaceTypesJTree().getCurrentNode() instanceof SchemaElementTypeTreeNode) { NamespaceType nt = ((NamespaceType) ((NamespaceTypeTreeNode) getInputNamespaceTypesJTree() .getCurrentNode().getParent()).getUserObject()); SchemaElementType st = ((SchemaElementType) ((SchemaElementTypeTreeNode) getInputNamespaceTypesJTree() .getCurrentNode()).getUserObject()); MethodTypeInputsInput input = new MethodTypeInputsInput(); input.setQName(new QName(nt.getNamespace(), st.getType())); input.setIsArray(false); input.setName(JavaUtils.xmlNameToJava(st.getType())); getInputParamTable().addRow(input); } } } }); } return inputNamespaceTypesJTree; } /** * This method initializes methodPropertiesPanel * * @return javax.swing.JPanel */ private JPanel getMethodPropertiesPanel() { if (methodPropertiesPanel == null) { GridBagConstraints gridBagConstraints13 = new GridBagConstraints(); gridBagConstraints13.fill = GridBagConstraints.BOTH; gridBagConstraints13.gridx = 0; gridBagConstraints13.gridy = 0; gridBagConstraints13.weightx = 1.0D; gridBagConstraints13.weighty = 0.0D; gridBagConstraints13.insets = new Insets(2, 2, 2, 2); methodPropertiesPanel = new JPanel(); methodPropertiesPanel.setLayout(new GridBagLayout()); methodPropertiesPanel.add(getNamePanel(), gridBagConstraints13); } return methodPropertiesPanel; } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getOutputNamespacePanel() { if (outputNamespacePanel == null) { GridBagConstraints gridBagConstraints29 = new GridBagConstraints(); gridBagConstraints29.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints29.weighty = 1.0; gridBagConstraints29.gridx = 0; gridBagConstraints29.gridy = 3; gridBagConstraints29.weightx = 1.0; GridBagConstraints gridBagConstraints6 = new GridBagConstraints(); gridBagConstraints6.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints6.gridy = 0; gridBagConstraints6.gridx = 0; gridBagConstraints6.gridheight = 3; gridBagConstraints6.weighty = 1.0; gridBagConstraints6.weightx = 1.0; outputNamespacePanel = new JPanel(); outputNamespacePanel.setLayout(new GridBagLayout()); outputNamespacePanel.add(getOutputNamespacesTypeScrollPane(), gridBagConstraints6); outputNamespacePanel.add(getServicesTypeScrollPane(), gridBagConstraints29); } return outputNamespacePanel; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane 
getOutputNamespacesTypeScrollPane() { if (outputNamespacesTypeScrollPane == null) { outputNamespacesTypeScrollPane = new JScrollPane(); outputNamespacesTypeScrollPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Data Types", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); outputNamespacesTypeScrollPane.setViewportView(getOutputNamespacesJTree()); } return outputNamespacesTypeScrollPane; } /** * This method initializes outputNamespacesJTree * * @return javax.swing.JTree */ private NamespacesJTree getOutputNamespacesJTree() { if (outputNamespacesJTree == null) { outputNamespacesJTree = new NamespacesJTree(info.getNamespaces(), true); outputNamespacesJTree.addMouseListener(new MouseListener() { public void mouseClicked(MouseEvent e) { // TODO Auto-generated method stub if (e.getClickCount() == 2) { if (getOutputNamespacesJTree().getCurrentNode() instanceof SchemaElementTypeTreeNode) { NamespaceType nt = ((NamespaceType) ((NamespaceTypeTreeNode) getOutputNamespacesJTree() .getCurrentNode().getParent()).getUserObject()); SchemaElementType st = ((SchemaElementType) ((SchemaElementTypeTreeNode) getOutputNamespacesJTree() .getCurrentNode()).getUserObject()); MethodTypeOutput output = new MethodTypeOutput(); output.setQName(new QName(nt.getNamespace(), st.getType())); output.setIsArray(false); try { getOutputTypeTable().modifyRow(0, output); } catch (Exception ex) { ex.printStackTrace(); } } } } public void mouseEntered(MouseEvent e) { // TODO Auto-generated method stub } public void mouseExited(MouseEvent e) { // TODO Auto-generated method stub } public void mousePressed(MouseEvent e) { // TODO Auto-generated method stub } public void mouseReleased(MouseEvent e) { // TODO Auto-generated method stub } }); } return outputNamespacesJTree; } /** * This method initializes outputTypesTablePanel * * @return javax.swing.JPanel */ private JPanel getOutputTypesTablePanel() { if (outputTypesTablePanel == null) { GridBagConstraints gridBagConstraints26 = new GridBagConstraints(); gridBagConstraints26.gridx = 0; gridBagConstraints26.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints26.gridy = 0; GridBagConstraints gridBagConstraints22 = new GridBagConstraints(); gridBagConstraints22.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints22.gridx = 0; gridBagConstraints22.gridy = 1; gridBagConstraints22.weightx = 1.0; gridBagConstraints22.weighty = 1.0; gridBagConstraints22.gridwidth = 3; gridBagConstraints22.insets = new java.awt.Insets(2, 2, 2, 2); outputTypesTablePanel = new JPanel(); outputTypesTablePanel.setLayout(new GridBagLayout()); outputTypesTablePanel.add(getOutputTypejScrollPane(), gridBagConstraints22); outputTypesTablePanel.add(getClearOutputTypeButton(), gridBagConstraints26); } return outputTypesTablePanel; } /** * This method initializes inputTypesTablePanel * * @return javax.swing.JPanel */ private JPanel getInputTypesTablePanel() { if (inputTypesTablePanel == null) { GridBagConstraints gridBagConstraints18 = new GridBagConstraints(); gridBagConstraints18.gridx = 1; gridBagConstraints18.fill = java.awt.GridBagConstraints.VERTICAL; gridBagConstraints18.gridy = 0; GridBagConstraints gridBagConstraints14 = new GridBagConstraints(); gridBagConstraints14.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints14.gridwidth = 2; gridBagConstraints14.gridx = 0; gridBagConstraints14.gridy = 1; gridBagConstraints14.weightx = 0.0D; gridBagConstraints14.weighty = 0.0D; 
gridBagConstraints14.insets = new Insets(2, 2, 2, 2); GridBagConstraints gridBagConstraints21 = new GridBagConstraints(); gridBagConstraints21.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints21.gridx = 0; gridBagConstraints21.gridy = 0; gridBagConstraints21.weightx = 1.0; gridBagConstraints21.weighty = 1.0; gridBagConstraints21.insets = new java.awt.Insets(0, 0, 0, 0); inputTypesTablePanel = new JPanel(); inputTypesTablePanel.setLayout(new GridBagLayout()); inputTypesTablePanel.add(getInputParamScrollPanel(), gridBagConstraints21); inputTypesTablePanel.add(getInputButtonPanel(), gridBagConstraints14); inputTypesTablePanel.add(getInputTableControlsPanel(), gridBagConstraints18); } return inputTypesTablePanel; } /** * This method initializes inputTableControlsPanel1 * * @return javax.swing.JPanel */ private JPanel getInputTableControlsPanel() { if (inputTableControlsPanel == null) { GridBagConstraints gridBagConstraints25 = new GridBagConstraints(); gridBagConstraints25.gridx = 0; gridBagConstraints25.gridy = 0; GridBagConstraints gridBagConstraints24 = new GridBagConstraints(); gridBagConstraints24.gridx = 0; gridBagConstraints24.gridy = 1; downLabel = new JLabel(); downLabel.setText(""); downLabel.setIcon(IntroduceLookAndFeel.getDownIcon()); downLabel.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { super.mouseClicked(e); try { getInputParamTable().moveSelectedRowDown(); } catch (Exception e1) { e1.printStackTrace(); } } }); upLabel = new JLabel(); upLabel.setText(""); upLabel.setIcon(IntroduceLookAndFeel.getUpIcon()); upLabel.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { super.mouseClicked(e); try { getInputParamTable().moveSelectedRowUp(); } catch (Exception e1) { e1.printStackTrace(); } } }); inputTableControlsPanel = new JPanel(); inputTableControlsPanel.setLayout(new GridBagLayout()); inputTableControlsPanel.add(upLabel, gridBagConstraints25); inputTableControlsPanel.add(downLabel, gridBagConstraints24); } return inputTableControlsPanel; } /** * This method initializes clearOutputTypeButton * * @return javax.swing.JButton */ private JButton getClearOutputTypeButton() { if (clearOutputTypeButton == null) { clearOutputTypeButton = new JButton(); clearOutputTypeButton.setText("Clear Output Type"); clearOutputTypeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { MethodTypeOutput output = new MethodTypeOutput(); output.setQName(new QName("", "void")); try { getOutputTypeTable().modifyRow(0, output); } catch (Exception e1) { e1.printStackTrace(); } } }); } return clearOutputTypeButton; } /** * This method initializes exceptionsInputButtonPanel * * @return javax.swing.JPanel */ private JPanel getExceptionsInputButtonPanel() { if (exceptionsInputButtonPanel == null) { exceptionsInputButtonPanel = new JPanel(); exceptionsInputButtonPanel.setLayout(new GridBagLayout()); } return exceptionsInputButtonPanel; } /** * Yabba dabba do * * @return */ private JPanel getImportInformationPanel() { if (importInformationPanel == null) { GridBagConstraints gridBagConstraints7 = new GridBagConstraints(); gridBagConstraints7.gridy = 5; gridBagConstraints7.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints7.gridwidth = 2; GridBagConstraints gridBagConstraints36 = new GridBagConstraints(); gridBagConstraints36.gridx = 0; gridBagConstraints36.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints36.gridwidth = 2; gridBagConstraints36.gridy = 6; 
GridBagConstraints gridBagConstraints23 = new GridBagConstraints(); gridBagConstraints23.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints23.gridy = 0; gridBagConstraints23.gridx = 1; GridBagConstraints gridBagConstraints41 = new GridBagConstraints(); gridBagConstraints41.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints41.gridy = 1; gridBagConstraints41.weightx = 1.0; gridBagConstraints41.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints41.gridx = 1; GridBagConstraints gridBagConstraints40 = new GridBagConstraints(); gridBagConstraints40.gridx = 0; gridBagConstraints40.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints40.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints40.gridy = 1; packageNameLabel = new JLabel(); packageNameLabel.setText("Package Name"); GridBagConstraints gridBagConstraints35 = new GridBagConstraints(); gridBagConstraints35.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints35.gridy = 4; gridBagConstraints35.weightx = 1.0; gridBagConstraints35.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints35.gridx = 1; GridBagConstraints gridBagConstraints33 = new GridBagConstraints(); gridBagConstraints33.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints33.gridy = 2; gridBagConstraints33.weightx = 1.0; gridBagConstraints33.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints33.gridx = 1; GridBagConstraints gridBagConstraints32 = new GridBagConstraints(); gridBagConstraints32.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints32.gridy = 0; gridBagConstraints32.weightx = 1.0; gridBagConstraints32.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints32.gridx = 1; GridBagConstraints gridBagConstraints31 = new GridBagConstraints(); gridBagConstraints31.gridx = 0; gridBagConstraints31.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints31.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints31.gridy = 0; namespaceLabel = new JLabel(); namespaceLabel.setText("Namespace"); GridBagConstraints gridBagConstraints30 = new GridBagConstraints(); gridBagConstraints30.gridx = 0; gridBagConstraints30.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints30.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints30.gridy = 4; wsdlFileLabel = new JLabel(); wsdlFileLabel.setText("WSDL File"); GridBagConstraints gridBagConstraints28 = new GridBagConstraints(); gridBagConstraints28.gridx = 0; gridBagConstraints28.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints28.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints28.gridy = 2; serviceName = new JLabel(); serviceName.setText("PortType"); importInformationPanel = new JPanel(); importInformationPanel.setLayout(new GridBagLayout()); importInformationPanel.add(serviceName, gridBagConstraints28); importInformationPanel.add(wsdlFileLabel, gridBagConstraints30); importInformationPanel.add(namespaceLabel, gridBagConstraints31); importInformationPanel.add(getNamespaceTextField(), gridBagConstraints32); importInformationPanel.add(getServiceNameTextField(), gridBagConstraints33); importInformationPanel.add(getWsdlFileTextField(), gridBagConstraints35); importInformationPanel.add(packageNameLabel, gridBagConstraints40); importInformationPanel.add(getPackageNameTextField(), gridBagConstraints41); importInformationPanel.add(getMessagePanel(), gridBagConstraints7); importInformationPanel.add(getProviderInformationPanel(), gridBagConstraints36); } 
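// The import-information text fields below pre-populate themselves from the method's MethodTypeImportInformation (namespace, port type, WSDL file, package, message QNames) whenever the operation was imported from an existing WSDL; each lazy getter checks method.getImportInformation() before setting its text.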
/** * This method initializes namespaceTextField * * @return javax.swing.JTextField */ private JTextField getNamespaceTextField() { if (namespaceTextField == null) { namespaceTextField = new JTextField(); if (method.getImportInformation() != null) { namespaceTextField.setText(method.getImportInformation().getNamespace()); } } return namespaceTextField; } /** * This method initializes serviceNameTextField * * @return javax.swing.JTextField */ private JTextField getServiceNameTextField() { if (serviceNameTextField == null) { serviceNameTextField = new JTextField(); if (method.getImportInformation() != null) { serviceNameTextField.setText(method.getImportInformation().getPortTypeName()); } } return serviceNameTextField; } /** * This method initializes wsdlFileTextField * * @return javax.swing.JTextField */ private JTextField getWsdlFileTextField() { if (wsdlFileTextField == null) { wsdlFileTextField = new JTextField(); if (method.getImportInformation() != null) { wsdlFileTextField.setText(method.getImportInformation().getWsdlFile()); } } return wsdlFileTextField; } /** * This method initializes isImportedCheckBox * * @return javax.swing.JCheckBox */ private JCheckBox getIsImportedCheckBox() { if (isImportedCheckBox == null) { isImportedCheckBox = new JCheckBox(); isImportedCheckBox.setText("imported"); isImportedCheckBox.setSelected(method.isIsImported()); if (isImportedCheckBox.isSelected()) { getTabbedPanel().setEnabledAt(2, true); } else { getTabbedPanel().setEnabledAt(2, false); if (getTabbedPanel().getSelectedIndex() == 2) { getTabbedPanel().setSelectedIndex(0); } } isImportedCheckBox.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (isImportedCheckBox.isSelected()) { getTabbedPanel().setEnabledAt(2, true); } else { getTabbedPanel().setEnabledAt(2, false); if (getTabbedPanel().getSelectedIndex() == 2) { getTabbedPanel().setSelectedIndex(0); } } } }); } return isImportedCheckBox; } /** * This method initializes packageNameTextField * * @return javax.swing.JTextField */ private JTextField getPackageNameTextField() { if (packageNameTextField == null) { packageNameTextField = new JTextField(); if (method.getImportInformation() != null) { packageNameTextField.setText(method.getImportInformation().getPackageName()); } } return packageNameTextField; } /** * This method initializes isProvidedCheckBox * * @return javax.swing.JCheckBox */ private JCheckBox getIsProvidedCheckBox() { if (isProvidedCheckBox == null) { isProvidedCheckBox = new JCheckBox(); isProvidedCheckBox.setText("Provided"); getProviderClassnameTextField().setEnabled(false); getProviderClassnameTextField().setEditable(false); isProvidedCheckBox.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (isProvidedCheckBox.isSelected()) { getProviderClassnameTextField().setEnabled(true); getProviderClassnameTextField().setEditable(true); } else { getProviderClassnameTextField().setEnabled(false); getProviderClassnameTextField().setEditable(false); } } }); } isProvidedCheckBox.setSelected(method.isIsProvided()); return isProvidedCheckBox; } /** * This method initializes servicesTypeScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getServicesTypeScrollPane() { if (servicesTypeScrollPane == null) { servicesTypeScrollPane = new JScrollPane(); servicesTypeScrollPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Client Handle Types", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, 
javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); servicesTypeScrollPane.setViewportView(getServicesTypeTable()); } return servicesTypeScrollPane; } /** * This method initializes servicesTypeTable * * @return javax.swing.JTable */ private ServicesTable getServicesTypeTable() { if (servicesTypeTable == null) { servicesTypeTable = new ServicesTable(info.getServices()); servicesTypeTable.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { super.mouseClicked(e); if (e.getClickCount() == 2) { // set the epr type as this outputType MethodTypeOutput output = new MethodTypeOutput(); try { output.setQName(new QName(getServicesTypeTable().getSelectedRowData().getNamespace() + "/types", getServicesTypeTable().getSelectedRowData().getName() + "Reference")); output.setIsArray(false); output.setIsClientHandle(new Boolean(true)); output.setClientHandleClass(getServicesTypeTable().getSelectedRowData().getPackageName() + "." + "client" + "." + getServicesTypeTable().getSelectedRowData().getName() + "Client"); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } try { getOutputTypeTable().modifyRow(0, output); } catch (Exception ex) { ex.printStackTrace(); } } } }); } return servicesTypeTable; } /** * This method initializes providerClassnameTextField * * @return javax.swing.JTextField */ private JTextField getProviderClassnameTextField() { if (providerClassnameTextField == null) { providerClassnameTextField = new JTextField(); if (method.getProviderInformation() != null && method.getProviderInformation().getProviderClass() != null) { providerClassnameTextField.setText(method.getProviderInformation().getProviderClass()); } } return providerClassnameTextField; } /** * This method initializes providerInformationPanel * * @return javax.swing.JPanel */ private JPanel getProviderInformationPanel() { if (providerInformationPanel == null) { GridBagConstraints gridBagConstraints34 = new GridBagConstraints(); gridBagConstraints34.gridx = 0; gridBagConstraints34.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints34.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints34.anchor = java.awt.GridBagConstraints.WEST; gridBagConstraints34.gridy = 1; GridBagConstraints gridBagConstraints37 = new GridBagConstraints(); gridBagConstraints37.gridx = 1; gridBagConstraints37.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints37.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints37.weightx = 1.0D; gridBagConstraints37.gridy = 1; GridBagConstraints gridBagConstraints38 = new GridBagConstraints(); gridBagConstraints38.gridx = 0; gridBagConstraints38.gridwidth = 2; gridBagConstraints38.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints38.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints38.anchor = java.awt.GridBagConstraints.CENTER; gridBagConstraints38.weightx = 0.0D; gridBagConstraints38.gridy = 0; providerClassnameLabel = new JLabel(); providerClassnameLabel.setText("Provider Classname"); providerInformationPanel = new JPanel(); providerInformationPanel.setLayout(new GridBagLayout()); providerInformationPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Provider Information", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); providerInformationPanel.add(getIsProvidedCheckBox(), gridBagConstraints38); 
providerInformationPanel.add(providerClassnameLabel, gridBagConstraints34); providerInformationPanel.add(getProviderClassnameTextField(), gridBagConstraints37); } return providerInformationPanel; } /** * This method initializes jSplitPane * * @return javax.swing.JSplitPane */ private JSplitPane getInputParamsSplitPane() { if (inputParamsSplitPane == null) { inputParamsSplitPane = new JSplitPane(); inputParamsSplitPane.setSize(new java.awt.Dimension(173, 68)); inputParamsSplitPane.setOneTouchExpandable(true); inputParamsSplitPane.setLeftComponent(getInputNamespacesPanel()); inputParamsSplitPane.setRightComponent(getInputTypesTablePanel()); inputParamsSplitPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Input Parameters", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); } return inputParamsSplitPane; } /** * This method initializes jSplitPane * * @return javax.swing.JSplitPane */ private JSplitPane getOutputTypeSplitPane() { if (outputTypeSplitPane == null) { outputTypeSplitPane = new JSplitPane(); outputTypeSplitPane.setSize(new java.awt.Dimension(192, 90)); outputTypeSplitPane.setOneTouchExpandable(true); outputTypeSplitPane.setLeftComponent(getOutputNamespacePanel()); outputTypeSplitPane.setRightComponent(getOutputTypesTablePanel()); outputTypeSplitPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Output Type", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, PortalLookAndFeel.getPanelLabelColor())); } return outputTypeSplitPane; } /** * This method initializes messagePanel * * @return javax.swing.JPanel */ private JPanel getMessagePanel() { if (messagePanel == null) { GridBagConstraints gridBagConstraints44 = new GridBagConstraints(); gridBagConstraints44.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints44.gridy = 2; gridBagConstraints44.weightx = 1.0; gridBagConstraints44.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints44.gridx = 2; GridBagConstraints gridBagConstraints43 = new GridBagConstraints(); gridBagConstraints43.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints43.gridy = 1; gridBagConstraints43.weightx = 1.0; gridBagConstraints43.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints43.gridx = 2; GridBagConstraints gridBagConstraints42 = new GridBagConstraints(); gridBagConstraints42.gridx = 0; gridBagConstraints42.gridy = 0; GridBagConstraints gridBagConstraints39 = new GridBagConstraints(); gridBagConstraints39.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints39.gridy = 2; gridBagConstraints39.weightx = 1.0; gridBagConstraints39.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints39.gridx = 1; GridBagConstraints gridBagConstraints17 = new GridBagConstraints(); gridBagConstraints17.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints17.gridy = 1; gridBagConstraints17.weightx = 1.0; gridBagConstraints17.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints17.gridx = 1; GridBagConstraints gridBagConstraints16 = new GridBagConstraints(); gridBagConstraints16.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints16.gridy = 2; gridBagConstraints16.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints16.gridx = 0; GridBagConstraints gridBagConstraints8 = new GridBagConstraints(); gridBagConstraints8.insets = new java.awt.Insets(2, 2, 2, 2); gridBagConstraints8.gridy = 1; 
gridBagConstraints8.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints8.gridx = 0; outputLabel = new JLabel(); outputLabel.setText("Output Message QName"); inputLabel = new JLabel(); inputLabel.setText("Input Message QName"); messagePanel = new JPanel(); messagePanel.setLayout(new GridBagLayout()); messagePanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Messages", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); messagePanel.add(inputLabel, gridBagConstraints8); messagePanel.add(outputLabel, gridBagConstraints16); messagePanel.add(getInputMessageNamespaceTextField(), gridBagConstraints17); messagePanel.add(getOutputMessageNamespaceTextField(), gridBagConstraints39); messagePanel.add(getMessagesCheckBox(), gridBagConstraints42); messagePanel.add(getInputMessageNameTextField(), gridBagConstraints43); messagePanel.add(getOutputMessageNameTextField(), gridBagConstraints44); } return messagePanel; } /** * This method initializes inputMessageTextField * * @return javax.swing.JTextField */ private JTextField getInputMessageNamespaceTextField() { if (inputMessageNamespaceTextField == null) { inputMessageNamespaceTextField = new JTextField(); if (method.isIsImported()) { inputMessageNamespaceTextField.setText(method.getImportInformation().getInputMessage() .getNamespaceURI()); } } return inputMessageNamespaceTextField; } /** * This method initializes ouputMessageTextField * * @return javax.swing.JTextField */ private JTextField getOutputMessageNamespaceTextField() { if (outputMessageNamespaceTextField == null) { outputMessageNamespaceTextField = new JTextField(); if (method.isIsImported()) { outputMessageNamespaceTextField.setText(method.getImportInformation().getOutputMessage() .getNamespaceURI()); } } return outputMessageNamespaceTextField; } /** * This method initializes messagesCheckBox * * @return javax.swing.JCheckBox */ private JCheckBox getMessagesCheckBox() { if (messagesCheckBox == null) { messagesCheckBox = new JCheckBox(); messagesCheckBox.setText("customize message imports"); messagesCheckBox.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent e) { if (messagesCheckBox.isSelected()) { getInputMessageNamespaceTextField().setEnabled(true); getInputMessageNamespaceTextField().setEditable(true); getInputMessageNameTextField().setEnabled(true); getInputMessageNameTextField().setEditable(true); getOutputMessageNamespaceTextField().setEnabled(true); getOutputMessageNamespaceTextField().setEditable(true); getOutputMessageNameTextField().setEnabled(true); getOutputMessageNameTextField().setEditable(true); } else { getInputMessageNamespaceTextField().setEnabled(false); getInputMessageNamespaceTextField().setEditable(false); getInputMessageNameTextField().setEnabled(false); getInputMessageNameTextField().setEditable(false); getOutputMessageNamespaceTextField().setEnabled(false); getOutputMessageNamespaceTextField().setEditable(false); getOutputMessageNameTextField().setEnabled(false); getOutputMessageNameTextField().setEditable(false); } } }); getInputMessageNamespaceTextField().setEnabled(false); getInputMessageNamespaceTextField().setEditable(false); getInputMessageNameTextField().setEnabled(false); getInputMessageNameTextField().setEditable(false); getOutputMessageNamespaceTextField().setEnabled(false); getOutputMessageNamespaceTextField().setEditable(false); 
getOutputMessageNameTextField().setEnabled(false); getOutputMessageNameTextField().setEditable(false); } return messagesCheckBox; } /** * This method initializes inputMessageNameTextField * * @return javax.swing.JTextField */ private JTextField getInputMessageNameTextField() { if (inputMessageNameTextField == null) { inputMessageNameTextField = new JTextField(); if (method.isIsImported()) { inputMessageNameTextField.setText(method.getImportInformation().getInputMessage().getLocalPart()); } } return inputMessageNameTextField; } /** * This method initializes outputMessageNameTextField * * @return javax.swing.JTextField */ private JTextField getOutputMessageNameTextField() { if (outputMessageNameTextField == null) { outputMessageNameTextField = new JTextField(); if (method.isIsImported()) { outputMessageNameTextField.setText(method.getImportInformation().getOutputMessage().getLocalPart()); } } return outputMessageNameTextField; } /** * This method initializes createFaultPanel * * @return javax.swing.JPanel */ private JPanel getCreateFaultPanel() { if (createFaultPanel == null) { GridBagConstraints gridBagConstraints48 = new GridBagConstraints(); gridBagConstraints48.gridx = 1; gridBagConstraints48.gridy = 0; faultTypeNameLabel = new JLabel(); faultTypeNameLabel.setText("Fault Type Name:"); GridBagConstraints gridBagConstraints47 = new GridBagConstraints(); gridBagConstraints47.gridx = 3; gridBagConstraints47.gridy = 0; GridBagConstraints gridBagConstraints45 = new GridBagConstraints(); gridBagConstraints45.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints45.gridx = 2; gridBagConstraints45.gridy = 0; gridBagConstraints45.weightx = 1.0; gridBagConstraints45.insets = new java.awt.Insets(5, 5, 5, 5); createFaultPanel = new JPanel(); createFaultPanel.setLayout(new GridBagLayout()); createFaultPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Create New Service Faults", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); createFaultPanel.add(getNewFaultNameTextField(), gridBagConstraints45); createFaultPanel.add(getCreateFaultButton(), gridBagConstraints47); createFaultPanel.add(faultTypeNameLabel, gridBagConstraints48); } return createFaultPanel; } /** * This method initializes newFaultNameTextField * * @return javax.swing.JTextField */ private JTextField getNewFaultNameTextField() { if (newFaultNameTextField == null) { newFaultNameTextField = new JTextField(); } return newFaultNameTextField; } /** * This method initializes createFaultButton * * @return javax.swing.JButton */ private JButton getCreateFaultButton() { if (createFaultButton == null) { createFaultButton = new JButton(IntroduceLookAndFeel.getAddIcon()); createFaultButton.setText("Add New Fault"); createFaultButton .setToolTipText("Creates a new fault under this service's types namespace and adds it to the list of available faults."); createFaultButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { QName exceptionQName = null; if (CommonTools.isValidServiceName(getNewFaultNameTextField().getText())) { exceptionQName = new QName(info.getService().getNamespace() + "/types", getNewFaultNameTextField().getText()); } else { JOptionPane.showMessageDialog(MethodViewer.this, "Invalid Exception Name(" + getNewFaultNameTextField().getText() + "): Exception must be a valid java identifier."); return; } ExceptionHolder holder = new 
ExceptionHolder(exceptionQName, false); getExceptionJComboBox().addItem(holder); getExceptionsTable().addRow(holder.qname, holder.isCreated); } }); } return createFaultButton; } /** * This method initializes exceptionsPanelSplitPane * * @return javax.swing.JSplitPane */ private JSplitPane getExceptionsPanelSplitPane() { if (exceptionsPanelSplitPane == null) { exceptionsPanelSplitPane = new JSplitPane(); exceptionsPanelSplitPane.setRightComponent(getExceptionScrollPane()); exceptionsPanelSplitPane.setLeftComponent(getExceptionNamespacesScrollPane()); exceptionsPanelSplitPane.setDividerLocation(0.40); } return exceptionsPanelSplitPane; } /** * This method initializes exceptionNamespacesScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getExceptionNamespacesScrollPane() { if (exceptionNamespacesScrollPane == null) { exceptionNamespacesScrollPane = new JScrollPane(); exceptionNamespacesScrollPane.setViewportView(getNamespacesJTree()); } return exceptionNamespacesScrollPane; } /** * This method initializes namespacesJTree * * @return javax.swing.JTree */ private NamespacesJTree getNamespacesJTree() { if (namespacesJTree == null) { namespacesJTree = new NamespacesJTree(info.getNamespaces(), false); } return namespacesJTree; } /** * This method initializes faultsFromTypesPanel * * @return javax.swing.JPanel */ private JPanel getFaultsFromTypesPanel() { if (faultsFromTypesPanel == null) { faultsFromTypesPanel = new JPanel(); faultsFromTypesPanel.setLayout(new GridBagLayout()); faultsFromTypesPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Add Fault From Types", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, IntroduceLookAndFeel.getPanelLabelColor())); faultsFromTypesPanel.add(getAddFaultFromTypeButton(), new GridBagConstraints()); } return faultsFromTypesPanel; } /** * This method initializes addFaultFromTypeButton * * @return javax.swing.JButton */ private JButton getAddFaultFromTypeButton() { if (addFaultFromTypeButton == null) { addFaultFromTypeButton = new JButton(IntroduceLookAndFeel.getAddIcon()); addFaultFromTypeButton.setText("Add From Type"); addFaultFromTypeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { if (getNamespacesJTree().getCurrentNode() instanceof SchemaElementTypeTreeNode) { NamespaceType nt = ((NamespaceType) ((NamespaceTypeTreeNode) getNamespacesJTree() .getCurrentNode().getParent()).getUserObject()); SchemaElementType st = ((SchemaElementType) ((SchemaElementTypeTreeNode) getNamespacesJTree() .getCurrentNode()).getUserObject()); if (CommonTools.validateIsFaultType(nt, st, new File(info.getBaseDirectory().getAbsolutePath() + File.separator + "schema" + File.separator + info.getServices().getService(0).getName()))) { QName qname = new QName(nt.getNamespace(), st.getType()); ExceptionHolder holder = new ExceptionHolder(qname, true); getExceptionJComboBox().addItem(holder); getExceptionsTable().addRow(holder.qname, holder.isCreated); } else { JOptionPane.showMessageDialog(MethodViewer.this, "Type does not appear to extend from {" + IntroduceConstants.BASEFAULTS_NAMESPACE + "}BaseFaultType"); } } else { JOptionPane.showMessageDialog(MethodViewer.this, "Please select a type to add"); } } }); } return addFaultFromTypeButton; } /** * This method initializes removeFaultPanel * * @return javax.swing.JPanel */ private JPanel getRemoveFaultPanel() { if (removeFaultPanel == null) { GridBagConstraints 
gridBagConstraints5 = new GridBagConstraints(); gridBagConstraints5.insets = new Insets(2, 2, 2, 2); gridBagConstraints5.gridx = -1; gridBagConstraints5.gridy = -1; gridBagConstraints5.gridheight = 2; removeFaultPanel = new JPanel(); removeFaultPanel.setLayout(new GridBagLayout()); removeFaultPanel.add(getRemoveExceptionButton(), gridBagConstraints5); } return removeFaultPanel; } } // @jve:decl-index=0:visual-constraint="4,12"
*** empty log message ***
cagrid-1-0/caGrid/projects/introduce/src/java/Portal/gov/nih/nci/cagrid/introduce/portal/modification/services/methods/MethodViewer.java
*** empty log message ***
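The "customize message imports" checkbox in the MethodViewer code above toggles enabled/editable on the four message QName text fields one call at a time, both in the item listener and for the initial state. A minimal sketch of how that toggle could be factored into a single helper is given below; the method name setMessageFieldsEditable is hypothetical and not part of the recorded file.

// Hypothetical helper (not in the original MethodViewer): toggles the four
// input/output message QName fields together instead of one call at a time.
private void setMessageFieldsEditable(boolean editable) {
    JTextField[] fields = {
        getInputMessageNamespaceTextField(), getInputMessageNameTextField(),
        getOutputMessageNamespaceTextField(), getOutputMessageNameTextField()
    };
    for (JTextField field : fields) {
        field.setEnabled(editable);
        field.setEditable(editable);
    }
}

With such a helper, the listener body and the initial setup each reduce to a single call, setMessageFieldsEditable(messagesCheckBox.isSelected()) and setMessageFieldsEditable(false) respectively.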
Java
bsd-3-clause
375df269c709ab7f52ad61d45b46ca250d269424
0
inepex/ineform,inepex/ineform
package com.inepex.ineForm.client.form; import java.util.ArrayList; import java.util.List; import com.google.gwt.event.shared.EventHandler; import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.event.shared.SimpleEventBus; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; import com.google.inject.assistedinject.Assisted; import com.inepex.ineForm.client.form.events.AfterUnsuccessfulSaveEvent; import com.inepex.ineForm.client.form.events.BeforeCancelEvent; import com.inepex.ineForm.client.form.events.BeforeSaveEvent; import com.inepex.ineForm.client.form.events.CancelledEvent; import com.inepex.ineForm.client.form.events.DeletedEvent; import com.inepex.ineForm.client.form.events.FormLifecycleEventBase; import com.inepex.ineForm.client.form.events.SavedEvent; import com.inepex.ineForm.client.form.formunits.AbstractFormUnit; import com.inepex.ineForm.client.form.widgets.customkvo.CustomKVOFW; import com.inepex.ineForm.client.i18n.IneFormI18n; import com.inepex.ineForm.client.table.IneDataConnector; import com.inepex.ineForm.client.table.IneDataConnector.ManipulateResultCallback; import com.inepex.ineom.shared.IFConsts; import com.inepex.ineom.shared.assistedobject.AssistedObject; import com.inepex.ineom.shared.descriptor.CustomKVOObjectDesc; import com.inepex.ineom.shared.descriptor.ValidatorDesc; import com.inepex.ineom.shared.dispatch.interfaces.ObjectManipulationResult; import com.inepex.ineom.shared.validation.ValidationResult; /** * A form that supports saving a new KeyValueObject and saving the modifications * of an existing one. It needs an IneDataConnector that implements the saving. * This form also fires following FormLifeCycle events:<br/> * <br/> * - {@link BeforeSaveEvent}<br/> * - {@link SavedEvent}<br/> * - {@link BeforeCancelEvent}<br/> * - {@link CancelledEvent}<br/> * <br/> * The form also validates before saving according to the default {@link ValidatorDesc}.<br/> * * @author istvanszoboszlai * */ public class SaveCancelForm extends IneForm implements SaveCancelFormView.Delegate { private class ManipulateCallback implements ManipulateResultCallback { @Override public void onManipulationResult(com.inepex.ineom.shared.dispatch.interfaces.ObjectManipulationResult result) { dealValidationResult(result.getValidationResult()); if(result.getValidationResult() == null) { if (result.getObjectsNewState() != null) { setInitialData(result.getObjectsNewState()); } fireSavedEvent(result); } else { fireAfterUnsuccesfulSaveEvent(result); } } } private class DeleteCallback implements ManipulateResultCallback { private Long id; public DeleteCallback(Long id) { super(); this.id = id; } @Override public void onManipulationResult(ObjectManipulationResult result) { fireDeletedEvent(id); } } // this flag indicates whether we want to display an error message // when the save button is clicked, but there were no modifications private boolean displayNothingToSaveMsg = true; protected final SimpleEventBus ineformEventbus = new SimpleEventBus(); private IneDataConnector ineDataConnector; private AssistedObject originalData; private AssistedObject kvo; public enum ValidateMode { ALL, PARTIAL, NONE; } protected SaveCancelFormView view; /** * @param ineDataConnector * @param descriptorName * @param valueRangeProvider * @param formRenderDescName should be DescriptorStore.DEFAULT_DESC_KEY or null if default descriptor needed * @param eventBus * @param view set it to null to use default view */ @Inject public SaveCancelForm(FormContext formCtx, 
@Assisted("dn") String descriptorName, @Assisted("frdn") String formRDescName, @Assisted IneDataConnector ineDataConnector, @Assisted SaveCancelFormView view) { super(formCtx, descriptorName, formRDescName); this.ineDataConnector = ineDataConnector; if (view == null) this.view = new DefaultSaveCancelFormView(); else this.view = view; this.view.setDelegate(this); } public IneDataConnector getIneDataConnector() { return ineDataConnector; } public void setSaveButtonVisible(boolean visible) { view.setSaveButtonVisible(visible); } public void setCancelButtonVisible(boolean visible) { view.setCancelButtonVisible(visible); } @Override public Widget asWidget() { return view.asWidget(); } @Override public void renderForm() { view.setFormWidget(super.asWidget()); super.renderForm(); } public void setSaveButtonText(String saveButtonText) { view.setSaveButtonText(saveButtonText); } public void setCancelButtonText(String cancelButtonText) { view.setCancelButtonText(cancelButtonText); } public void save(){ originalData = getInitialOrEmptyData(); kvo = getValues(originalData.clone()); if (fireBeforeSaveEvent(kvo).isCancelled()) return; doSave(); } public void doSave(){ boolean isFormValid = doValidate(kvo).isValid(); view.setFormValidationSuccess(isFormValid); if (!isFormValid){ return; } // Send only the changes to the server AssistedObject difference = handlerFactory.createHandler(kvo).getDifference( handlerFactory.createHandler(originalData)).getAssistedObject(); if (difference.getKeys().size() == 0 || difference.getKeys().size() == 1 && difference.getKeys().get(0).equals(IFConsts.KEY_ID)) { if(displayNothingToSaveMsg){ ValidationResult vr = new ValidationResult(); vr.addGeneralError(IneFormI18n.validationNothingToSave()); dealValidationResult(vr); }else cancelClicked(); return; } List<CustomKVOObjectDesc> descs = new ArrayList<CustomKVOObjectDesc>(); for(AbstractFormUnit unit : getRootPanelWidget().getFormUnits()) { for(String s : unit.getFormWidgetKeySet()) { if(!(unit.getWidgetByKey(s) instanceof CustomKVOFW)) continue; descs.add(((CustomKVOFW)unit.getWidgetByKey(s)).getOdFromRows()); } } ineDataConnector.objectCreateOrEditRequested(difference, new ManipulateCallback(), descs.size()>0 ? 
descs.toArray(new CustomKVOObjectDesc[descs.size()]) : null); } public HandlerRegistration addBeforeSaveHandler(BeforeSaveEvent.Handler handler) { return ineformEventbus.addHandler(BeforeSaveEvent.getType(), handler); } public HandlerRegistration addSavedHandler(SavedEvent.Handler handler) { return ineformEventbus.addHandler(SavedEvent.getType(), handler); } public HandlerRegistration addBeforeCancelHandler(BeforeCancelEvent.Handler handler) { return ineformEventbus.addHandler(BeforeCancelEvent.getType(), handler); } public HandlerRegistration addCancelledHandler(CancelledEvent.Handler handler) { return ineformEventbus.addHandler(CancelledEvent.getType(), handler); } public HandlerRegistration addAfterUnsuccesfulSaveHandler(AfterUnsuccessfulSaveEvent.Handler handler) { return ineformEventbus.addHandler(AfterUnsuccessfulSaveEvent.getType(), handler); } public HandlerRegistration addDeletedHandler(DeletedEvent.Handler handler) { return ineformEventbus.addHandler(DeletedEvent.getType(), handler); } public BeforeCancelEvent fireBeforeCancelEvent() { return doFireEvent(new BeforeCancelEvent()); } public CancelledEvent fireCancelledEvent() { return doFireEvent(new CancelledEvent()); } public BeforeSaveEvent fireBeforeSaveEvent(AssistedObject kvo) { return doFireEvent(new BeforeSaveEvent(kvo)); } public SavedEvent fireSavedEvent(com.inepex.ineom.shared.dispatch.interfaces.ObjectManipulationResult objectManipulationResult) { return doFireEvent(new SavedEvent(objectManipulationResult)); } public AfterUnsuccessfulSaveEvent fireAfterUnsuccesfulSaveEvent(com.inepex.ineom.shared.dispatch.interfaces.ObjectManipulationResult objectManipulationResult) { return doFireEvent(new AfterUnsuccessfulSaveEvent(objectManipulationResult)); } public DeletedEvent fireDeletedEvent(Long id) { return doFireEvent(new DeletedEvent(id)); } private <T extends EventHandler, E extends FormLifecycleEventBase<T>> E doFireEvent(E event) { ineformEventbus.fireEvent(event); return event; } public void setSaveBtnStyle(String style){ view.setSaveBtnStyle(style); } public void addSaveBtnStyle(String style){ view.addSaveBtnStyle(style); } public void setCancelBtnStyle(String style){ view.setCancelBtnStyle(style); } public void addCancelBtnStyle(String style){ view.addCancelBtnStyle(style); } public boolean isDisplayNothingToSaveMsg() { return displayNothingToSaveMsg; } /** * This flag indicates whether we want to display an error message * when the save button is clicked, but there were no modifications * * @param display */ public void displayNothingToSaveMsg(boolean display) { this.displayNothingToSaveMsg = display; } @Override public void saveClicked() { save(); } @Override public void cancelClicked() { if (!fireBeforeCancelEvent().isCancelled()) fireCancelledEvent(); } @Override public void deleteClicked() { AssistedObject data = getInitialOrEmptyData(); if (data.getId() == IFConsts.NEW_ITEM_ID) throw new RuntimeException("Delete called for a newly created object"); ineDataConnector.objectDeleteRequested(data, new DeleteCallback(data.getId())); } @Override public HandlerRegistration addFormSavedHandlerFromView(SavedEvent.Handler handler) { return addSavedHandler(handler); } @Override public HandlerRegistration addFormAfterUnsuccesfulSaveHandlerFromView( AfterUnsuccessfulSaveEvent.Handler handler) { return addAfterUnsuccesfulSaveHandler(handler); } }
ineform/src/main/java/com/inepex/ineForm/client/form/SaveCancelForm.java
package com.inepex.ineForm.client.form; import java.util.ArrayList; import java.util.List; import com.google.gwt.event.shared.EventHandler; import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.event.shared.SimpleEventBus; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; import com.google.inject.assistedinject.Assisted; import com.inepex.ineForm.client.form.events.AfterUnsuccessfulSaveEvent; import com.inepex.ineForm.client.form.events.BeforeCancelEvent; import com.inepex.ineForm.client.form.events.BeforeSaveEvent; import com.inepex.ineForm.client.form.events.CancelledEvent; import com.inepex.ineForm.client.form.events.DeletedEvent; import com.inepex.ineForm.client.form.events.FormLifecycleEventBase; import com.inepex.ineForm.client.form.events.SavedEvent; import com.inepex.ineForm.client.form.formunits.AbstractFormUnit; import com.inepex.ineForm.client.form.widgets.customkvo.CustomKVOFW; import com.inepex.ineForm.client.i18n.IneFormI18n; import com.inepex.ineForm.client.table.IneDataConnector; import com.inepex.ineForm.client.table.IneDataConnector.ManipulateResultCallback; import com.inepex.ineom.shared.IFConsts; import com.inepex.ineom.shared.assistedobject.AssistedObject; import com.inepex.ineom.shared.descriptor.CustomKVOObjectDesc; import com.inepex.ineom.shared.descriptor.ValidatorDesc; import com.inepex.ineom.shared.dispatch.interfaces.ObjectManipulationResult; import com.inepex.ineom.shared.validation.ValidationResult; /** * A form that supports saving a new KeyValueObject and saving the modifications * of an existing one. It needs an IneDataConnector that implements the saving. * This form also fires following FormLifeCycle events:<br/> * <br/> * - {@link BeforeSaveEvent}<br/> * - {@link SavedEvent}<br/> * - {@link BeforeCancelEvent}<br/> * - {@link CancelledEvent}<br/> * <br/> * The form also validates before saving according to the default {@link ValidatorDesc}.<br/> * * @author istvanszoboszlai * */ public class SaveCancelForm extends IneForm implements SaveCancelFormView.Delegate { private class ManipulateCallback implements ManipulateResultCallback { @Override public void onManipulationResult(com.inepex.ineom.shared.dispatch.interfaces.ObjectManipulationResult result) { dealValidationResult(result.getValidationResult()); if(result.getValidationResult() == null) { if (result.getObjectsNewState() != null) { setInitialData(result.getObjectsNewState()); } fireSavedEvent(result); } else { fireAfterUnsuccesfulSaveEvent(result); } } } private class DeleteCallback implements ManipulateResultCallback { private Long id; public DeleteCallback(Long id) { super(); this.id = id; } @Override public void onManipulationResult(ObjectManipulationResult result) { fireDeletedEvent(id); } } // this flag indicates whether we want to display an error message // when the save button is clicked, but there were no modifications private boolean displayNothingToSaveMsg = true; protected final SimpleEventBus ineformEventbus = new SimpleEventBus(); private IneDataConnector ineDataConnector; private AssistedObject originalData; private AssistedObject kvo; public enum ValidateMode { ALL, PARTIAL, NONE; } protected SaveCancelFormView view; /** * @param ineDataConnector * @param descriptorName * @param valueRangeProvider * @param formRenderDescName should be DescriptorStore.DEFAULT_DESC_KEY or null if default descriptor needed * @param eventBus * @param view set it to null to use default view */ @Inject public SaveCancelForm(FormContext formCtx, 
@Assisted("dn") String descriptorName, @Assisted("frdn") String formRDescName, @Assisted IneDataConnector ineDataConnector, @Assisted SaveCancelFormView view) { super(formCtx, descriptorName, formRDescName); this.ineDataConnector = ineDataConnector; if (view == null) this.view = new DefaultSaveCancelFormView(); else this.view = view; this.view.setDelegate(this); } public void setSaveButtonVisible(boolean visible) { view.setSaveButtonVisible(visible); } public void setCancelButtonVisible(boolean visible) { view.setCancelButtonVisible(visible); } @Override public Widget asWidget() { return view.asWidget(); } @Override public void renderForm() { view.setFormWidget(super.asWidget()); super.renderForm(); } public void setSaveButtonText(String saveButtonText) { view.setSaveButtonText(saveButtonText); } public void setCancelButtonText(String cancelButtonText) { view.setCancelButtonText(cancelButtonText); } public void save(){ originalData = getInitialOrEmptyData(); kvo = getValues(originalData.clone()); if (fireBeforeSaveEvent(kvo).isCancelled()) return; doSave(); } public void doSave(){ boolean isFormValid = doValidate(kvo).isValid(); view.setFormValidationSuccess(isFormValid); if (!isFormValid){ return; } // Send only the changes to the server AssistedObject difference = handlerFactory.createHandler(kvo).getDifference( handlerFactory.createHandler(originalData)).getAssistedObject(); if (difference.getKeys().size() == 0 || difference.getKeys().size() == 1 && difference.getKeys().get(0).equals(IFConsts.KEY_ID)) { if(displayNothingToSaveMsg){ ValidationResult vr = new ValidationResult(); vr.addGeneralError(IneFormI18n.validationNothingToSave()); dealValidationResult(vr); }else cancelClicked(); return; } List<CustomKVOObjectDesc> descs = new ArrayList<CustomKVOObjectDesc>(); for(AbstractFormUnit unit : getRootPanelWidget().getFormUnits()) { for(String s : unit.getFormWidgetKeySet()) { if(!(unit.getWidgetByKey(s) instanceof CustomKVOFW)) continue; descs.add(((CustomKVOFW)unit.getWidgetByKey(s)).getOdFromRows()); } } ineDataConnector.objectCreateOrEditRequested(difference, new ManipulateCallback(), descs.size()>0 ? 
descs.toArray(new CustomKVOObjectDesc[descs.size()]) : null); } public HandlerRegistration addBeforeSaveHandler(BeforeSaveEvent.Handler handler) { return ineformEventbus.addHandler(BeforeSaveEvent.getType(), handler); } public HandlerRegistration addSavedHandler(SavedEvent.Handler handler) { return ineformEventbus.addHandler(SavedEvent.getType(), handler); } public HandlerRegistration addBeforeCancelHandler(BeforeCancelEvent.Handler handler) { return ineformEventbus.addHandler(BeforeCancelEvent.getType(), handler); } public HandlerRegistration addCancelledHandler(CancelledEvent.Handler handler) { return ineformEventbus.addHandler(CancelledEvent.getType(), handler); } public HandlerRegistration addAfterUnsuccesfulSaveHandler(AfterUnsuccessfulSaveEvent.Handler handler) { return ineformEventbus.addHandler(AfterUnsuccessfulSaveEvent.getType(), handler); } public HandlerRegistration addDeletedHandler(DeletedEvent.Handler handler) { return ineformEventbus.addHandler(DeletedEvent.getType(), handler); } public BeforeCancelEvent fireBeforeCancelEvent() { return doFireEvent(new BeforeCancelEvent()); } public CancelledEvent fireCancelledEvent() { return doFireEvent(new CancelledEvent()); } public BeforeSaveEvent fireBeforeSaveEvent(AssistedObject kvo) { return doFireEvent(new BeforeSaveEvent(kvo)); } public SavedEvent fireSavedEvent(com.inepex.ineom.shared.dispatch.interfaces.ObjectManipulationResult objectManipulationResult) { return doFireEvent(new SavedEvent(objectManipulationResult)); } public AfterUnsuccessfulSaveEvent fireAfterUnsuccesfulSaveEvent(com.inepex.ineom.shared.dispatch.interfaces.ObjectManipulationResult objectManipulationResult) { return doFireEvent(new AfterUnsuccessfulSaveEvent(objectManipulationResult)); } public DeletedEvent fireDeletedEvent(Long id) { return doFireEvent(new DeletedEvent(id)); } private <T extends EventHandler, E extends FormLifecycleEventBase<T>> E doFireEvent(E event) { ineformEventbus.fireEvent(event); return event; } public void setSaveBtnStyle(String style){ view.setSaveBtnStyle(style); } public void addSaveBtnStyle(String style){ view.addSaveBtnStyle(style); } public void setCancelBtnStyle(String style){ view.setCancelBtnStyle(style); } public void addCancelBtnStyle(String style){ view.addCancelBtnStyle(style); } public boolean isDisplayNothingToSaveMsg() { return displayNothingToSaveMsg; } /** * This flag indicates whether we want to display an error message * when the save button is clicked, but there were no modifications * * @param display */ public void displayNothingToSaveMsg(boolean display) { this.displayNothingToSaveMsg = display; } @Override public void saveClicked() { save(); } @Override public void cancelClicked() { if (!fireBeforeCancelEvent().isCancelled()) fireCancelledEvent(); } @Override public void deleteClicked() { AssistedObject data = getInitialOrEmptyData(); if (data.getId() == IFConsts.NEW_ITEM_ID) throw new RuntimeException("Delete called for a newly created object"); ineDataConnector.objectDeleteRequested(data, new DeleteCallback(data.getId())); } @Override public HandlerRegistration addFormSavedHandlerFromView(SavedEvent.Handler handler) { return addSavedHandler(handler); } @Override public HandlerRegistration addFormAfterUnsuccesfulSaveHandlerFromView( AfterUnsuccessfulSaveEvent.Handler handler) { return addAfterUnsuccesfulSaveHandler(handler); } }
Add getter to get IneDataConnector git-svn-id: 1119f77b1e9a9b65230954af94f4d37570a443ef@823 7ef67a20-634e-90e6-9ab5-3f2075439470
ineform/src/main/java/com/inepex/ineForm/client/form/SaveCancelForm.java
Add getter to get IneDataConnector
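The commit above adds a getIneDataConnector() accessor to SaveCancelForm. A minimal usage sketch is shown below; formCtx, the descriptor name, connector, someAssistedObject and resultCallback are illustrative placeholders, since in practice the instances come from the assisted-inject factory rather than direct construction.

// Illustrative only: the accessor returns the same connector the form was
// built with, so callers can reuse it for further manipulation requests.
SaveCancelForm form = new SaveCancelForm(formCtx, "personDescriptor", null, connector, null);
IneDataConnector sameConnector = form.getIneDataConnector();
// e.g. issue a delete through the same data channel the form uses for saves
sameConnector.objectDeleteRequested(someAssistedObject, resultCallback);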
Java
mit
6991a3b086791dbbec8e6b52033524b90196ef19
0
EasyAssessSystem/track,EasyAssessSystem/track
package com.stardust.easyassess.track.models.plan; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.HashMap; import java.util.Map; public class IQCPlanSpecimen { public final static String SPECIMEN_TYPE_SELECTION = "S"; public final static String SPECIMEN_TYPE_TARGET_WITH_FIX_FLOAT = "T"; public final static String SPECIMEN_TYPE_TARGET_WITH_PERCENTAGE_FLOAT = "P"; private String type = SPECIMEN_TYPE_SELECTION; private String number; private String value; private double targetValue; private double floatValue; private Map<String, Integer> enumValues = new HashMap(); public String getType() { return type; } public void setType(String type) { this.type = type; } public String getNumber() { return number; } public void setNumber(String number) { this.number = number; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public double getTargetValue() { return targetValue; } public void setTargetValue(double targetValue) { this.targetValue = targetValue; } public double getFloatValue() { return floatValue; } public void setFloatValue(double floatValue) { this.floatValue = floatValue; } public Map<String, Integer> getEnumValues() { return enumValues; } public void setEnumValues(Map<String, Integer> enumValues) { this.enumValues = enumValues; } @JsonIgnore public boolean isInControl() { boolean result = false; if (getValue() != null) { switch (type) { case SPECIMEN_TYPE_SELECTION: if (Math.abs(new Double(getTargetValue()).intValue() - enumValues.get(getValue())) <= new Double(getFloatValue()).intValue()) { result = true; } break; case SPECIMEN_TYPE_TARGET_WITH_FIX_FLOAT: if (Math.abs(getTargetValue() - getSafelyNumberValue()) <= new Double(getFloatValue()).doubleValue()) { result = true; } break; case SPECIMEN_TYPE_TARGET_WITH_PERCENTAGE_FLOAT: double diff = getTargetValue() - getSafelyNumberValue(); double percentage = (diff/getTargetValue()) * 100; if (percentage < getFloatValue()) { result = true; } break; } } return result; } private double getSafelyNumberValue() { String value = getValue(); if (value != null && !value.isEmpty()) { value = value.replaceAll("[^-+.\\d]", ""); } try { return Double.parseDouble(value); } catch (NumberFormatException e) { return 0; } } }
src/main/java/com/stardust/easyassess/track/models/plan/IQCPlanSpecimen.java
package com.stardust.easyassess.track.models.plan; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.HashMap; import java.util.Map; public class IQCPlanSpecimen { public final static String SPECIMEN_TYPE_SELECTION = "S"; public final static String SPECIMEN_TYPE_TARGET_WITH_FIX_FLOAT = "T"; public final static String SPECIMEN_TYPE_TARGET_WITH_PERCENTAGE_FLOAT = "P"; private String type = SPECIMEN_TYPE_SELECTION; private String number; private String value; private double targetValue; private double floatValue; private Map<String, Integer> enumValues = new HashMap(); public String getType() { return type; } public void setType(String type) { this.type = type; } public String getNumber() { return number; } public void setNumber(String number) { this.number = number; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public double getTargetValue() { return targetValue; } public void setTargetValue(double targetValue) { this.targetValue = targetValue; } public double getFloatValue() { return floatValue; } public void setFloatValue(double floatValue) { this.floatValue = floatValue; } public Map<String, Integer> getEnumValues() { return enumValues; } public void setEnumValues(Map<String, Integer> enumValues) { this.enumValues = enumValues; } @JsonIgnore public boolean isInControl() { boolean result = false; switch (type) { case SPECIMEN_TYPE_SELECTION: if (Math.abs(new Double(getTargetValue()).intValue() - enumValues.get(getValue())) <= new Double(getFloatValue()).intValue()) { result = true; } break; case SPECIMEN_TYPE_TARGET_WITH_FIX_FLOAT: if (Math.abs(getTargetValue() - getSafelyNumberValue()) <= new Double(getFloatValue()).doubleValue()) { result = true; } break; case SPECIMEN_TYPE_TARGET_WITH_PERCENTAGE_FLOAT: double diff = getTargetValue() - getSafelyNumberValue(); double percentage = (diff/getTargetValue()) * 100; if (percentage < getFloatValue()) { result = true; } break; } return result; } private double getSafelyNumberValue() { String value = getValue(); if (value != null && !value.isEmpty()) { value = value.replaceAll("[^-+.\\d]", ""); } try { return Double.parseDouble(value); } catch (NumberFormatException e) { return 0; } } }
NPE fixes
src/main/java/com/stardust/easyassess/track/models/plan/IQCPlanSpecimen.java
NPE fixes
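The "NPE fixes" change above wraps the switch in isInControl() with a null check on getValue(), so a specimen with no recorded value now reports out-of-control instead of throwing a NullPointerException (previously enumValues.get(null) or Double.parseDouble(null) would throw). A small, self-contained sketch of the guarded behaviour, with invented sample numbers, is given below.

// Illustrative check of the guarded isInControl(); assumes IQCPlanSpecimen is
// on the classpath (package com.stardust.easyassess.track.models.plan).
public class IQCPlanSpecimenDemo {
    public static void main(String[] args) {
        IQCPlanSpecimen specimen = new IQCPlanSpecimen();
        specimen.setType(IQCPlanSpecimen.SPECIMEN_TYPE_TARGET_WITH_FIX_FLOAT);
        specimen.setTargetValue(5.0);
        specimen.setFloatValue(0.5);
        System.out.println(specimen.isInControl()); // false: no value recorded yet
        specimen.setValue("5.2");
        System.out.println(specimen.isInControl()); // true: |5.0 - 5.2| <= 0.5
    }
}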
Java
mit
a139ca26f87502eb159870f918d25ab7b1ee53d2
0
shyamu/TransLoc-Android-Widget
package com.shyamu.translocwidget; import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.app.Activity; import android.appwidget.AppWidgetManager; import android.content.Intent; import android.os.AsyncTask; import android.os.Bundle; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.Spinner; public class WidgetConfigurationActivity extends Activity { private int mAppWidgetId = 0; private String agencyId = ""; String routeId = ""; Spinner sSelectAgency, sSelectRoute, sSelectStop; Button bReset, bMakeWidget; ArrayList<String> agencyLongNameArray = new ArrayList<String>(); ArrayList<String> agencyShortNameArray = new ArrayList<String>(); ArrayList<String> agencyIdArray = new ArrayList<String>(); ArrayList<String> routeLongNameArray = new ArrayList<String>(); ArrayList<String> routeShortNameArray = new ArrayList<String>(); ArrayList<String> routeIdArray = new ArrayList<String>(); ArrayList<String> stopNameArray = new ArrayList<String>(); ArrayList<String> stopShortNameArray = new ArrayList<String>(); ArrayList<String> stopIdArray = new ArrayList<String>(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.configuration_activity); // Getting references to Spinners and Buttons sSelectAgency = (Spinner) findViewById(R.id.sSelectAgency); sSelectRoute = (Spinner) findViewById(R.id.sSelectRoute); sSelectStop = (Spinner) findViewById(R.id.sSelectStop); bReset = (Button) findViewById(R.id.bReset); bMakeWidget = (Button) findViewById(R.id.bMakeWidget); // Make agency selected listener AdapterView.OnItemSelectedListener agencySelectedListener = new AgencySpinnerActivity(); sSelectAgency.setOnItemSelectedListener(agencySelectedListener); // Make route selected listener AdapterView.OnItemSelectedListener stopSelectedListener = new RouteSpinnerActivity(); sSelectRoute.setOnItemSelectedListener(stopSelectedListener); // Populate agency spinner PopulateAgenciesTask task = new PopulateAgenciesTask(); task.execute(); // Defining a click event listener for the button "Set Color" OnClickListener setColorClickedListener = new OnClickListener() { @Override public void onClick(View v) { // colorPicker(); } }; Intent intent = getIntent(); Bundle extras = intent.getExtras(); if (extras != null) { mAppWidgetId = extras.getInt(AppWidgetManager.EXTRA_APPWIDGET_ID, AppWidgetManager.INVALID_APPWIDGET_ID); } // Setting the click listener on the "Set Color" button // btnSetColor.setOnClickListener(setColorClickedListener); } private class PopulateAgenciesTask extends AsyncTask<Void, Void, Void> { protected Void doInBackground(Void... 
voids) { String response = getJsonResponse("http://api.transloc.com/1.1/agencies.json"); try { JSONObject jObject = new JSONObject(response); JSONArray jArray = jObject.getJSONArray("data"); for (int i = 0; i < jArray.length(); i++) { Log.v("From jArray", jArray.getJSONObject(i).getString( "long_name")); agencyLongNameArray.add(jArray.getJSONObject(i).getString( "long_name")); agencyShortNameArray.add(jArray.getJSONObject(i).getString( "short_name")); agencyIdArray.add(jArray.getJSONObject(i).getString( "agency_id")); } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); Log.e("JSON", "ERROR in getting JSON data"); } return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); ArrayAdapter<String> agencyArrayAdapter = new ArrayAdapter<String>(getBaseContext(), android.R.layout.simple_list_item_1, agencyLongNameArray); sSelectAgency.setAdapter(agencyArrayAdapter); } } private class PopulateRoutesTask extends AsyncTask<Void, Void, Void> { @Override protected void onPreExecute() { int position = sSelectAgency.getSelectedItemPosition(); agencyId = agencyIdArray.get(position); Log.v("DEBUG", "Selected agency ID is " + agencyId); routeLongNameArray.clear(); routeIdArray.clear(); routeShortNameArray.clear(); stopShortNameArray.clear(); stopNameArray.clear(); stopIdArray.clear(); } protected Void doInBackground(Void... voids) { String response = getJsonResponse("http://api.transloc.com/1.1/routes.json?agencies=" + agencyId); Log.v("DEBUG", response); try { JSONObject jObject = new JSONObject(response); JSONObject jObjectData = jObject.getJSONObject("data"); JSONArray jArrayAgency = jObjectData.getJSONArray(agencyId); for (int i = 0; i < jArrayAgency.length(); i++) { routeLongNameArray.add(jArrayAgency.getJSONObject(i).getString( "long_name")); routeShortNameArray.add(jArrayAgency.getJSONObject(i).getString( "short_name")); routeIdArray.add(jArrayAgency.getJSONObject(i).getString( "route_id")); } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); Log.e("JSON", "ERROR in getting JSON data"); } return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); ArrayAdapter<String> routeArrayAdapter = new ArrayAdapter<String>(getBaseContext(), android.R.layout.simple_list_item_1, routeLongNameArray); sSelectRoute.setAdapter(routeArrayAdapter); } } private class PopulateStopsTask extends AsyncTask<Void, Void, Void> { @Override protected void onPreExecute() { int position = sSelectRoute.getSelectedItemPosition(); routeId = routeIdArray.get(position); Log.v("DEBUG", "Selected route ID is " + routeId); routeLongNameArray.clear(); routeIdArray.clear(); routeShortNameArray.clear(); stopIdArray.clear(); stopNameArray.clear(); stopShortNameArray.clear(); } protected Void doInBackground(Void... 
voids) { String response = getJsonResponse("http://api.transloc.com/1.1/stops.json?agencies=" + agencyId); Log.v("DEBUG", response); try { JSONObject jObject = new JSONObject(response); JSONArray jArrayData = jObject.getJSONArray("data"); for(int i = 0; i < jArrayData.length(); i++) { JSONObject jObjectStop = jArrayData.getJSONObject(i); JSONArray jArrayStopRoutes = jObjectStop.getJSONArray("routes"); for(int j = 0; j < jArrayStopRoutes.length(); j++) { if(jArrayStopRoutes.get(j).equals(routeId)) { stopNameArray.add((String) jObjectStop.get("name")); stopIdArray.add((String) jObjectStop.get("stop_id")); break; } } } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); Log.e("JSON", "ERROR in getting JSON data"); } return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); ArrayAdapter<String> stopArrayAdapter = new ArrayAdapter<String>(getBaseContext(), android.R.layout.simple_list_item_1, stopNameArray); sSelectRoute.setAdapter(stopArrayAdapter); } } /* * public void colorPicker() { * * // initialColor is the initially-selected color to be shown in the * rectangle on the left of the arrow. // for example, 0xff000000 is black, * 0xff0000ff is blue. Please be aware of the initial 0xff which is the * alpha. AmbilWarnaDialog dialog = new AmbilWarnaDialog(this, 0xff0000ff, * new OnAmbilWarnaListener() { * * // Executes, when user click Cancel button * * @Override public void onCancel(AmbilWarnaDialog dialog){ } * * // Executes, when user click OK button * * @Override public void onOk(AmbilWarnaDialog dialog, int color) { // * Create an Intent to launch WidgetConfigurationActivity screen Intent * intent = new Intent(getBaseContext(), WidgetConfigurationActivity.class); * * intent.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, mAppWidgetId); * * // This is needed to make this intent different from its previous intents * intent.setData(Uri.parse("tel:/"+ (int)System.currentTimeMillis())); * * // Creating a pending intent, which will be invoked when the user // * clicks on the widget PendingIntent pendingIntent = * PendingIntent.getActivity(getBaseContext(), 0, intent, * PendingIntent.FLAG_UPDATE_CURRENT); * * // Getting an instance of WidgetManager AppWidgetManager appWidgetManager * = AppWidgetManager.getInstance(getBaseContext()); * * // Instantiating the class RemoteViews with widget_layout RemoteViews * views = new RemoteViews(getBaseContext().getPackageName(), * R.layout.widget_layout); * * // Setting the background color of the widget * views.setInt(R.id.widget_aclock, "setBackgroundColor", color); * * // Attach an on-click listener to the clock * views.setOnClickPendingIntent(R.id.widget_aclock, pendingIntent); * * // Tell the AppWidgetManager to perform an update on the app widget * appWidgetManager.updateAppWidget(mAppWidgetId, views); * * // Return RESULT_OK from this activity Intent resultValue = new Intent(); * resultValue.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, mAppWidgetId); * setResult(RESULT_OK, resultValue); finish(); } }); dialog.show(); * * } */ private class AgencySpinnerActivity extends Activity implements AdapterView.OnItemSelectedListener { public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) { // An item was selected. 
You can retrieve the selected item using // parent.getItemAtPosition(pos) sSelectRoute.setEnabled(true); // Get routes PopulateRoutesTask task = new PopulateRoutesTask(); task.execute(); } public void onNothingSelected(AdapterView<?> parent) { // Another interface callback } } private class RouteSpinnerActivity extends Activity implements AdapterView.OnItemSelectedListener { public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) { // An item was selected. You can retrieve the selected item using // parent.getItemAtPosition(pos) sSelectRoute.setEnabled(true); // Get routes PopulateStopsTask task = new PopulateStopsTask(); task.execute(); } public void onNothingSelected(AdapterView<?> parent) { // Another interface callback } } private String getJsonResponse(String url) { String response = ""; DefaultHttpClient client = new DefaultHttpClient(); HttpGet httpGet = new HttpGet(url); try { HttpResponse execute = client.execute(httpGet); InputStream content = execute.getEntity().getContent(); BufferedReader buffer = new BufferedReader( new InputStreamReader(content)); String s = ""; while ((s = buffer.readLine()) != null) { response += s; } } catch (Exception e) { e.printStackTrace(); } return response; } }
TransLocWidget/src/com/shyamu/translocwidget/WidgetConfigurationActivity.java
package com.shyamu.translocwidget; import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.app.Activity; import android.appwidget.AppWidgetManager; import android.content.Intent; import android.os.AsyncTask; import android.os.Bundle; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.Spinner; public class WidgetConfigurationActivity extends Activity { private int mAppWidgetId = 0; private String agencyId = ""; String routeId = ""; Spinner sSelectAgency, sSelectRoute, sSelectStop; Button bReset, bMakeWidget; ArrayList<String> agencyLongNameArray = new ArrayList<String>(); ArrayList<String> agencyShortNameArray = new ArrayList<String>(); ArrayList<String> agencyIdArray = new ArrayList<String>(); ArrayList<String> routeLongNameArray = new ArrayList<String>(); ArrayList<String> routeShortNameArray = new ArrayList<String>(); ArrayList<String> routeIdArray = new ArrayList<String>(); ArrayList<String> stopLongNameArray = new ArrayList<String>(); ArrayList<String> stopShortNameArray = new ArrayList<String>(); ArrayList<String> stopIdArray = new ArrayList<String>(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.configuration_activity); // Getting references to Spinners and Buttons sSelectAgency = (Spinner) findViewById(R.id.sSelectAgency); sSelectRoute = (Spinner) findViewById(R.id.sSelectRoute); sSelectStop = (Spinner) findViewById(R.id.sSelectStop); bReset = (Button) findViewById(R.id.bReset); bMakeWidget = (Button) findViewById(R.id.bMakeWidget); // Make agency selected listener AdapterView.OnItemSelectedListener agencySelectedListener = new AgencySpinnerActivity(); sSelectAgency.setOnItemSelectedListener(agencySelectedListener); // Populate agency spinner PopulateAgenciesTask task = new PopulateAgenciesTask(); task.execute(); // Defining a click event listener for the button "Set Color" OnClickListener setColorClickedListener = new OnClickListener() { @Override public void onClick(View v) { // colorPicker(); } }; Intent intent = getIntent(); Bundle extras = intent.getExtras(); if (extras != null) { mAppWidgetId = extras.getInt(AppWidgetManager.EXTRA_APPWIDGET_ID, AppWidgetManager.INVALID_APPWIDGET_ID); } // Setting the click listener on the "Set Color" button // btnSetColor.setOnClickListener(setColorClickedListener); } private class PopulateAgenciesTask extends AsyncTask<Void, Void, Void> { protected Void doInBackground(Void... 
voids) { String response = getJsonResponse("http://api.transloc.com/1.1/agencies.json"); try { JSONObject jObject = new JSONObject(response); JSONArray jArray = jObject.getJSONArray("data"); for (int i = 0; i < jArray.length(); i++) { Log.v("From jArray",jArray.getJSONObject(i).getString( "long_name")); agencyLongNameArray.add(jArray.getJSONObject(i).getString( "long_name")); agencyShortNameArray.add(jArray.getJSONObject(i).getString( "short_name")); agencyIdArray.add(jArray.getJSONObject(i).getString( "agency_id")); } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); Log.e("JSON","ERROR in getting JSON data"); } return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); ArrayAdapter<String> agencyArrayAdapter = new ArrayAdapter<String>(getBaseContext(), android.R.layout.simple_list_item_1,agencyLongNameArray); sSelectAgency.setAdapter(agencyArrayAdapter); } } private class PopulateRoutesTask extends AsyncTask<Void, Void, Void> { @Override protected void onPreExecute() { int position = sSelectAgency.getSelectedItemPosition(); agencyId = agencyIdArray.get(position); Log.v("DEBUG","Selected agency ID is " + agencyId); routeLongNameArray.clear(); routeIdArray.clear(); routeShortNameArray.clear(); stopShortNameArray.clear(); stopLongNameArray.clear(); stopIdArray.clear(); } protected Void doInBackground(Void... voids) { String response = getJsonResponse("http://api.transloc.com/1.1/routes.json?agencies=" + agencyId); Log.v("DEBUG", response); try { JSONObject jObject = new JSONObject(response); JSONObject jObjectData = jObject.getJSONObject("data"); JSONArray jArrayAgency = jObjectData.getJSONArray(agencyId); for(int i = 0; i < jArrayAgency.length(); i++) { routeLongNameArray.add(jArrayAgency.getJSONObject(i).getString( "long_name")); routeShortNameArray.add(jArrayAgency.getJSONObject(i).getString( "short_name")); routeIdArray.add(jArrayAgency.getJSONObject(i).getString( "route_id")); } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); Log.e("JSON","ERROR in getting JSON data"); } return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); ArrayAdapter<String> routeArrayAdapter = new ArrayAdapter<String>(getBaseContext(), android.R.layout.simple_list_item_1,routeLongNameArray); sSelectRoute.setAdapter(routeArrayAdapter); } } private class PopulateStopsTask extends AsyncTask<Void, Void, Void> { @Override protected void onPreExecute() { int position = sSelectRoute.getSelectedItemPosition(); routeId = routeIdArray.get(position); Log.v("DEBUG","Selected route ID is " + routeId); routeLongNameArray.clear(); routeIdArray.clear(); routeShortNameArray.clear(); stopIdArray.clear(); stopLongNameArray.clear(); stopShortNameArray.clear(); } protected Void doInBackground(Void... 
voids) { String response = getJsonResponse("http://api.transloc.com/1.1/stops.json?agencies=" + agencyId); Log.v("DEBUG", response); try { JSONObject jObject = new JSONObject(response); JSONObject jObjectData = jObject.getJSONObject("data"); JSONArray jArrayAgency = jObjectData.getJSONArray(agencyId); for(int i = 0; i < jArrayAgency.length(); i++) { routeLongNameArray.add(jArrayAgency.getJSONObject(i).getString( "long_name")); routeShortNameArray.add(jArrayAgency.getJSONObject(i).getString( "short_name")); routeIdArray.add(jArrayAgency.getJSONObject(i).getString( "route_id")); } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); Log.e("JSON","ERROR in getting JSON data"); } return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); ArrayAdapter<String> routeArrayAdapter = new ArrayAdapter<String>(getBaseContext(), android.R.layout.simple_list_item_1,routeLongNameArray); sSelectRoute.setAdapter(routeArrayAdapter); } } /* * public void colorPicker() { * * // initialColor is the initially-selected color to be shown in the * rectangle on the left of the arrow. // for example, 0xff000000 is black, * 0xff0000ff is blue. Please be aware of the initial 0xff which is the * alpha. AmbilWarnaDialog dialog = new AmbilWarnaDialog(this, 0xff0000ff, * new OnAmbilWarnaListener() { * * // Executes, when user click Cancel button * * @Override public void onCancel(AmbilWarnaDialog dialog){ } * * // Executes, when user click OK button * * @Override public void onOk(AmbilWarnaDialog dialog, int color) { // * Create an Intent to launch WidgetConfigurationActivity screen Intent * intent = new Intent(getBaseContext(), WidgetConfigurationActivity.class); * * intent.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, mAppWidgetId); * * // This is needed to make this intent different from its previous intents * intent.setData(Uri.parse("tel:/"+ (int)System.currentTimeMillis())); * * // Creating a pending intent, which will be invoked when the user // * clicks on the widget PendingIntent pendingIntent = * PendingIntent.getActivity(getBaseContext(), 0, intent, * PendingIntent.FLAG_UPDATE_CURRENT); * * // Getting an instance of WidgetManager AppWidgetManager appWidgetManager * = AppWidgetManager.getInstance(getBaseContext()); * * // Instantiating the class RemoteViews with widget_layout RemoteViews * views = new RemoteViews(getBaseContext().getPackageName(), * R.layout.widget_layout); * * // Setting the background color of the widget * views.setInt(R.id.widget_aclock, "setBackgroundColor", color); * * // Attach an on-click listener to the clock * views.setOnClickPendingIntent(R.id.widget_aclock, pendingIntent); * * // Tell the AppWidgetManager to perform an update on the app widget * appWidgetManager.updateAppWidget(mAppWidgetId, views); * * // Return RESULT_OK from this activity Intent resultValue = new Intent(); * resultValue.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, mAppWidgetId); * setResult(RESULT_OK, resultValue); finish(); } }); dialog.show(); * * } */ private class AgencySpinnerActivity extends Activity implements AdapterView.OnItemSelectedListener { public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) { // An item was selected. 
You can retrieve the selected item using // parent.getItemAtPosition(pos) sSelectRoute.setEnabled(true); // Get routes PopulateRoutesTask task = new PopulateRoutesTask(); task.execute(); } public void onNothingSelected(AdapterView<?> parent) { // Another interface callback } } private String getJsonResponse(String url) { String response = ""; DefaultHttpClient client = new DefaultHttpClient(); HttpGet httpGet = new HttpGet(url); try { HttpResponse execute = client.execute(httpGet); InputStream content = execute.getEntity().getContent(); BufferedReader buffer = new BufferedReader( new InputStreamReader(content)); String s = ""; while ((s = buffer.readLine()) != null) { response += s; } } catch (Exception e) { e.printStackTrace(); } return response; } }
added AsyncTask for routes (error present)
TransLocWidget/src/com/shyamu/translocwidget/WidgetConfigurationActivity.java
added AsyncTask for routes (error present)
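In the new PopulateStopsTask above, onPostExecute builds the stop adapter but attaches it to sSelectRoute, which may be the error the commit message refers to. A minimal sketch of the presumably intended wiring is shown below; this is an inference from the surrounding code, not part of the recorded commit.

// Sketch (assumption): bind the fetched stop names to the stop spinner
// instead of overwriting the route spinner's adapter.
@Override
protected void onPostExecute(Void result) {
    super.onPostExecute(result);
    ArrayAdapter<String> stopArrayAdapter = new ArrayAdapter<String>(getBaseContext(),
            android.R.layout.simple_list_item_1, stopNameArray);
    sSelectStop.setAdapter(stopArrayAdapter);
}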
Java
mit
4b0aa02095e4f8dabaf874621cdc2ad4804874c0
0
stucco/graph-db-connection
package gov.pnnl.stucco.dbconnect.inmemory; import gov.pnnl.stucco.dbconnect.Condition; import gov.pnnl.stucco.dbconnect.DBConnectionBase; import gov.pnnl.stucco.dbconnect.DBConnectionAlignment; import gov.pnnl.stucco.dbconnect.DBConnectionIndexerInterface; import gov.pnnl.stucco.dbconnect.DBConnectionTestInterface; import gov.pnnl.stucco.dbconnect.DBConstraint; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import org.apache.commons.io.IOUtils; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import com.tinkerpop.blueprints.impls.orient.OrientVertex; /** * This class represents a concrete implementation of an in-memory DB Connection Type * */ public class InMemoryDBConnection extends DBConnectionBase{ /** logger variable to track activities in this class*/ private Logger logger = null; /** contains all the vertices via a map of maps where the values are a property map*/ private Map<String, Map<String, Object>> vertices = null; /** contains a mapping of vertexIDs to the actual vertex canonical name*/ //private Map<String, String> vertIDs = null; /** * contains a map of edges and their properties. * * <p> Note: We're keeping this structure even though edge IDs are no * longer exposed in the interface, in order to minimize code changes. */ private Map<String, Map<String, Object>> edges = null; //TODO: make/use an Edge class, to store inV, outV, label? And maybe index that. //private Map<String, String> edgeIDs = null; //edges don't have meaningful names. /** * Index of vert fields. * This is a map the field name to a map of values to matching IDs * (eg. "name" -> some vertex name -> set of vert ID(s) which have that name) */ private Map<String, Map<String, Set<String>>> indexedVertFields = null; /** * Constructor of an InMemory type of DB Connection */ public InMemoryDBConnection(){ vertices = new HashMap<String, Map<String, Object>>(); //vertIDs = new HashMap<String, String>(); edges = new HashMap<String, Map<String, Object>>(); //edgeIDs = new HashMap<String, String>(); //edges don't have meaningful names. indexedVertFields = new HashMap<String, Map<String, Set<String>>>(); //initialize any indexes. 
indexedVertFields.put("name", new HashMap<String, Set<String>>()); } /** * return the number of vertices * @return count */ public int getVertCount(){ return vertices.size(); } /** * return the number of edges * @return count */ public int getEdgeCount(){ return edges.size(); } /** * return the vertex's property map given the vertex ID * @param vertID * @return property map */ public Map<String, Object> getVertByID(String vertID){ return vertices.get(vertID); } /** * returns list of edge info maps for the outgoing edges of this vertex * @param vertName * @return list of edge property maps */ public List<Map<String, Object>> getOutEdges(String outVertID) throws IllegalArgumentException{ if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid outVertID"); } List<Map<String, Object>> foundEdges = new LinkedList<Map<String, Object>>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("outVertID")).equals(outVertID) ){ //inVertID = currEdge.get("inVertID"); //outVertID = currEdge.get("outVertID"); //relation = currEdge.get("relation"); foundEdges.add( currEdge ); } } return foundEdges; } /** * returns list of edge info maps for the incoming edges of this vertex * @param vertName * @return list of edge property maps */ public List<Map<String, Object>> getInEdges(String inVertID) throws IllegalArgumentException{ if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid inVertID"); } List<Map<String, Object>> foundEdges = new LinkedList<Map<String, Object>>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("inVertID")).equals(inVertID) ){ foundEdges.add( currEdge ); } } return foundEdges; } /** * return a list of Incoming vertices based on their edge type relation * @param outVertID * @param relation * @return list of vertices */ public List<String> getInVertIDsByRelation(String outVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot get edge with missing or invalid relation"); } if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid outVertID"); } List<String> relatedIDs = new LinkedList<String>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("relation")).equals(relation) ){ if( ((String)currEdge.get("outVertID")).equals(outVertID) ){ relatedIDs.add( (String)currEdge.get("inVertID") ); //TODO: check valid state here? 
} } } return relatedIDs; } /** * return a list of Outgoing vertices based on their edge type relation * @param inVertID * @param relation * @return list of vertices */ public List<String> getOutVertIDsByRelation(String inVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot get edge with missing or invalid relation"); } if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid inVertID"); } List<String> relatedIDs = new LinkedList<String>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("relation")).equals(relation) ){ if( ((String)currEdge.get("inVertID")).equals(inVertID) ){ relatedIDs.add( (String)currEdge.get("outVertID") ); //TODO: check valid state here? } } } return relatedIDs; } /** * get the list of incoming or outgoing vertices based on edge relationship * @param vertID * @param relation * @return list of vertices */ public List<String> getVertIDsByRelation(String vertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot get edge with missing or invalid relation"); } if(vertID == null || vertID.equals("") || !vertices.containsKey(vertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid inVertID"); } List<String> relatedIDs = new LinkedList<String>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("relation")).equals(relation) ){ if( ((String)currEdge.get("inVertID")).equals(vertID) || ((String)currEdge.get("outVertID")).equals(vertID)){ relatedIDs.add( (String)currEdge.get("outVertID") ); //TODO: check valid state here? } } } return relatedIDs; } @Override public int getEdgeCountByRelation(String inVertID, String outVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot get edge with missing or invalid relation"); } if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid inVertID"); } if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid outVertID"); } int count = 0; for(Map<String, Object> currEdge : edges.values()) { if( currEdge.get("relation").equals(relation) && currEdge.get("outVertID").equals(outVertID) && currEdge.get("inVertID").equals(inVertID) ) { count++; } } return count; } /** * get a list of vertex IDs based on a list of constraints * @param constraints list of constraints * @return list of vertex IDs */ public List<String> getVertIDsByConstraints(List<DBConstraint> constraints){ Set<String> candidateIDs = null; Set<String> nonMatchingIDs = new HashSet<String>(); List<String> matchingIDs = new LinkedList<String>(); //First, generate candidateIDs set. //Note that after candidateIDs is populated here, it will not be modified. //TODO: really, we want to create a set of candidate ids for each index used, then find the overlap, // then match against any constraints that weren't used. boolean indicesUsed = false; if(indexedVertFields.size() > 0){ //TODO: needs better test coverage for use of indices //This should use indexed fields to find candidateIDs, then find the nonMatchingIDs below as usual. //we need to decide if only exact matches are allowed, or if ranges & etc. are ok here. 
//also, somehow indicate that the constraints used here are 'done', so they aren't re-checked below. candidateIDs = new HashSet<String>(); for(DBConstraint c : constraints){ if(c.getCond() != Condition.eq) continue; if(indexedVertFields.containsKey(c.getProp())){ indicesUsed = true; Map<String, Set<String>> currIndex = indexedVertFields.get(c.getProp()); String currValue = c.getVal().toString(); Set<String> currSet = currIndex.get(currValue); if(currSet != null){ candidateIDs.addAll(currSet); } } } } if(!indicesUsed){ //if no initial matchingIDs set was generated yet, use all IDs candidateIDs = vertices.keySet(); } //make set of non-matching candidates, based on constraints for(String id : candidateIDs){ Map<String, Object> candidateVert = vertices.get(id); for(DBConstraint c : constraints){ Object candidateValue = candidateVert.get(c.getProp()); if( !compare(candidateValue, c.getCond(), c.getVal()) ){ nonMatchingIDs.add(id); break; } } } // build the matchingIDs list, based on candidateIDs and nonMatchingIDs for(String id : candidateIDs){ if( !nonMatchingIDs.contains(id) ){ matchingIDs.add(id); } } return matchingIDs; } /** * method to compare two objects that can use the conditional object * @param o1 * @param cond * @param o2 * @return true or false */ private boolean compare(Object o1, Condition cond, Object o2){ if(o1 == null && cond == Condition.eq && o2 == null) return true; if(o1 == null || o2 == null) return false; if(cond == Condition.eq){ return o1.equals(o2); } if(cond == Condition.neq){ return !o1.equals(o2); } if(cond == Condition.gt){ if(o1 instanceof Comparable && o2 instanceof Comparable){ Comparable c1 = (Comparable)o1; Comparable c2 = (Comparable)o2; return ( c1.compareTo(c2) > 0 ); }else{ return false; } } if(cond == Condition.gte){ if(o1 instanceof Comparable && o2 instanceof Comparable){ Comparable c1 = (Comparable)o1; Comparable c2 = (Comparable)o2; return ( c1.compareTo(c2) >= 0 ); }else{ return false; } } if(cond == Condition.lt){ if(o1 instanceof Comparable && o2 instanceof Comparable){ Comparable c1 = (Comparable)o1; Comparable c2 = (Comparable)o2; return ( c1.compareTo(c2) < 0 ); }else{ return false; } } if(cond == Condition.lte){ if(o1 instanceof Comparable && o2 instanceof Comparable){ Comparable c1 = (Comparable)o1; Comparable c2 = (Comparable)o2; return ( c1.compareTo(c2) <= 0 ); }else{ return false; } } if(cond == Condition.contains){ if(o1 instanceof Collection){ Collection c1 = (Collection)o1; return c1.contains(o2); }else{ return false; } } if(cond == Condition.substring){ if(o1 instanceof String){ String s1 = (String)o1; String s2 = ""; if(o2 instanceof String || o2 instanceof Character){ s2 += o2; return s1.contains(s2); } } return false; } return false; } /** * remove a vertex by a vertex ID * @param vertID */ public void removeVertByID(String vertID){ Map<String,Object> vert = vertices.get(vertID); if(vert != null){ removeVertFromIndex(vert, vertID); vertices.remove(vertID); //TODO: remove edges that contain this vertID!!!!! 
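// Hedged sketch, not in the original source: one way to resolve the TODO above is to
// drop any edges that still reference the removed vertex. This relies only on the
// "inVertID"/"outVertID" keys already used by the edge maps elsewhere in this class.
Set<String> danglingEdgeIDs = new HashSet<String>();
for(Map.Entry<String, Map<String, Object>> entry : edges.entrySet()){
    Map<String, Object> currEdge = entry.getValue();
    if( vertID.equals(currEdge.get("inVertID")) || vertID.equals(currEdge.get("outVertID")) ){
        danglingEdgeIDs.add(entry.getKey());
    }
}
// remove after the scan, to avoid modifying the map while iterating over it
for(String edgeID : danglingEdgeIDs){
    edges.remove(edgeID);
}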
} } /** * add a vertex given a property map * @param vert - property map * @return vertexID */ public String addVertex(Map<String, Object> vert){ // make sure all multi-value properties are sets convertAllMultiValuesToSet(vert); String vertID = String.valueOf( UUID.randomUUID() ); vertices.put(vertID, vert); //update any indices addVertToIndex(vert, vertID); return vertID; } private void addVertToIndex(Map<String, Object> vert, String vertID){ for(String prop : vert.keySet()){ if(indexedVertFields.containsKey(prop)){ Map<String, Set<String>> currIndex = indexedVertFields.get(prop); String currValue = vert.get(prop).toString(); Set<String> currSet = currIndex.get(currValue); if(currSet == null){ currSet = new HashSet<String>(); currIndex.put(currValue, currSet); } currSet.add(vertID); } } } private void removeVertFromIndex(Map<String, Object> vert, String vertID){ for(String prop : vert.keySet()){ if(indexedVertFields.containsKey(prop)){ Map<String, Set<String>> currIndex = indexedVertFields.get(prop); String currValue = vert.get(prop).toString(); currIndex.get(currValue).remove(vertID); } } } /** * add and edge * @param inVertID ID of the incoming vertex edge * @param outVertID - ID of the outgoing vertex edge * @param relation - type of edge relation */ public void addEdge(String inVertID, String outVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot add edge with missing or invlid relation"); } if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot add edge with missing or invalid inVertID"); } if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot add edge with missing or invalid outVertID"); } //TODO: check if edge is duplicate?? For now, just add it, duplicates are ok I guess. 
Map<String, Object> newEdge = new HashMap<String, Object>(); newEdge.put("inVertID", inVertID); newEdge.put("outVertID", outVertID); newEdge.put("relation", relation); String edgeID = String.valueOf( UUID.randomUUID() ); edges.put(edgeID, newEdge); //TODO: update any indices } /** * overwrite or add new properties to an existing vertex's property map * @param vertID * @param newVert - property map */ public void updateVertex(String vertID, Map<String, Object> newVert){ Map<String, Object> oldVert = vertices.get(vertID); if(oldVert == null){ throw new IllegalArgumentException("invalid vertex ID"); } removeVertFromIndex(oldVert, vertID); for(Map.Entry<String, Object> entry: newVert.entrySet()){ String key = entry.getKey(); Object newValue = entry.getValue(); updateVertexProperty(vertID, key, newValue); } addVertToIndex(newVert, vertID); } @Override public void close() { // TODO Auto-generated method stub } @Override public void open() { // TODO Auto-generated method stub } @Override public void removeEdgeByRelation(String inVertID, String outVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot add edge with missing or invlid relation"); } if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot add edge with missing or invalid inVertID"); } if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot add edge with missing or invalid outVertID"); } // collect the edge IDs that need to be removed Set<String> edgeIDs = new HashSet<String>(); for(Map.Entry<String, Map<String, Object>> entry : edges.entrySet()) { String edgeID = entry.getKey(); Map<String, Object> currEdge = entry.getValue(); if( currEdge.get("relation").equals(relation) && currEdge.get("outVertID").equals(outVertID) && currEdge.get("inVertID").equals(inVertID) ) { edgeIDs.add(edgeID); } } //remove the IDs we found for(String edgeID : edgeIDs) { edges.remove(edgeID); } } @Override public void removeAllVertices() { vertices.clear(); indexedVertFields.clear(); edges.clear(); } @Override public DBConstraint getConstraint(String property, Condition condition, Object value) { return new InMemoryConstraint(property, condition, value); } @Override public void buildIndex(String indexConfig) { // NO-OP } @Override protected void setPropertyInDB(String id, String key, Object newValue) { vertices.get(id).put(key, newValue); } public void loadState(String filePath) { try { InputStream is = new FileInputStream(filePath); String textContents = IOUtils.toString( is ); is.close(); JSONObject contents = new JSONObject(textContents); JSONObject vertsJSON = contents.getJSONObject("vertices"); JSONArray edgesJSON = contents.getJSONArray("edges"); //add vertices for( Object id : vertsJSON.keySet() ) { JSONObject jsonVert = vertsJSON.getJSONObject(id.toString()); String description = jsonVert.optString("description"); if(description != null && !description.equals("")){ //This is kind of an odd workaround, to prevent ui from treating, eg, "URI: www.blah.com | Type: URL |" as a URL instead of a string. //TODO: this is really a problem in the UI, as far as we care it's still just a string either way. jsonVert.put("description", " " + description); }else{ //ui assumes everything has a description, this is a workaround to avoid having empty text in various places. 
jsonVert.put("description", jsonVert.optString("name")); } Map<String, Object> vert = jsonVertToMap(jsonVert); vertices.put(id.toString(), vert); String name = (String)vert.get("name"); addVertToIndex(vert, id.toString()); //System.out.println("loaded vertex named " + name + " with id: " + id); //for debugging } //add edges. for( int i=0; i<edgesJSON.length(); i++ ) { JSONObject edge = edgesJSON.getJSONObject(i); try { String inVertID = edge.getString("inVertID"); String outVertID = edge.getString("outVertID"); String relation = edge.getString("relation"); int matchingEdgeCount = getEdgeCountByRelation(inVertID, outVertID, relation); if(matchingEdgeCount == 0){ addEdge(inVertID, outVertID, relation); } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block System.err.println("error when loading edge: " + edge); e.printStackTrace(); } } } catch (IOException e) { e.printStackTrace(); } } //TODO: tests public void saveState(String filePath) { try { OutputStream os = new FileOutputStream(filePath); PrintStream printStream = new PrintStream(os); //printStream.print("String"); JSONObject vertsJSON = new JSONObject(); JSONArray edgesJSON = new JSONArray(); //add vertices for( String id : vertices.keySet() ) { Map<String, Object> vert = vertices.get(id); JSONObject currEdge = new JSONObject(); for( String prop : vert.keySet() ){ //TODO: confirm this handles sets properly currEdge.put(prop, vert.get(prop)); } edgesJSON.put(currEdge); } //add edges. for( String id : edges.keySet() ) { Map<String, Object> edge = edges.get(id); JSONObject currEdge = new JSONObject(); for( String prop : edge.keySet() ){ currEdge.put(prop, edge.get(prop)); } edgesJSON.put(currEdge); } JSONObject contents = new JSONObject(); contents.put("vertices", vertsJSON); contents.put("edges", edgesJSON); printStream.print(contents.toString(2)); printStream.close(); os.close(); } catch (IOException e) { e.printStackTrace(); } } }
src/main/java/gov/pnnl/stucco/dbconnect/inmemory/InMemoryDBConnection.java
package gov.pnnl.stucco.dbconnect.inmemory; import gov.pnnl.stucco.dbconnect.Condition; import gov.pnnl.stucco.dbconnect.DBConnectionBase; import gov.pnnl.stucco.dbconnect.DBConnectionAlignment; import gov.pnnl.stucco.dbconnect.DBConnectionIndexerInterface; import gov.pnnl.stucco.dbconnect.DBConnectionTestInterface; import gov.pnnl.stucco.dbconnect.DBConstraint; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import org.apache.commons.io.IOUtils; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import com.tinkerpop.blueprints.impls.orient.OrientVertex; /** * This class represents a concrete implementation of an in-memory DB Connection Type * */ public class InMemoryDBConnection extends DBConnectionBase{ /** logger variable to track activities in this class*/ private Logger logger = null; /** contains all the vertices via a map of maps where the values are a property map*/ private Map<String, Map<String, Object>> vertices = null; /** contains a mapping of vertexIDs to the actual vertex canonical name*/ private Map<String, String> vertIDs = null; /** * contains a map of edges and their properties. * * <p> Note: We're keeping this structure even though edge IDs are no * longer exposed in the interface, in order to minimize code changes. */ private Map<String, Map<String, Object>> edges = null; //TODO: make/use an Edge class, to store inV, outV, label? And maybe index that. //private Map<String, String> edgeIDs = null; //edges don't have meaningful names. /** which property names are indexed*/ private Set<String> indexedVertFields = null; //TODO: not maintaining any indexed fields for now, add later if desired. /** * Constructor of an InMemory type of DB Connection */ public InMemoryDBConnection(){ vertices = new HashMap<String, Map<String, Object>>(); vertIDs = new HashMap<String, String>(); edges = new HashMap<String, Map<String, Object>>(); //edgeIDs = new HashMap<String, String>(); //edges don't have meaningful names. indexedVertFields = new HashSet<String>(); //TODO: initialize any indexes. 
} /** * return the number of vertices * @return count */ public int getVertCount(){ return vertices.size(); } /** * return the number of edges * @return count */ public int getEdgeCount(){ return edges.size(); } /** * return the vertex's property map given the vertex ID * @param vertID * @return property map */ public Map<String, Object> getVertByID(String vertID){ return vertices.get(vertID); } /** * get the vertex's property map using the vertex's canonical name * @param vertName * @return property map */ public Map<String,Object> getVertByName(String vertName) { if(vertName == null || vertName == "") return null; String id = vertIDs.get(vertName); if(id == null) return null; Map<String, Object> retVal = vertices.get(id); if(retVal == null) throw new IllegalStateException("bad state: known vertex name has no known content."); return retVal; } /** * get the vertexID using the canonical name * @param vertName * @return ID */ public String getVertIDByName(String vertName){ if(vertName == null || vertName == "") return null; String id = vertIDs.get(vertName); return id; } /** * returns list of edge info maps for the outgoing edges of this vertex * @param vertName * @return list of edge property maps */ public List<Map<String, Object>> getOutEdges(String outVertID) throws IllegalArgumentException{ if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid outVertID"); } List<Map<String, Object>> foundEdges = new LinkedList<Map<String, Object>>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("outVertID")).equals(outVertID) ){ //inVertID = currEdge.get("inVertID"); //outVertID = currEdge.get("outVertID"); //relation = currEdge.get("relation"); foundEdges.add( currEdge ); } } return foundEdges; } /** * returns list of edge info maps for the incoming edges of this vertex * @param vertName * @return list of edge property maps */ public List<Map<String, Object>> getInEdges(String inVertID) throws IllegalArgumentException{ if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid inVertID"); } List<Map<String, Object>> foundEdges = new LinkedList<Map<String, Object>>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("inVertID")).equals(inVertID) ){ foundEdges.add( currEdge ); } } return foundEdges; } /** * return a list of Incoming vertices based on their edge type relation * @param outVertID * @param relation * @return list of vertices */ public List<String> getInVertIDsByRelation(String outVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot get edge with missing or invalid relation"); } if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid outVertID"); } List<String> relatedIDs = new LinkedList<String>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("relation")).equals(relation) ){ if( ((String)currEdge.get("outVertID")).equals(outVertID) ){ relatedIDs.add( (String)currEdge.get("inVertID") ); //TODO: check valid state here? 
} } } return relatedIDs; } /** * return a list of Outgoing vertices based on their edge type relation * @param inVertID * @param relation * @return list of vertices */ public List<String> getOutVertIDsByRelation(String inVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot get edge with missing or invalid relation"); } if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid inVertID"); } List<String> relatedIDs = new LinkedList<String>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("relation")).equals(relation) ){ if( ((String)currEdge.get("inVertID")).equals(inVertID) ){ relatedIDs.add( (String)currEdge.get("outVertID") ); //TODO: check valid state here? } } } return relatedIDs; } /** * get the list of incoming or outgoing vertices based on edge relationship * @param vertID * @param relation * @return list of vertices */ public List<String> getVertIDsByRelation(String vertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot get edge with missing or invalid relation"); } if(vertID == null || vertID.equals("") || !vertices.containsKey(vertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid inVertID"); } List<String> relatedIDs = new LinkedList<String>(); for(Map<String, Object> currEdge : edges.values()){ if( ((String)currEdge.get("relation")).equals(relation) ){ if( ((String)currEdge.get("inVertID")).equals(vertID) || ((String)currEdge.get("outVertID")).equals(vertID)){ relatedIDs.add( (String)currEdge.get("outVertID") ); //TODO: check valid state here? } } } return relatedIDs; } @Override public int getEdgeCountByRelation(String inVertID, String outVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot get edge with missing or invalid relation"); } if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid inVertID"); } if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot get edge with missing or invalid outVertID"); } int count = 0; for(Map<String, Object> currEdge : edges.values()) { if( currEdge.get("relation").equals(relation) && currEdge.get("outVertID").equals(outVertID) && currEdge.get("inVertID").equals(inVertID) ) { count++; } } return count; } /** * get a list of vertex IDs based on a list of constraints * @param constraints list of constraints * @return list of vertex IDs */ public List<String> getVertIDsByConstraints(List<DBConstraint> constraints){ Set<String> candidateIDs = null; Set<String> nonMatchingIDs = new HashSet<String>(); List<String> matchingIDs = new LinkedList<String>(); //First, generate candidateIDs set. //Note that after candidateIDs is populated here, it will not be modified. if(indexedVertFields.size() > 0){ //TODO: indices //This should use indexed fields to find candidateIDs, then find the nonMatchingIDs below as usual. //we need to decide if only exact matches are allowed, or if ranges & etc. are ok here. //also, somehow indicate that the constraints used here are 'done', so they aren't re-checked below. 
candidateIDs = new HashSet<String>(); } if(candidateIDs == null){ //if no initial matchingIDs set was generated yet, use all IDs candidateIDs = vertices.keySet(); } //make set of non-matching candidates, based on constraints for(String id : candidateIDs){ Map<String, Object> candidateVert = vertices.get(id); for(DBConstraint c : constraints){ Object candidateValue = candidateVert.get(c.getProp()); if( !compare(candidateValue, c.getCond(), c.getVal()) ){ nonMatchingIDs.add(id); break; } } } // build the matchingIDs list, based on candidateIDs and nonMatchingIDs for(String id : candidateIDs){ if( !nonMatchingIDs.contains(id) ){ matchingIDs.add(id); } } return matchingIDs; } /** * method to compare two objects that can use the conditional object * @param o1 * @param cond * @param o2 * @return true or false */ private boolean compare(Object o1, Condition cond, Object o2){ if(o1 == null && cond == Condition.eq && o2 == null) return true; if(o1 == null || o2 == null) return false; if(cond == Condition.eq){ return o1.equals(o2); } if(cond == Condition.neq){ return !o1.equals(o2); } if(cond == Condition.gt){ if(o1 instanceof Comparable && o2 instanceof Comparable){ Comparable c1 = (Comparable)o1; Comparable c2 = (Comparable)o2; return ( c1.compareTo(c2) > 0 ); }else{ return false; } } if(cond == Condition.gte){ if(o1 instanceof Comparable && o2 instanceof Comparable){ Comparable c1 = (Comparable)o1; Comparable c2 = (Comparable)o2; return ( c1.compareTo(c2) >= 0 ); }else{ return false; } } if(cond == Condition.lt){ if(o1 instanceof Comparable && o2 instanceof Comparable){ Comparable c1 = (Comparable)o1; Comparable c2 = (Comparable)o2; return ( c1.compareTo(c2) < 0 ); }else{ return false; } } if(cond == Condition.lte){ if(o1 instanceof Comparable && o2 instanceof Comparable){ Comparable c1 = (Comparable)o1; Comparable c2 = (Comparable)o2; return ( c1.compareTo(c2) <= 0 ); }else{ return false; } } if(cond == Condition.contains){ if(o1 instanceof Collection){ Collection c1 = (Collection)o1; return c1.contains(o2); }else{ return false; } } if(cond == Condition.substring){ if(o1 instanceof String){ String s1 = (String)o1; String s2 = ""; if(o2 instanceof String || o2 instanceof Character){ s2 += o2; return s1.contains(s2); } } return false; } return false; } /** * remove a vertex by a vertex ID * @param vertID */ public void removeVertByID(String vertID){ Object nameObj = vertices.get(vertID).get("name"); if(nameObj == null || !(nameObj instanceof String) ){ throw new IllegalStateException("bad state: vertex must contain name field"); } String name = (String)nameObj; vertIDs.remove(name); vertices.remove(vertID); //TODO: remove edges that contain this vertID!!!!! 
} /** * add a vertex given a property map * @param vert - property map * @return vertexID */ public String addVertex(Map<String, Object> vert){ Object nameObj = vert.get("name"); if(nameObj == null || !(nameObj instanceof String) || ((String)nameObj).equals("") ){ throw new IllegalArgumentException("cannot add vertes with empty name field"); }//TODO check any other mandatory fields String name = (String)nameObj; if(vertIDs.containsKey(name)){ removeVertByID(getVertIDByName(name)); } String vertID = String.valueOf( UUID.randomUUID() ); vertIDs.put(name, vertID); // make sure all multi-value properties are sets convertAllMultiValuesToSet(vert); vertices.put(vertID, vert); //TODO: update any indices return vertID; } /** * add and edge * @param inVertID ID of the incoming vertex edge * @param outVertID - ID of the outgoing vertex edge * @param relation - type of edge relation */ public void addEdge(String inVertID, String outVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot add edge with missing or invlid relation"); } if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot add edge with missing or invalid inVertID"); } if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot add edge with missing or invalid outVertID"); } //TODO: check if edge is duplicate?? For now, just add it, duplicates are ok I guess. Map<String, Object> newEdge = new HashMap<String, Object>(); newEdge.put("inVertID", inVertID); newEdge.put("outVertID", outVertID); newEdge.put("relation", relation); String edgeID = String.valueOf( UUID.randomUUID() ); edges.put(edgeID, newEdge); //TODO: update any indices } /** * overwrite or add new properties to an existing vertex's property map * @param VertID * @param newVert - property map */ public void updateVertex(String VertID, Map<String, Object> newVert){ Map<String, Object> oldVert = vertices.get(VertID); if(oldVert == null){ throw new IllegalArgumentException("invalid vertex ID"); } Object newVertName = newVert.remove("name"); Object oldVertName = oldVert.get("name"); if(newVertName != null && !(((String)newVertName).equals((String)oldVertName)) ){ throw new IllegalArgumentException("cannot update name of existing vertex"); } for(Map.Entry<String, Object> entry: newVert.entrySet()){ String key = entry.getKey(); Object newValue = entry.getValue(); updateVertexProperty(VertID, key, newValue); } } @Override public void close() { // TODO Auto-generated method stub } @Override public void open() { // TODO Auto-generated method stub } @Override public void removeEdgeByRelation(String inVertID, String outVertID, String relation){ if(relation == null || relation.equals("") ){ throw new IllegalArgumentException("cannot add edge with missing or invlid relation"); } if(inVertID == null || inVertID.equals("") || !vertices.containsKey(inVertID)){ throw new IllegalArgumentException("cannot add edge with missing or invalid inVertID"); } if(outVertID == null || outVertID.equals("") || !vertices.containsKey(outVertID)){ throw new IllegalArgumentException("cannot add edge with missing or invalid outVertID"); } // collect the edge IDs that need to be removed Set<String> edgeIDs = new HashSet<String>(); for(Map.Entry<String, Map<String, Object>> entry : edges.entrySet()) { String edgeID = entry.getKey(); Map<String, Object> currEdge = entry.getValue(); if( currEdge.get("relation").equals(relation) 
&& currEdge.get("outVertID").equals(outVertID) && currEdge.get("inVertID").equals(inVertID) ) { edgeIDs.add(edgeID); } } //remove the IDs we found for(String edgeID : edgeIDs) { edges.remove(edgeID); } } @Override public void removeAllVertices() { vertices.clear(); vertIDs.clear(); edges.clear(); } @Override public DBConstraint getConstraint(String property, Condition condition, Object value) { return new InMemoryConstraint(property, condition, value); } @Override public void buildIndex(String indexConfig) { // NO-OP } @Override protected void setPropertyInDB(String id, String key, Object newValue) { vertices.get(id).put(key, newValue); } public void loadState(String filePath) { try { InputStream is = new FileInputStream(filePath); String textContents = IOUtils.toString( is ); is.close(); JSONObject contents = new JSONObject(textContents); JSONObject vertsJSON = contents.getJSONObject("vertices"); JSONArray edgesJSON = contents.getJSONArray("edges"); //add vertices for( Object id : vertsJSON.keySet() ) { JSONObject jsonVert = vertsJSON.getJSONObject(id.toString()); String description = jsonVert.optString("description"); if(description != null && !description.equals("")){ //This is kind of an odd workaround, to prevent ui from treating, eg, "URI: www.blah.com | Type: URL |" as a URL instead of a string. //TODO: this is really a problem in the UI, as far as we care it's still just a string either way. jsonVert.put("description", " " + description); }else{ //ui assumes everything has a description, this is a workaround to avoid having empty text in various places. jsonVert.put("description", jsonVert.optString("name")); } Map<String, Object> vert = jsonVertToMap(jsonVert); vertices.put(id.toString(), vert); String name = (String)vert.get("name"); vertIDs.put(name, id.toString() ); //System.out.println("loaded vertex named " + name + " with id: " + id); //for debugging } //add edges. for( int i=0; i<edgesJSON.length(); i++ ) { JSONObject edge = edgesJSON.getJSONObject(i); try { String inVertID = edge.getString("inVertID"); String outVertID = edge.getString("outVertID"); String relation = edge.getString("relation"); int matchingEdgeCount = getEdgeCountByRelation(inVertID, outVertID, relation); if(matchingEdgeCount == 0){ addEdge(inVertID, outVertID, relation); } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block System.err.println("error when loading edge: " + edge); e.printStackTrace(); } } } catch (IOException e) { e.printStackTrace(); } } //TODO: tests public void saveState(String filePath) { try { OutputStream os = new FileOutputStream(filePath); PrintStream printStream = new PrintStream(os); //printStream.print("String"); JSONObject vertsJSON = new JSONObject(); JSONArray edgesJSON = new JSONArray(); //add vertices for( String id : vertices.keySet() ) { Map<String, Object> vert = vertices.get(id); JSONObject currEdge = new JSONObject(); for( String prop : vert.keySet() ){ //TODO: confirm this handles sets properly currEdge.put(prop, vert.get(prop)); } edgesJSON.put(currEdge); } //add edges. 
for( String id : edges.keySet() ) { Map<String, Object> edge = edges.get(id); JSONObject currEdge = new JSONObject(); for( String prop : edge.keySet() ){ currEdge.put(prop, edge.get(prop)); } edgesJSON.put(currEdge); } JSONObject contents = new JSONObject(); contents.put("vertices", vertsJSON); contents.put("edges", edgesJSON); printStream.print(contents.toString(2)); printStream.close(); os.close(); } catch (IOException e) { e.printStackTrace(); } } }
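The updated version of this class replaces the name-to-ID map above with a simple per-property value index: each indexed property maps a stringified value to the set of vertex IDs carrying that value, and getVertIDsByConstraints() consults it for Condition.eq constraints before falling back to a full scan. A minimal sketch of that structure and lookup, with the field type inferred from its use in addVertToIndex() and getVertIDsByConstraints():

    // property name -> (stringified value -> IDs of vertices holding that value)
    Map<String, Map<String, Set<String>>> indexedVertFields = new HashMap<String, Map<String, Set<String>>>();
    indexedVertFields.put("name", new HashMap<String, Set<String>>());

    // serving an equality constraint from the index instead of scanning every vertex
    Map<String, Set<String>> nameIndex = indexedVertFields.get("name");
    Set<String> candidateIDs = nameIndex.get("someVertexName"); // may be null if the value was never indexed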
Remove getVertByName and getVertIDByName from InMemoryDBConnection; remove the map of names -> IDs; add basic indexing, with name as an indexed field.
src/main/java/gov/pnnl/stucco/dbconnect/inmemory/InMemoryDBConnection.java
Remove getVertByName and getVertIDByName from InMemoryDBConnection; remove the map of names -> IDs; add basic indexing, with name as an indexed field.
Java
mit
045f155aba6870dadf052a0d605f84125e439826
0
Thatsmusic99/HeadsPlus
package io.github.thatsmusic99.headsplus.config; import io.github.thatsmusic99.configurationmaster.api.ConfigSection; import io.github.thatsmusic99.headsplus.HeadsPlus; import io.github.thatsmusic99.headsplus.config.defaults.HeadsXEnums; import io.github.thatsmusic99.headsplus.config.defaults.HeadsXSections; import io.github.thatsmusic99.headsplus.managers.HeadManager; import io.github.thatsmusic99.headsplus.managers.PersistenceManager; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; public class ConfigHeadsSelector extends FeatureConfig { private static ConfigHeadsSelector instance; private HashMap<String, SectionInfo> sections = new LinkedHashMap<>(); private HashMap<String, BuyableHeadInfo> buyableHeads = new LinkedHashMap<>(); private int totalHeads = 0; public ConfigHeadsSelector() { super("heads-selector.yml"); instance = this; } @Override public boolean shouldLoad() { return MainConfig.get().getMainFeatures().HEADS_SELECTOR; } public static ConfigHeadsSelector get() { return instance; } @Override public void loadDefaults() { double version = getDouble("version", 0.0); if (isNew()) version = 0.0; addComment("This is where you can configure where the heads selector (/heads)"); addDefault("version", 3.5); makeSectionLenient("sections"); makeSectionLenient("heads"); if (version >= 3.5) return; for (HeadsXSections section : HeadsXSections.values()) { if (section.version > version) { addDefault("sections." + section.id + ".texture", section.texture); addDefault("sections." + section.id + ".display-name", section.displayName); addDefault("sections." + section.id + ".permission", "headsplus.section." + section.id); addDefault("sections." + section.id + ".enabled", true); } } for (HeadsXEnums head : HeadsXEnums.values()) { if (head.version > version) { addDefault("heads.HP#" + head.name().toLowerCase() + ".section", head.section); } } } @Override public void postSave() { sections.clear(); totalHeads = 0; // Setting up sections for (String key : getConfigSection("sections").getKeys(false)) { ConfigSection section = getConfigSection("sections." + key); if (section == null) continue; // If the section isn't enabled, continue if (!section.getBoolean("enabled")) continue; sections.put(key, new SectionInfo(key) .withDisplayName(section.getString("display-name", null)) .withPermission(section.getString("permission")) .withTexture(section.getString("texture"))); } // Setting up heads for (String key : getConfigSection("heads").getKeys(false)) { ConfigSection section = getConfigSection("heads." 
+ key); if (section == null) continue; // If the section doesn't exist, continue if (!section.contains("section")) continue; if (!sections.containsKey(section.getString("section"))) continue; SectionInfo sectionInfo = sections.get(section.getString("section")); // Get the head info itself if (!HeadManager.get().contains(key)) continue; // TODO - lore BuyableHeadInfo headInfo = new BuyableHeadInfo(HeadManager.get().getHeadInfo(key)); headInfo.withDisplayName(section.getString("display-name", null)); buyableHeads.put(key, headInfo); sectionInfo.addHead(headInfo); totalHeads++; } } public HashMap<String, SectionInfo> getSections() { return sections; } public SectionInfo getSection(String name) { return sections.get(name); } public HashMap<String, BuyableHeadInfo> getBuyableHeads() { return buyableHeads; } public BuyableHeadInfo getBuyableHead(String id) { return buyableHeads.get(id); } public int getTotalHeads() { return totalHeads; } public static class SectionInfo { private String texture = null; private String displayName = null; private String permission; private final String id; private final List<BuyableHeadInfo> heads; public SectionInfo(String id) { this.id = id; this.permission = "headsplus.section." + id; this.heads = new ArrayList<>(); } public SectionInfo withPermission(String permission) { this.permission = permission; return this; } public SectionInfo withTexture(String texture) { this.texture = texture; return this; } public SectionInfo withDisplayName(String displayName) { this.displayName = displayName; return this; } public void addHead(BuyableHeadInfo head) { heads.add(head); } public String getPermission() { return permission; } public String getId() { return id; } public List<BuyableHeadInfo> getHeads() { return heads; } public ItemStack buildSection() { if (texture == null) throw new IllegalStateException("Texture must not be null!"); if (!texture.startsWith("HP#")) throw new IllegalStateException("The texture for " + id + " must be a registered (HP#) head!"); HeadManager.HeadInfo headInfo = HeadManager.get().getHeadInfo(texture); HeadsPlus.debug("Building a head for " + texture + "..."); HeadsPlus.debug("Contains texture > " + HeadManager.get().contains(texture)); ItemStack item = headInfo.forceBuildHead(); PersistenceManager.get().makeIcon(item); if (displayName == null) return item; ItemMeta meta = item.getItemMeta(); if (meta == null) return item; meta.setDisplayName(displayName); item.setItemMeta(meta); return item; } } public static class BuyableHeadInfo extends HeadManager.HeadInfo { private double price; public BuyableHeadInfo(HeadManager.HeadInfo info) { super(info.getId()); this.withDisplayName(info.getDisplayName()) .withMaterial(info.getMaterial()) .withTexture(info.getTexture()); setLore(info.getLore()); } } }
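For context, a minimal usage sketch of the selector API defined above (illustrative only; the menu code below is not part of the plugin source shown here), iterating the loaded sections and building their icon items:

    // Assumes the config has been loaded, so postSave() has populated the sections.
    for (ConfigHeadsSelector.SectionInfo info : ConfigHeadsSelector.get().getSections().values()) {
        org.bukkit.inventory.ItemStack icon = info.buildSection(); // head item for the /heads menu
        int headCount = info.getHeads().size();                    // e.g. for a lore line or paging
    }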
src/main/java/io/github/thatsmusic99/headsplus/config/ConfigHeadsSelector.java
package io.github.thatsmusic99.headsplus.config; import io.github.thatsmusic99.configurationmaster.api.ConfigSection; import io.github.thatsmusic99.headsplus.HeadsPlus; import io.github.thatsmusic99.headsplus.config.defaults.HeadsXEnums; import io.github.thatsmusic99.headsplus.config.defaults.HeadsXSections; import io.github.thatsmusic99.headsplus.managers.HeadManager; import io.github.thatsmusic99.headsplus.managers.PersistenceManager; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; public class ConfigHeadsSelector extends FeatureConfig { private static ConfigHeadsSelector instance; private HashMap<String, SectionInfo> sections = new LinkedHashMap<>(); private HashMap<String, BuyableHeadInfo> buyableHeads = new LinkedHashMap<>(); private int totalHeads = 0; public ConfigHeadsSelector() { super("heads-selector.yml"); instance = this; } @Override public boolean shouldLoad() { return MainConfig.get().getMainFeatures().HEADS_SELECTOR; } public static ConfigHeadsSelector get() { return instance; } @Override public void loadDefaults() { double version = getDouble("version", 0.0); if (isNew()) version = 0.0; addComment("This is where you can configure where the heads selector (/heads)"); addDefault("version", 3.5); makeSectionLenient("sections"); makeSectionLenient("heads"); if (version >= 3.5) return; for (HeadsXSections section : HeadsXSections.values()) { if (section.version > version) { addDefault("sections." + section.id + ".texture", section.texture); addDefault("sections." + section.id + ".display-name", section.displayName); addDefault("sections." + section.id + ".permission", "headsplus.section." + section.id); addDefault("sections." + section.id + ".enabled", true); } } for (HeadsXEnums head : HeadsXEnums.values()) { if (head.version > version) { addDefault("heads.HP#" + head.name().toLowerCase() + ".section", head.section); } } } @Override public void postSave() { sections.clear(); totalHeads = 0; // Setting up sections for (String key : getConfigSection("sections").getKeys(false)) { ConfigSection section = getConfigSection("sections." + key); if (section == null) continue; // If the section isn't enabled, continue if (!section.getBoolean("enabled")) continue; sections.put(key, new SectionInfo(key) .withDisplayName(section.getString("display-name", null)) .withPermission(section.getString("permission")) .withTexture("texture")); } // Setting up heads for (String key : getConfigSection("heads").getKeys(false)) { ConfigSection section = getConfigSection("heads." 
+ key); if (section == null) continue; // If the section doesn't exist, continue if (!section.contains("section")) continue; if (!sections.containsKey(section.getString("section"))) continue; SectionInfo sectionInfo = sections.get(section.getString("section")); // Get the head info itself if (!HeadManager.get().contains(key)) continue; // TODO - lore BuyableHeadInfo headInfo = new BuyableHeadInfo(HeadManager.get().getHeadInfo(key)); headInfo.withDisplayName(section.getString("display-name", null)); buyableHeads.put(key, headInfo); sectionInfo.addHead(headInfo); totalHeads++; } } public HashMap<String, SectionInfo> getSections() { return sections; } public SectionInfo getSection(String name) { return sections.get(name); } public HashMap<String, BuyableHeadInfo> getBuyableHeads() { return buyableHeads; } public BuyableHeadInfo getBuyableHead(String id) { return buyableHeads.get(id); } public int getTotalHeads() { return totalHeads; } public static class SectionInfo { private String texture = null; private String displayName = null; private String permission; private final String id; private final List<BuyableHeadInfo> heads; public SectionInfo(String id) { this.id = id; this.permission = "headsplus.section." + id; this.heads = new ArrayList<>(); } public SectionInfo withPermission(String permission) { this.permission = permission; return this; } public SectionInfo withTexture(String texture) { this.texture = texture; return this; } public SectionInfo withDisplayName(String displayName) { this.displayName = displayName; return this; } public void addHead(BuyableHeadInfo head) { heads.add(head); } public String getPermission() { return permission; } public String getId() { return id; } public List<BuyableHeadInfo> getHeads() { return heads; } public ItemStack buildSection() { if (texture == null) throw new IllegalStateException("Texture must not be null!"); if (!texture.startsWith("HP#")) throw new IllegalStateException("The texture for " + id + " must be a registered (HP#) head!"); HeadManager.HeadInfo headInfo = HeadManager.get().getHeadInfo(texture); HeadsPlus.debug("Building a head for " + texture + "..."); HeadsPlus.debug("Contains texture > " + HeadManager.get().contains(texture)); ItemStack item = headInfo.forceBuildHead(); PersistenceManager.get().makeIcon(item); if (displayName == null) return item; ItemMeta meta = item.getItemMeta(); if (meta == null) return item; meta.setDisplayName(displayName); item.setItemMeta(meta); return item; } } public static class BuyableHeadInfo extends HeadManager.HeadInfo { private double price; public BuyableHeadInfo(HeadManager.HeadInfo info) { super(info.getId()); this.withDisplayName(info.getDisplayName()) .withMaterial(info.getMaterial()) .withTexture(info.getTexture()); setLore(info.getLore()); } } }
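The functional difference between the two versions of postSave() above comes down to a single call: the older code passed the literal string "texture" to withTexture(), so every section icon was built from the placeholder value rather than its configured texture. The newer code reads the value from the config section:

    // old (broken): the literal key name was stored as the texture
    .withTexture("texture")

    // new (fixed): the configured value is read from the section
    .withTexture(section.getString("texture"))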
Fixed textures not being applied
src/main/java/io/github/thatsmusic99/headsplus/config/ConfigHeadsSelector.java
Fixed textures not being applied
Java
mit
a018d17f7e8a5d43027e92ea5d0a4184a616905b
0
hansjoachim/mockito,mockito/mockito,hansjoachim/mockito,ze-pequeno/mockito,terebesirobert/mockito,mockito/mockito,bric3/mockito,bric3/mockito,bric3/mockito,ze-pequeno/mockito,mockito/mockito
/* * Copyright (c) 2007 Mockito contributors * This program is made available under the terms of the MIT License. */ package org.mockito; import org.mockito.internal.framework.DefaultMockitoFramework; import org.mockito.internal.MockitoCore; import org.mockito.internal.creation.MockSettingsImpl; import org.mockito.internal.debugging.MockitoDebuggerImpl; import org.mockito.internal.stubbing.defaultanswers.ReturnsEmptyValues; import org.mockito.internal.stubbing.defaultanswers.ReturnsMoreEmptyValues; import org.mockito.internal.verification.VerificationModeFactory; import org.mockito.mock.SerializableMode; import org.mockito.runners.MockitoJUnitRunner; import org.mockito.stubbing.*; import org.mockito.verification.*; import org.mockito.junit.*; /** * <p align="left"><img src="logo.png" srcset="[email protected] 2x" alt="Mockito logo"/></p> * The Mockito library enables mock creation, verification and stubbing. * <p> * This javadoc content is also available on the <a href="http://mockito.org">http://mockito.org</a> web page. * All documentation is kept in javadocs because it guarantees consistency between what's on the web and what's in the source code. * It allows access to documentation straight from the IDE even if you work offline. * It motivates Mockito developers to keep documentation up-to-date with the code that they write, * every day, with every commit. * * <h1>Contents</h1> * * <b> * <a href="#0">0. Migrating to Mockito 2</a><br/> * <a href="#1">1. Let's verify some behaviour! </a><br/> * <a href="#2">2. How about some stubbing? </a><br/> * <a href="#3">3. Argument matchers </a><br/> * <a href="#4">4. Verifying exact number of invocations / at least once / never </a><br/> * <a href="#5">5. Stubbing void methods with exceptions </a><br/> * <a href="#6">6. Verification in order </a><br/> * <a href="#7">7. Making sure interaction(s) never happened on mock </a><br/> * <a href="#8">8. Finding redundant invocations </a><br/> * <a href="#9">9. Shorthand for mocks creation - <code>&#064;Mock</code> annotation </a><br/> * <a href="#10">10. Stubbing consecutive calls (iterator-style stubbing) </a><br/> * <a href="#11">11. Stubbing with callbacks </a><br/> * <a href="#12">12. <code>doReturn()</code>|<code>doThrow()</code>|<code>doAnswer()</code>|<code>doNothing()</code>|<code>doCallRealMethod()</code> family of methods</a><br/> * <a href="#13">13. Spying on real objects </a><br/> * <a href="#14">14. Changing default return values of unstubbed invocations (Since 1.7) </a><br/> * <a href="#15">15. Capturing arguments for further assertions (Since 1.8.0) </a><br/> * <a href="#16">16. Real partial mocks (Since 1.8.0) </a><br/> * <a href="#17">17. Resetting mocks (Since 1.8.0) </a><br/> * <a href="#18">18. Troubleshooting & validating framework usage (Since 1.8.0) </a><br/> * <a href="#19">19. Aliases for behavior driven development (Since 1.8.0) </a><br/> * <a href="#20">20. Serializable mocks (Since 1.8.1) </a><br/> * <a href="#21">21. New annotations: <code>&#064;Captor</code>, <code>&#064;Spy</code>, <code>&#064;InjectMocks</code> (Since 1.8.3) </a><br/> * <a href="#22">22. Verification with timeout (Since 1.8.5) </a><br/> * <a href="#23">23. Automatic instantiation of <code>&#064;Spies</code>, <code>&#064;InjectMocks</code> and constructor injection goodness (Since 1.9.0)</a><br/> * <a href="#24">24. One-liner stubs (Since 1.9.0)</a><br/> * <a href="#25">25. Verification ignoring stubs (Since 1.9.0)</a><br/> * <a href="#26">26. Mocking details (Since 1.9.5)</a><br/> * <a href="#27">27. 
Delegate calls to real instance (Since 1.9.5)</a><br/> * <a href="#28">28. <code>MockMaker</code> API (Since 1.9.5)</a><br/> * <a href="#29">29. BDD style verification (Since 1.10.0)</a><br/> * <a href="#30">30. Spying or mocking abstract classes (Since 1.10.12)</a><br/> * <a href="#31">31. Mockito mocks can be <em>serialized</em> / <em>deserialized</em> across classloaders (Since 1.10.0)</a></h3><br/> * <a href="#32">32. Better generic support with deep stubs (Since 1.10.0)</a></h3><br/> * <a href="#32">33. Mockito JUnit rule (Since 1.10.17)</a><br/> * <a href="#34">34. Switch <em>on</em> or <em>off</em> plugins (Since 1.10.15)</a><br/> * <a href="#35">35. Custom verification failure message (Since 2.1.0)</a><br/> * <a href="#36">36. Java 8 Lambda Matcher Support (Since 2.1.0)</a><br/> * <a href="#37">37. Java 8 Custom Answer Support (Since 2.1.0)</a><br/> * </b> * * <h3 id="0">0. <a class="meaningful_link" href="#mockito2">Migrating to Mockito 2</a></h3> * * In order to continue improving Mockito and further improve the unit testing experience, we want you to upgrade to 2.1.0! * Mockito follows <a href="http://semver.org/">semantic versioning</a> and contains breaking changes only on major version upgrades. * In the lifecycle of a library, breaking changes are necessary * to roll out a set of brand new features that alter the existing behavior or even change the API. * For a comprehensive guide on the new release including incompatible changes, * see '<a href="https://github.com/mockito/mockito/wiki/What%27s-new-in-Mockito-2">What's new in Mockito 2</a>' wiki page. * We hope that you enjoy Mockito 2! * * <h3 id="1">1. <a class="meaningful_link" href="#verification">Let's verify some behaviour!</a></h3> * * The following examples mock a List, because most people are familiar with the interface (such as the * <code>add()</code>, <code>get()</code>, <code>clear()</code> methods). <br> * In reality, please don't mock the List class. Use a real instance instead. * * <pre class="code"><code class="java"> * //Let's import Mockito statically so that the code looks clearer * import static org.mockito.Mockito.*; * * //mock creation * List mockedList = mock(List.class); * * //using mock object * mockedList.add("one"); * mockedList.clear(); * * //verification * verify(mockedList).add("one"); * verify(mockedList).clear(); * </code></pre> * * <p> * Once created, a mock will remember all interactions. Then you can selectively * verify whatever interactions you are interested in. * * * * * <h3 id="2">2. <a class="meaningful_link" href="#stubbing">How about some stubbing?</a></h3> * * <pre class="code"><code class="java"> * //You can mock concrete classes, not just interfaces * LinkedList mockedList = mock(LinkedList.class); * * //stubbing * when(mockedList.get(0)).thenReturn("first"); * when(mockedList.get(1)).thenThrow(new RuntimeException()); * * //following prints "first" * System.out.println(mockedList.get(0)); * * //following throws runtime exception * System.out.println(mockedList.get(1)); * * //following prints "null" because get(999) was not stubbed * System.out.println(mockedList.get(999)); * * //Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b> * //If your code cares what get(0) returns, then something else breaks (often even before verify() gets executed). * //If your code doesn't care what get(0) returns, then it should not be stubbed. Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>. 
* verify(mockedList).get(0); * </code></pre> * * <ul> * <li> By default, for all methods that return a value, a mock will return either null, a * a primitive/primitive wrapper value, or an empty collection, as appropriate. * For example 0 for an int/Integer and false for a boolean/Boolean. </li> * * <li> Stubbing can be overridden: for example common stubbing can go to * fixture setup but the test methods can override it. * Please note that overridding stubbing is a potential code smell that points out too much stubbing</li> * * <li> Once stubbed, the method will always return a stubbed value, regardless * of how many times it is called. </li> * * <li> Last stubbing is more important - when you stubbed the same method with * the same arguments many times. * Other words: <b>the order of stubbing matters</b> but it is only meaningful rarely, * e.g. when stubbing exactly the same method calls or sometimes when argument matchers are used, etc.</li> * * </ul> * * * * <h3 id="3">3. <a class="meaningful_link" href="#argument_matchers">Argument matchers</a></h3> * * Mockito verifies argument values in natural java style: by using an <code>equals()</code> method. * Sometimes, when extra flexibility is required then you might use argument matchers: * * <pre class="code"><code class="java"> * //stubbing using built-in anyInt() argument matcher * when(mockedList.get(anyInt())).thenReturn("element"); * * //stubbing using custom matcher (let's say isValid() returns your own matcher implementation): * when(mockedList.contains(argThat(isValid()))).thenReturn("element"); * * //following prints "element" * System.out.println(mockedList.get(999)); * * //<b>you can also verify using an argument matcher</b> * verify(mockedList).get(anyInt()); * * //<b>argument matchers can also be written as Java 8 Lambdas</b> * verify(mockedList).add(someString -> someString.length() > 5); * * </code></pre> * * <p> * Argument matchers allow flexible verification or stubbing. * {@link ArgumentMatchers Click here} {@link org.mockito.hamcrest.MockitoHamcrest or here} to see more built-in matchers * and examples of <b>custom argument matchers / hamcrest matchers</b>. * <p> * For information solely on <b>custom argument matchers</b> check out javadoc for {@link ArgumentMatcher} class. * <p> * Be reasonable with using complicated argument matching. * The natural matching style using <code>equals()</code> with occasional <code>anyX()</code> matchers tend to give clean & simple tests. * Sometimes it's just better to refactor the code to allow <code>equals()</code> matching or even implement <code>equals()</code> method to help out with testing. * <p> * Also, read <a href="#15">section 15</a> or javadoc for {@link ArgumentCaptor} class. * {@link ArgumentCaptor} is a special implementation of an argument matcher that captures argument values for further assertions. * <p> * <b>Warning on argument matchers:</b> * <p> * If you are using argument matchers, <b>all arguments</b> have to be provided * by matchers. * <p> The following example shows verification but the same applies to stubbing: * * <pre class="code"><code class="java"> * verify(mock).someMethod(anyInt(), anyString(), <b>eq("third argument")</b>); * //above is correct - eq() is also an argument matcher * * verify(mock).someMethod(anyInt(), anyString(), <b>"third argument"</b>); * //above is incorrect - exception will be thrown because third argument is given without an argument matcher. 
* </code></pre> * * <p> * Matcher methods like <code>anyObject()</code>, <code>eq()</code> <b>do not</b> return matchers. * Internally, they record a matcher on a stack and return a dummy value (usually null). * This implementation is due to static type safety imposed by the java compiler. * The consequence is that you cannot use <code>anyObject()</code>, <code>eq()</code> methods outside of verified/stubbed method. * * * * * <h3 id="4">4. <a class="meaningful_link" href="#exact_verification">Verifying exact number of invocations</a> / * <a class="meaningful_link" href="#at_least_verification">at least x</a> / never</h3> * * <pre class="code"><code class="java"> * //using mock * mockedList.add("once"); * * mockedList.add("twice"); * mockedList.add("twice"); * * mockedList.add("three times"); * mockedList.add("three times"); * mockedList.add("three times"); * * //following two verifications work exactly the same - times(1) is used by default * verify(mockedList).add("once"); * verify(mockedList, times(1)).add("once"); * * //exact number of invocations verification * verify(mockedList, times(2)).add("twice"); * verify(mockedList, times(3)).add("three times"); * * //verification using never(). never() is an alias to times(0) * verify(mockedList, never()).add("never happened"); * * //verification using atLeast()/atMost() * verify(mockedList, atLeastOnce()).add("three times"); * verify(mockedList, atLeast(2)).add("five times"); * verify(mockedList, atMost(5)).add("three times"); * * </code></pre> * * <p> * <b>times(1) is the default.</b> Therefore using times(1) explicitly can be * omitted. * * * * * <h3 id="5">5. <a class="meaningful_link" href="#stubbing_with_exceptions">Stubbing void methods with exceptions</a></h3> * * <pre class="code"><code class="java"> * doThrow(new RuntimeException()).when(mockedList).clear(); * * //following throws RuntimeException: * mockedList.clear(); * </code></pre> * * Read more about <code>doThrow()</code>|<code>doAnswer()</code> family of methods in <a href="#12">section 12</a>. * <p> * * <h3 id="6">6. <a class="meaningful_link" href="#in_order_verification">Verification in order</a></h3> * * <pre class="code"><code class="java"> * // A. Single mock whose methods must be invoked in a particular order * List singleMock = mock(List.class); * * //using a single mock * singleMock.add("was added first"); * singleMock.add("was added second"); * * //create an inOrder verifier for a single mock * InOrder inOrder = inOrder(singleMock); * * //following will make sure that add is first called with "was added first, then with "was added second" * inOrder.verify(singleMock).add("was added first"); * inOrder.verify(singleMock).add("was added second"); * * // B. Multiple mocks that must be used in a particular order * List firstMock = mock(List.class); * List secondMock = mock(List.class); * * //using mocks * firstMock.add("was called first"); * secondMock.add("was called second"); * * //create inOrder object passing any mocks that need to be verified in order * InOrder inOrder = inOrder(firstMock, secondMock); * * //following will make sure that firstMock was called before secondMock * inOrder.verify(firstMock).add("was called first"); * inOrder.verify(secondMock).add("was called second"); * * // Oh, and A + B can be mixed together at will * </code></pre> * * Verification in order is flexible - <b>you don't have to verify all * interactions</b> one-by-one but only those that you are interested in * testing in order. 
* <p> * Also, you can create an InOrder object passing only the mocks that are relevant for * in-order verification. * * * * * <h3 id="7">7. <a class="meaningful_link" href="#never_verification">Making sure interaction(s) never happened on mock</a></h3> * * <pre class="code"><code class="java"> * //using mocks - only mockOne is interacted * mockOne.add("one"); * * //ordinary verification * verify(mockOne).add("one"); * * //verify that method was never called on a mock * verify(mockOne, never()).add("two"); * * //verify that other mocks were not interacted * verifyZeroInteractions(mockTwo, mockThree); * * </code></pre> * * * * * <h3 id="8">8. <a class="meaningful_link" href="#finding_redundant_invocations">Finding redundant invocations</a></h3> * * <pre class="code"><code class="java"> * //using mocks * mockedList.add("one"); * mockedList.add("two"); * * verify(mockedList).add("one"); * * //following verification will fail * verifyNoMoreInteractions(mockedList); * </code></pre> * * A word of <b>warning</b>: * Some users who did a lot of classic, expect-run-verify mocking tend to use <code>verifyNoMoreInteractions()</code> very often, even in every test method. * <code>verifyNoMoreInteractions()</code> is not recommended to use in every test method. * <code>verifyNoMoreInteractions()</code> is a handy assertion from the interaction testing toolkit. Use it only when it's relevant. * Abusing it leads to <strong>overspecified</strong>, <strong>less maintainable</strong> tests. You can find further reading * <a href="http://monkeyisland.pl/2008/07/12/should-i-worry-about-the-unexpected/">here</a>. * * <p> * See also {@link Mockito#never()} - it is more explicit and * communicates the intent well. * <p> * * * * * <h3 id="9">9. <a class="meaningful_link" href="#mock_annotation">Shorthand for mocks creation - <code>&#064;Mock</code> annotation</a></h3> * * <ul> * <li>Minimizes repetitive mock creation code.</li> * <li>Makes the test class more readable.</li> * <li>Makes the verification error easier to read because the <b>field name</b> * is used to identify the mock.</li> * </ul> * * <pre class="code"><code class="java"> * public class ArticleManagerTest { * * &#064;Mock private ArticleCalculator calculator; * &#064;Mock private ArticleDatabase database; * &#064;Mock private UserProvider userProvider; * * private ArticleManager manager; * </code></pre> * * <b>Important!</b> This needs to be somewhere in the base class or a test * runner: * * <pre class="code"><code class="java"> * MockitoAnnotations.initMocks(testClass); * </code></pre> * * You can use built-in runner: {@link MockitoJUnitRunner} or a rule: {@link MockitoRule}. * <p> * Read more here: {@link MockitoAnnotations} * * * * * <h3 id="10">10. <a class="meaningful_link" href="#stubbing_consecutive_calls">Stubbing consecutive calls</a> (iterator-style stubbing)</h3> * * Sometimes we need to stub with different return value/exception for the same * method call. Typical use case could be mocking iterators. * Original version of Mockito did not have this feature to promote simple mocking. * For example, instead of iterators one could use {@link Iterable} or simply * collections. Those offer natural ways of stubbing (e.g. using real * collections). 
In rare scenarios stubbing consecutive calls could be useful, * though: * <p> * * <pre class="code"><code class="java"> * when(mock.someMethod("some arg")) * .thenThrow(new RuntimeException()) * .thenReturn("foo"); * * //First call: throws runtime exception: * mock.someMethod("some arg"); * * //Second call: prints "foo" * System.out.println(mock.someMethod("some arg")); * * //Any consecutive call: prints "foo" as well (last stubbing wins). * System.out.println(mock.someMethod("some arg")); * </code></pre> * * Alternative, shorter version of consecutive stubbing: * * <pre class="code"><code class="java"> * when(mock.someMethod("some arg")) * .thenReturn("one", "two", "three"); * </code></pre> * * * * * <h3 id="11">11. <a class="meaningful_link" href="#answer_stubs">Stubbing with callbacks</a></h3> * * Allows stubbing with the generic {@link Answer} interface. * <p> * Yet another controversial feature which was not included in Mockito * originally. We recommend simply stubbing with <code>thenReturn()</code> or * <code>thenThrow()</code>, which should be enough to test/test-drive * any clean & simple code. However, if you do have a need to stub with the generic Answer interface, here is an example: * * <pre class="code"><code class="java"> * when(mock.someMethod(anyString())).thenAnswer(new Answer() { * public Object answer(InvocationOnMock invocation) { * Object[] args = invocation.getArguments(); * Object mock = invocation.getMock(); * return "called with arguments: " + Arrays.toString(args); * } * }); * * //the following prints "called with arguments: [foo]" * System.out.println(mock.someMethod("foo")); * </code></pre> * * * * * <h3 id="12">12. <a class="meaningful_link" href="#do_family_methods_stubs"><code>doReturn()</code>|<code>doThrow()</code>| * <code>doAnswer()</code>|<code>doNothing()</code>|<code>doCallRealMethod()</code> family of methods</a></h3> * * Stubbing void methods requires a different approach from {@link Mockito#when(Object)} because the compiler does not * like void methods inside brackets... * <p> * Use <code>doThrow()</code> when you want to stub a void method with an exception: * <pre class="code"><code class="java"> * doThrow(new RuntimeException()).when(mockedList).clear(); * * //following throws RuntimeException: * mockedList.clear(); * </code></pre> * </p> * * <p> * You can use <code>doThrow()</code>, <code>doAnswer()</code>, <code>doNothing()</code>, <code>doReturn()</code> * and <code>doCallRealMethod()</code> in place of the corresponding call with <code>when()</code>, for any method. * It is necessary when you * <ul> * <li>stub void methods</li> * <li>stub methods on spy objects (see below)</li> * <li>stub the same method more than once, to change the behaviour of a mock in the middle of a test.</li> * </ul> * but you may prefer to use these methods in place of the alternative with <code>when()</code>, for all of your stubbing calls. * <p> * Read more about these methods: * <p> * {@link Mockito#doReturn(Object)} * <p> * {@link Mockito#doThrow(Throwable...)} * <p> * {@link Mockito#doThrow(Class)} * <p> * {@link Mockito#doAnswer(Answer)} * <p> * {@link Mockito#doNothing()} * <p> * {@link Mockito#doCallRealMethod()} * * * * * <h3 id="13">13. <a class="meaningful_link" href="#spy">Spying on real objects</a></h3> * * You can create spies of real objects. When you use the spy then the <b>real</b> methods are called * (unless a method was stubbed). * <p> * Real spies should be used <b>carefully and occasionally</b>, for example when dealing with legacy code.
* * <p> * Spying on real objects can be associated with the "partial mocking" concept. * <b>Before the release 1.8</b>, Mockito spies were not real partial mocks. * The reason was that we thought partial mocks were a code smell. * At some point we found legitimate use cases for partial mocks * (3rd party interfaces, interim refactoring of legacy code, the full article is <a href= * "http://monkeyisland.pl/2009/01/13/subclass-and-override-vs-partial-mocking-vs-refactoring" * >here</a>). * <p> * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //optionally, you can stub out some methods: * when(spy.size()).thenReturn(100); * * //using the spy calls <b>*real*</b> methods * spy.add("one"); * spy.add("two"); * * //prints "one" - the first element of a list * System.out.println(spy.get(0)); * * //size() method was stubbed - 100 is printed * System.out.println(spy.size()); * * //optionally, you can verify * verify(spy).add("one"); * verify(spy).add("two"); * </code></pre> * * <h4>Important gotcha on spying real objects!</h4> * <ol> * <li>Sometimes it's impossible or impractical to use {@link Mockito#when(Object)} for stubbing spies. * Therefore when using spies please consider <code>doReturn</code>|<code>Answer</code>|<code>Throw()</code> family of * methods for stubbing. Example: * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo"); * * //You have to use doReturn() for stubbing * doReturn("foo").when(spy).get(0); * </code></pre> * </li> * * <li>Mockito <b>*does not*</b> delegate calls to the passed real instance, instead it actually creates a copy of it. * So if you keep the real instance and interact with it, don't expect the spy to be aware of those interactions * and their effect on the real instance's state. * The corollary is that when an <b>*unstubbed*</b> method is called <b>*on the spy*</b> but <b>*not on the real instance*</b>, * you won't see any effects on the real instance. * </li> * * <li>Watch out for final methods. * Mockito doesn't mock final methods so the bottom line is: when you spy on real objects + you try to stub a final method = trouble. * Also you won't be able to verify those methods either. * </li> * </ol> * * * * * <h3 id="14">14. Changing <a class="meaningful_link" href="#defaultreturn">default return values of unstubbed invocations</a> (Since 1.7)</h3> * * You can create a mock with a specified strategy for its return values. * It's quite an advanced feature and typically you don't need it to write decent tests. * However, it can be helpful for working with <b>legacy systems</b>. * <p> * It is the default answer so it will be used <b>only when you don't</b> stub the method call. * * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, Mockito.RETURNS_SMART_NULLS); * Foo mockTwo = mock(Foo.class, new YourOwnAnswer()); * </code></pre> * * <p> * Read more about this interesting implementation of <i>Answer</i>: {@link Mockito#RETURNS_SMART_NULLS} * * * * * <h3 id="15">15. <a class="meaningful_link" href="#captors">Capturing arguments</a> for further assertions (Since 1.8.0)</h3> * * Mockito verifies argument values in natural java style: by using an <code>equals()</code> method. * This is also the recommended way of matching arguments because it makes tests clean & simple.
* In some situations though, it is helpful to assert on certain arguments after the actual verification. * For example: * <pre class="code"><code class="java"> * ArgumentCaptor&lt;Person&gt; argument = ArgumentCaptor.forClass(Person.class); * verify(mock).doSomething(argument.capture()); * assertEquals("John", argument.getValue().getName()); * </code></pre> * * <b>Warning:</b> it is recommended to use ArgumentCaptor with verification <b>but not</b> with stubbing. * Using ArgumentCaptor with stubbing may decrease test readability because captor is created outside of assert (aka verify or 'then') block. * Also it may reduce defect localization because if stubbed method was not called then no argument is captured. * <p> * In a way ArgumentCaptor is related to custom argument matchers (see javadoc for {@link ArgumentMatcher} class). * Both techniques can be used for making sure certain arguments where passed to mocks. * However, ArgumentCaptor may be a better fit if: * <ul> * <li>custom argument matcher is not likely to be reused</li> * <li>you just need it to assert on argument values to complete verification</li> * </ul> * Custom argument matchers via {@link ArgumentMatcher} are usually better for stubbing. * * * * * <h3 id="16">16. <a class="meaningful_link" href="#partial_mocks">Real partial mocks</a> (Since 1.8.0)</h3> * * Finally, after many internal debates & discussions on the mailing list, partial mock support was added to Mockito. * Previously we considered partial mocks as code smells. However, we found a legitimate use case for partial mocks - more reading: * <a href="http://monkeyisland.pl/2009/01/13/subclass-and-override-vs-partial-mocking-vs-refactoring">here</a> * <p> * <b>Before release 1.8</b> <code>spy()</code> was not producing real partial mocks and it was confusing for some users. * Read more about spying: <a href="#13">here</a> or in javadoc for {@link Mockito#spy(Object)} method. * <p> * <pre class="code"><code class="java"> * //you can create partial mock with spy() method: * List list = spy(new LinkedList()); * * //you can enable partial mock capabilities selectively on mocks: * Foo mock = mock(Foo.class); * //Be sure the real implementation is 'safe'. * //If real implementation throws exceptions or depends on specific state of the object then you're in trouble. * when(mock.someMethod()).thenCallRealMethod(); * </code></pre> * * As usual you are going to read <b>the partial mock warning</b>: * Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven & well-designed code. * * * * * <h3 id="17">17. <a class="meaningful_link" href="#resetting_mocks">Resetting mocks</a> (Since 1.8.0)</h3> * * Smart Mockito users hardly use this feature because they know it could be a sign of poor tests. * Normally, you don't need to reset your mocks, just create new mocks for each test method. * <p> * Instead of <code>reset()</code> please consider writing simple, small and focused test methods over lengthy, over-specified tests. 
* <b>First potential code smell is <code>reset()</code> in the middle of the test method.</b> This probably means you're testing too much. * Follow the whisper of your test methods: "Please keep us small & focused on single behavior". * There are several threads about it on mockito mailing list. * <p> * The only reason we added <code>reset()</code> method is to * make it possible to work with container-injected mocks. * For more information see FAQ (<a href="https://github.com/mockito/mockito/wiki/FAQ">here</a>). * <p> * <b>Don't harm yourself.</b> <code>reset()</code> in the middle of the test method is a code smell (you're probably testing too much). * <pre class="code"><code class="java"> * List mock = mock(List.class); * when(mock.size()).thenReturn(10); * mock.add(1); * * reset(mock); * //at this point the mock forgot any interactions & stubbing * </code></pre> * * * * * <h3 id="18">18. <a class="meaningful_link" href="#framework_validation">Troubleshooting & validating framework usage</a> (Since 1.8.0)</h3> * * First of all, in case of any trouble, I encourage you to read the Mockito FAQ: * <a href="https://github.com/mockito/mockito/wiki/FAQ">https://github.com/mockito/mockito/wiki/FAQ</a> * <p> * In case of questions you may also post to mockito mailing list: * <a href="http://groups.google.com/group/mockito">http://groups.google.com/group/mockito</a> * <p> * Next, you should know that Mockito validates if you use it correctly <b>all the time</b>. * However, there's a gotcha so please read the javadoc for {@link Mockito#validateMockitoUsage()} * * * * * <h3 id="19">19. <a class="meaningful_link" href="#bdd_mockito">Aliases for behavior driven development</a> (Since 1.8.0)</h3> * * Behavior Driven Development style of writing tests uses <b>//given //when //then</b> comments as fundamental parts of your test methods. * This is exactly how we write our tests and we warmly encourage you to do so! * <p> * Start learning about BDD here: <a href="http://en.wikipedia.org/wiki/Behavior_Driven_Development">http://en.wikipedia.org/wiki/Behavior_Driven_Development</a> * <p> * The problem is that current stubbing api with canonical role of <b>when</b> word does not integrate nicely with <b>//given //when //then</b> comments. * It's because stubbing belongs to <b>given</b> component of the test and not to the <b>when</b> component of the test. * Hence {@link BDDMockito} class introduces an alias so that you stub method calls with {@link BDDMockito#given(Object)} method. * Now it really nicely integrates with the <b>given</b> component of a BDD style test! * <p> * Here is how the test might look like: * <pre class="code"><code class="java"> * import static org.mockito.BDDMockito.*; * * Seller seller = mock(Seller.class); * Shop shop = new Shop(seller); * * public void shouldBuyBread() throws Exception { * //given * given(seller.askForBread()).willReturn(new Bread()); * * //when * Goods goods = shop.buyBread(); * * //then * assertThat(goods, containBread()); * } * </code></pre> * * * * * <h3 id="20">20. <a class="meaningful_link" href="#serializable_mocks">Serializable mocks</a> (Since 1.8.1)</h3> * * Mocks can be made serializable. With this feature you can use a mock in a place that requires dependencies to be serializable. * <p> * WARNING: This should be rarely used in unit testing. * <p> * The behaviour was implemented for a specific use case of a BDD spec that had an unreliable external dependency. 
This * was in a web environment and the objects from the external dependency were being serialized to pass between layers. * <p> * To create serializable mock use {@link MockSettings#serializable()}: * <pre class="code"><code class="java"> * List serializableMock = mock(List.class, withSettings().serializable()); * </code></pre> * <p> * The mock can be serialized assuming all the normal <a href='http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html'> * serialization requirements</a> are met by the class. * <p> * Making a real object spy serializable is a bit more effort as the spy(...) method does not have an overloaded version * which accepts MockSettings. No worries, you will hardly ever use it. * * <pre class="code"><code class="java"> * List&lt;Object&gt; list = new ArrayList&lt;Object&gt;(); * List&lt;Object&gt; spy = mock(ArrayList.class, withSettings() * .spiedInstance(list) * .defaultAnswer(CALLS_REAL_METHODS) * .serializable()); * </code></pre> * * * * * <h3 id="21">21. New annotations: <a class="meaningful_link" href="#captor_annotation"><code>&#064;Captor</code></a>, * <a class="meaningful_link" href="#spy_annotation"><code>&#064;Spy</code></a>, * <a class="meaningful_link" href="#injectmocks_annotation"><code>&#064;InjectMocks</code></a> (Since 1.8.3)</h3> * * <p> * Release 1.8.3 brings new annotations that may be helpful on occasion: * * <ul> * <li>&#064;{@link Captor} simplifies creation of {@link ArgumentCaptor} * - useful when the argument to capture is a nasty generic class and you want to avoid compiler warnings * <li>&#064;{@link Spy} - you can use it instead {@link Mockito#spy(Object)}. * <li>&#064;{@link InjectMocks} - injects mock or spy fields into tested object automatically. * </ul> * * <p> * Note that &#064;{@link InjectMocks} can also be used in combination with the &#064;{@link Spy} annotation, it means * that Mockito will inject mocks into the partial mock under test. This complexity is another good reason why you * should only use partial mocks as a last resort. See point 16 about partial mocks. * * <p> * All new annotations are <b>*only*</b> processed on {@link MockitoAnnotations#initMocks(Object)}. * Just like for &#064;{@link Mock} annotation you can use the built-in runner: {@link MockitoJUnitRunner} or rule: * {@link MockitoRule}. * <p> * * * * * <h3 id="22">22. <a class="meaningful_link" href="#verification_timeout">Verification with timeout</a> (Since 1.8.5)</h3> * <p> * Allows verifying with timeout. It causes a verify to wait for a specified period of time for a desired * interaction rather than fails immediately if had not already happened. May be useful for testing in concurrent * conditions. * <p> * This feature should be used rarely - figure out a better way of testing your multi-threaded system. * <p> * Not yet implemented to work with InOrder verification. * <p> * Examples: * <p> * <pre class="code"><code class="java"> * //passes when someMethod() is called within given time span * verify(mock, timeout(100)).someMethod(); * //above is an alias to: * verify(mock, timeout(100).times(1)).someMethod(); * * //passes when someMethod() is called <b>*exactly*</b> 2 times within given time span * verify(mock, timeout(100).times(2)).someMethod(); * * //passes when someMethod() is called <b>*at least*</b> 2 times within given time span * verify(mock, timeout(100).atLeast(2)).someMethod(); * * //verifies someMethod() within given time span using given verification mode * //useful only if you have your own custom verification modes. 
* verify(mock, new Timeout(100, yourOwnVerificationMode)).someMethod(); * </code></pre> * * * * * <h3 id="23">23. <a class="meaningful_link" href="#automatic_instantiation">Automatic instantiation of <code>&#064;Spies</code>, * <code>&#064;InjectMocks</code></a> and <a class="meaningful_link" href="#constructor_injection">constructor injection goodness</a> (Since 1.9.0)</h3> * * <p> * Mockito will now try to instantiate &#064;{@link Spy} and will instantiate &#064;{@link InjectMocks} fields * using <b>constructor</b> injection, <b>setter</b> injection, or <b>field</b> injection. * <p> * To take advantage of this feature you need to use {@link MockitoAnnotations#initMocks(Object)}, {@link MockitoJUnitRunner} * or {@link MockitoRule}. * <p> * Read more about available tricks and the rules of injection in the javadoc for {@link InjectMocks} * <pre class="code"><code class="java"> * //instead: * &#064;Spy BeerDrinker drinker = new BeerDrinker(); * //you can write: * &#064;Spy BeerDrinker drinker; * * //same applies to &#064;InjectMocks annotation: * &#064;InjectMocks LocalPub; * </code></pre> * * * * * <h3 id="24">24. <a class="meaningful_link" href="#one_liner_stub">One-liner stubs</a> (Since 1.9.0)</h3> * <p> * Mockito will now allow you to create mocks when stubbing. * Basically, it allows to create a stub in one line of code. * This can be helpful to keep test code clean. * For example, some boring stub can be created & stubbed at field initialization in a test: * <pre class="code"><code class="java"> * public class CarTest { * Car boringStubbedCar = when(mock(Car.class).shiftGear()).thenThrow(EngineNotStarted.class).getMock(); * * &#064;Test public void should... {} * </code></pre> * * * * * <h3 id="25">25. <a class="meaningful_link" href="#ignore_stubs_verification">Verification ignoring stubs</a> (Since 1.9.0)</h3> * <p> * Mockito will now allow to ignore stubbing for the sake of verification. * Sometimes useful when coupled with <code>verifyNoMoreInteractions()</code> or verification <code>inOrder()</code>. * Helps avoiding redundant verification of stubbed calls - typically we're not interested in verifying stubs. * <p> * <b>Warning</b>, <code>ignoreStubs()</code> might lead to overuse of verifyNoMoreInteractions(ignoreStubs(...)); * Bear in mind that Mockito does not recommend bombarding every test with <code>verifyNoMoreInteractions()</code> * for the reasons outlined in javadoc for {@link Mockito#verifyNoMoreInteractions(Object...)} * <p>Some examples: * <pre class="code"><code class="java"> * verify(mock).foo(); * verify(mockTwo).bar(); * * //ignores all stubbed methods: * verifyNoMoreInteractions(ignoreStubs(mock, mockTwo)); * * //creates InOrder that will ignore stubbed * InOrder inOrder = inOrder(ignoreStubs(mock, mockTwo)); * inOrder.verify(mock).foo(); * inOrder.verify(mockTwo).bar(); * inOrder.verifyNoMoreInteractions(); * </code></pre> * <p> * Advanced examples and more details can be found in javadoc for {@link Mockito#ignoreStubs(Object...)} * * * * * <h3 id="26">26. <a class="meaningful_link" href="#mocking_details">Mocking details</a> (Since 1.9.5)</h3> * <p> * To identify whether a particular object is a mock or a spy: * <pre class="code"><code class="java"> * Mockito.mockingDetails(someObject).isMock(); * Mockito.mockingDetails(someObject).isSpy(); * </code></pre> * Both the {@link MockingDetails#isMock} and {@link MockingDetails#isSpy()} methods return <code>boolean</code>. 
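 * For instance, a minimal sketch (the spied list is only an illustration): * <pre class="code"><code class="java"> * List&lt;String&gt; spy = spy(new ArrayList&lt;String&gt;()); * * //both of the following return true for a spy: * Mockito.mockingDetails(spy).isMock(); * Mockito.mockingDetails(spy).isSpy(); * </code></pre>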
* As a spy is just a different kind of mock, <code>isMock()</code> returns true if the object is a spy. * In future Mockito versions MockingDetails may grow and provide other useful information about the mock, * e.g. invocations, stubbing info, etc. * * * * * <h3 id="27">27. <a class="meaningful_link" href="#delegating_call_to_real_instance">Delegate calls to real instance</a> (Since 1.9.5)</h3> * * <p>Useful for spies or partial mocks of objects <strong>that are difficult to mock or spy</strong> using the usual spy API. * Since Mockito 1.10.11, the delegate may or may not be of the same type as the mock. * If the type is different, a matching method needs to be found on delegate type otherwise an exception is thrown. * * Possible use cases for this feature: * <ul> * <li>Final classes but with an interface</li> * <li>Already custom proxied object</li> * <li>Special objects with a finalize method, i.e. to avoid executing it 2 times</li> * </ul> * * <p>The difference with the regular spy: * <ul> * <li> * The regular spy ({@link #spy(Object)}) contains <strong>all</strong> state from the spied instance * and the methods are invoked on the spy. The spied instance is only used at mock creation to copy the state from. * If you call a method on a regular spy and it internally calls other methods on this spy, those calls are remembered * for verifications, and they can be effectively stubbed. * </li> * <li> * The mock that delegates simply delegates all methods to the delegate. * The delegate is used all the time as methods are delegated onto it. * If you call a method on a mock that delegates and it internally calls other methods on this mock, * those calls are <strong>not</strong> remembered for verifications, stubbing does not have effect on them, too. * Mock that delegates is less powerful than the regular spy but it is useful when the regular spy cannot be created. * </li> * </ul> * * <p> * See more information in docs for {@link AdditionalAnswers#delegatesTo(Object)}. * * * * * <h3 id="28">28. <a class="meaningful_link" href="#mock_maker_plugin"><code>MockMaker</code> API</a> (Since 1.9.5)</h3> * <p>Driven by requirements and patches from Google Android guys Mockito now offers an extension point * that allows replacing the proxy generation engine. By default, Mockito uses <a href="https://github.com/raphw/byte-buddy">Byte Buddy</a> * to create dynamic proxies. * <p>The extension point is for advanced users that want to extend Mockito. For example, it is now possible * to use Mockito for Android testing with a help of <a href="https://github.com/crittercism/dexmaker">dexmaker</a>. * <p>For more details, motivations and examples please refer to * the docs for {@link org.mockito.plugins.MockMaker}. * * * * * <h3 id="29">29. <a class="meaningful_link" href="#BDD_behavior_verification">BDD style verification</a> (Since 1.10.0)</h3> * * Enables Behavior Driven Development (BDD) style verification by starting verification with the BDD <b>then</b> keyword. * * <pre class="code"><code class="java"> * given(dog.bark()).willReturn(2); * * // when * ... * * then(person).should(times(2)).ride(bike); * </code></pre> * * For more information and an example see {@link BDDMockito#then(Object)}} * * * * * <h3 id="30">30. <a class="meaningful_link" href="#spying_abstract_classes">Spying or mocking abstract classes (Since 1.10.12)</a></h3> * * It is now possible to conveniently spy on abstract classes. Note that overusing spies hints at code design smells (see {@link #spy(Object)}). 
* <p> * Previously, spying was only possible on instances of objects. * The new API makes it possible to use a constructor when creating an instance of the mock. * This is particularly useful for mocking abstract classes because the user is no longer required to provide an instance of the abstract class. * At the moment, only a parameter-less constructor is supported; let us know if that is not enough. * * <pre class="code"><code class="java"> * //convenience API, new overloaded spy() method: * SomeAbstract spy = spy(SomeAbstract.class); * * //Robust API, via settings builder: * OtherAbstract spy = mock(OtherAbstract.class, withSettings() * .useConstructor().defaultAnswer(CALLS_REAL_METHODS)); * * //Mocking a non-static inner abstract class: * InnerAbstract spy = mock(InnerAbstract.class, withSettings() * .useConstructor().outerInstance(outerInstance).defaultAnswer(CALLS_REAL_METHODS)); * </code></pre> * * For more information please see {@link MockSettings#useConstructor()}. * * * * * <h3 id="31">31. <a class="meaningful_link" href="#serilization_across_classloader">Mockito mocks can be <em>serialized</em> / <em>deserialized</em> across classloaders (Since 1.10.0)</a></h3> * * Mockito introduces serialization across classloaders. * * Like with any other form of serialization, all types in the mock hierarchy have to be serializable, including answers. * As this serialization mode requires considerably more work, this is an opt-in setting. * * <pre class="code"><code class="java"> * // use regular serialization * mock(Book.class, withSettings().serializable()); * * // use serialization across classloaders * mock(Book.class, withSettings().serializable(ACROSS_CLASSLOADERS)); * </code></pre> * * For more details see {@link MockSettings#serializable(SerializableMode)}. * * * * * <h3 id="32">32. <a class="meaningful_link" href="#better_generic_support_with_deep_stubs">Better generic support with deep stubs (Since 1.10.0)</a></h3> * * Deep stubbing has been improved to find generic information if available in the class. * That means that classes like this can be used without having to mock the behavior. * * <pre class="code"><code class="java"> * class Lines extends ArrayList&lt;Line&gt; { * // ... * } * * lines = mock(Lines.class, RETURNS_DEEP_STUBS); * * // Now Mockito understands this is not an Object but a Line * Line line = lines.iterator().next(); * </code></pre> * * Please note that in most scenarios a mock returning a mock is wrong. * * * * * <h3 id="33">33. <a class="meaningful_link" href="#mockito_junit_rule">Mockito JUnit rule (Since 1.10.17)</a></h3> * * Mockito now offers a JUnit rule. Until now in JUnit there were two ways to initialize fields annotated by Mockito annotations * such as <code>&#064;{@link Mock}</code>, <code>&#064;{@link Spy}</code>, <code>&#064;{@link InjectMocks}</code>, etc. * * <ul> * <li>Annotating the JUnit test class with a <code>&#064;{@link org.junit.runner.RunWith}({@link MockitoJUnitRunner}.class)</code></li> * <li>Invoking <code>{@link MockitoAnnotations#initMocks(Object)}</code> in the <code>&#064;{@link org.junit.Before}</code> method</li> * </ul> * * Now you can choose to use a rule: * * <pre class="code"><code class="java"> * &#064;RunWith(YetAnotherRunner.class) * public class TheTest { * &#064;Rule public MockitoRule mockito = MockitoJUnit.rule(); * // ... * } * </code></pre> * * For more information see {@link MockitoJUnit#rule()}. * * * * * <h3 id="34">34.
<a class="meaningful_link" href="#plugin_switch">Switch <em>on</em> or <em>off</em> plugins (Since 1.10.15)</a></h3> * * An incubating feature has made its way into Mockito that allows toggling a Mockito plugin. * * More information here: {@link org.mockito.plugins.PluginSwitch}. * * * <h3 id="35">35. <a class="meaningful_link" href="#BDD_behavior_verification">Custom verification failure message</a> (Since 2.1.0)</h3> * <p> * Allows specifying a custom message to be printed if verification fails. * <p> * Examples: * <p> * <pre class="code"><code class="java"> * * // will print a custom message on verification failure * verify(mock, description("This will print on failure")).someMethod(); * * // will work with any verification mode * verify(mock, times(2).description("someMethod should be called twice")).someMethod(); * </code></pre> * * <h3 id="36">36. <a class="meaningful_link" href="#Java_8_Lambda_Matching">Java 8 Lambda Matcher Support</a> (Since 2.1.0)</h3> * <p> * You can use Java 8 lambda expressions with {@link ArgumentMatcher} to reduce the dependency on {@link ArgumentCaptor}. * If you need to verify that the input to a function call on a mock was correct, then you would normally * use the {@link ArgumentCaptor} to find the operands used and then do subsequent assertions on them. While * for complex examples this can be useful, it's also long-winded.<p> * Writing a lambda to express the match is quite easy. The argument to your function, when used in conjunction * with argThat, will be passed to the ArgumentMatcher as a strongly typed object, so it is possible * to do anything with it. * <p> * Examples: * <p> * <pre class="code"><code class="java"> * * // verify a list only had strings of a certain length added to it * // note - this will only compile under Java 8 * verify(list, times(2)).add(argThat(string -> string.length() < 5)); * * // Java 7 equivalent - not as neat * verify(list, times(2)).add(argThat(new ArgumentMatcher<String>(){ * public boolean matches(String arg) { * return arg.length() < 5; * } * })); * * // more complex Java 8 example - where you can specify complex verification behaviour functionally * verify(target, times(1)).receiveComplexObject(argThat(obj -> obj.getSubObject().get(0).equals("expected"))); * * // this can also be used when defining the behaviour of a mock under different inputs * // in this case if the input list has fewer than 3 items the mock returns null * when(mock.someMethod(argThat(list -> list.size()<3))).thenReturn(null); * </code></pre> * * <h3 id="37">37. <a class="meaningful_link" href="#Java_8_Custom_Answers">Java 8 Custom Answer Support</a> (Since 2.1.0)</h3> * <p> * As the {@link Answer} interface has just one method it is already possible to implement it in Java 8 using * a lambda expression for very simple situations. The more you need to use the parameters of the method call, * the more you need to typecast the arguments from {@link org.mockito.invocation.InvocationOnMock}. * * <p> * Examples: * <p> * <pre class="code"><code class="java"> * // answer by returning 12 every time * doAnswer(invocation -> 12).when(mock).doSomething(); * * // answer by using one of the parameters - converting into the right * // type as you go - in this case, returning the length of the second string parameter * // as the answer. This gets long-winded quickly, with casting of parameters.
* doAnswer(invocation -> ((String)invocation.getArgument(1)).length()) * .when(mock).doSomething(anyString(), anyString(), anyString()); * </code></pre> * * For convenience it is possible to write custom answers/actions, which use the parameters to the method call, * as Java 8 lambdas. Even in Java 7 and lower these custom answers based on a typed interface can reduce boilerplate. * In particular, this approach will make it easier to test functions which use callbacks. * * The functions answer and answerVoid can be found in {@link AdditionalAnswers}; they create the answer object * using the interfaces in {@link org.mockito.internal.stubbing.answers.AnswerFunctionalInterfaces}. Support is provided * for functions with up to 5 parameters. * * <p> * Examples: * <p> * <pre class="code"><code class="java"> * * // Example interface to be mocked has a function like: * void execute(String operand, Callback callback); * * // the example callback has a function and the class under test * // will depend on the callback being invoked * void receive(String item); * * // Java 8 - style 1 * doAnswer(AdditionalAnswers.<String,Callback>answerVoid((operand, callback) -> callback.receive("dummy"))) * .when(mock).execute(anyString(), any(Callback.class)); * * // Java 8 - style 2 - assuming static import of AdditionalAnswers * doAnswer(answerVoid((String operand, Callback callback) -> callback.receive("dummy"))) * .when(mock).execute(anyString(), any(Callback.class)); * * // Java 8 - style 3 - where the mocking function is a static member of the test class * private static void dummyCallbackImpl(String operation, Callback callback) { * callback.receive("dummy"); * } * * doAnswer(answerVoid(TestClass::dummyCallbackImpl)) * .when(mock).execute(anyString(), any(Callback.class)); * * // Java 7 * doAnswer(answerVoid(new AnswerFunctionalInterfaces.VoidAnswer2<String, Callback>() { * public void answer(String operation, Callback callback) { * callback.receive("dummy"); * }})).when(mock).execute(anyString(), any(Callback.class)); * * // returning a value is possible with the answer() function * // and the non-void version of the functional interfaces * // so if the mock interface had a method like * boolean isSameString(String input1, String input2); * * // this could be mocked * // Java 8 * doAnswer(AdditionalAnswers.<Boolean,String,String>answer((input1, input2) -> input1.equals(input2))) * .when(mock).isSameString(anyString(), anyString()); * * // Java 7 * doAnswer(answer(new AnswerFunctionalInterfaces.Answer2<Boolean, String, String>() { * public Boolean answer(String input1, String input2) { * return input1.equals(input2); * }})).when(mock).isSameString(anyString(), anyString()); * </code></pre> */ @SuppressWarnings("unchecked") public class Mockito extends ArgumentMatchers { static final MockitoCore MOCKITO_CORE = new MockitoCore(); /** * The default <code>Answer</code> of every mock <b>if</b> the mock was not stubbed. * Typically it just returns some empty value. * <p> * {@link Answer} can be used to define the return values of unstubbed invocations. * <p> * This implementation first tries the global configuration. * If there is no global configuration then it uses {@link ReturnsEmptyValues} (returns zeros, empty collections, nulls, etc.). */ public static final Answer<Object> RETURNS_DEFAULTS = Answers.RETURNS_DEFAULTS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)}. * <p> * {@link Answer} can be used to define the return values of unstubbed invocations.
* <p> * This implementation can be helpful when working with legacy code. * Unstubbed methods often return null. If your code uses the object returned by an unstubbed call you get a NullPointerException. * This implementation of Answer <b>returns SmartNull instead of null</b>. * <code>SmartNull</code> gives nicer exception message than NPE because it points out the line where unstubbed method was called. You just click on the stack trace. * <p> * <code>ReturnsSmartNulls</code> first tries to return ordinary return values (see {@link ReturnsMoreEmptyValues}) * then it tries to return SmartNull. If the return type is final then plain null is returned. * <p> * <code>ReturnsSmartNulls</code> will be probably the default return values strategy in Mockito 3.0.0 * <p> * Example: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, RETURNS_SMART_NULLS); * * //calling unstubbed method here: * Stuff stuff = mock.getStuff(); * * //using object returned by unstubbed call: * stuff.doSomething(); * * //Above doesn't yield NullPointerException this time! * //Instead, SmartNullPointerException is thrown. * //Exception's cause links to unstubbed <i>mock.getStuff()</i> - just click on the stack trace. * </code></pre> */ public static final Answer<Object> RETURNS_SMART_NULLS = Answers.RETURNS_SMART_NULLS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)} * <p> * {@link Answer} can be used to define the return values of unstubbed invocations. * <p> * This implementation can be helpful when working with legacy code. * <p> * ReturnsMocks first tries to return ordinary return values (see {@link ReturnsMoreEmptyValues}) * then it tries to return mocks. If the return type cannot be mocked (e.g. is final) then plain null is returned. * <p> */ public static final Answer<Object> RETURNS_MOCKS = Answers.RETURNS_MOCKS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)}. * <p> * Example that shows how deep stub works: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, RETURNS_DEEP_STUBS); * * // note that we're stubbing a chain of methods here: getBar().getName() * when(mock.getBar().getName()).thenReturn("deep"); * * // note that we're chaining method calls: getBar().getName() * assertEquals("deep", mock.getBar().getName()); * </code></pre> * </p> * * <p> * <strong>WARNING: </strong> * This feature should rarely be required for regular clean code! Leave it for legacy code. * Mocking a mock to return a mock, to return a mock, (...), to return something meaningful * hints at violation of Law of Demeter or mocking a value object (a well known anti-pattern). * </p> * * <p> * Good quote I've seen one day on the web: <strong>every time a mock returns a mock a fairy dies</strong>. * </p> * * <p> * Please note that this answer will return existing mocks that matches the stub. This * behavior is ok with deep stubs and allows verification to work on the last mock of the chain. * <pre class="code"><code class="java"> * when(mock.getBar(anyString()).getThingy().getName()).thenReturn("deep"); * * mock.getBar("candy bar").getThingy().getName(); * * assertSame(mock.getBar(anyString()).getThingy().getName(), mock.getBar(anyString()).getThingy().getName()); * verify(mock.getBar("candy bar").getThingy()).getName(); * verify(mock.getBar(anyString()).getThingy()).getName(); * </code></pre> * </p> * * <p> * Verification only works with the last mock in the chain. You can use verification modes. 
* <pre class="code"><code class="java"> * when(person.getAddress(anyString()).getStreet().getName()).thenReturn("deep"); * when(person.getAddress(anyString()).getStreet(Locale.ITALIAN).getName()).thenReturn("deep"); * when(person.getAddress(anyString()).getStreet(Locale.CHINESE).getName()).thenReturn("deep"); * * person.getAddress("the docks").getStreet().getName(); * person.getAddress("the docks").getStreet().getLongName(); * person.getAddress("the docks").getStreet(Locale.ITALIAN).getName(); * person.getAddress("the docks").getStreet(Locale.CHINESE).getName(); * * // note that we are actually referring to the very last mock in the stubbing chain. * InOrder inOrder = inOrder( * person.getAddress("the docks").getStreet(), * person.getAddress("the docks").getStreet(Locale.CHINESE), * person.getAddress("the docks").getStreet(Locale.ITALIAN) * ); * inOrder.verify(person.getAddress("the docks").getStreet(), times(1)).getName(); * inOrder.verify(person.getAddress("the docks").getStreet()).getLongName(); * inOrder.verify(person.getAddress("the docks").getStreet(Locale.ITALIAN), atLeast(1)).getName(); * inOrder.verify(person.getAddress("the docks").getStreet(Locale.CHINESE)).getName(); * </code></pre> * </p> * * <p> * How do deep stubs work internally? * <pre class="code"><code class="java"> * //this: * Foo mock = mock(Foo.class, RETURNS_DEEP_STUBS); * when(mock.getBar().getName()).thenReturn("deep"); * * //is equivalent to * Foo foo = mock(Foo.class); * Bar bar = mock(Bar.class); * when(foo.getBar()).thenReturn(bar); * when(bar.getName()).thenReturn("deep"); * </code></pre> * </p> * * <p> * This feature will not work when any return type of methods included in the chain cannot be mocked * (for example: is a primitive or a final class). This is because of the java type system. * </p> */ public static final Answer<Object> RETURNS_DEEP_STUBS = Answers.RETURNS_DEEP_STUBS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)} * <p> * {@link Answer} can be used to define the return values of unstubbed invocations. * <p> * This implementation can be helpful when working with legacy code. * When this implementation is used, unstubbed methods will delegate to the real implementation. * This is a way to create a partial mock object that calls real methods by default. * <p> * As usual you are going to read <b>the partial mock warning</b>: * Object oriented programming is more or less about tackling complexity by dividing it into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come in handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven & well-designed code. * <p> * Example: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, CALLS_REAL_METHODS); * * // this calls the real implementation of Foo.getSomething() * value = mock.getSomething(); * * when(mock.getSomething()).thenReturn(fakeValue); * * // now fakeValue is returned * value = mock.getSomething(); * </code></pre> */ public static final Answer<Object> CALLS_REAL_METHODS = Answers.CALLS_REAL_METHODS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)}.
* * Allows Builder mocks to return itself whenever a method is invoked that returns a Type equal * to the class or a superclass. * * <p><b>Keep in mind this answer uses the return type of a method. * If this type is assignable to the class of the mock, it will return the mock. * Therefore if you have a method returning a superclass (for example {@code Object}) it will match and return the mock.</b></p> * * Consider a HttpBuilder used in a HttpRequesterWithHeaders. * * <pre class="code"><code class="java"> * public class HttpRequesterWithHeaders { * * private HttpBuilder builder; * * public HttpRequesterWithHeaders(HttpBuilder builder) { * this.builder = builder; * } * * public String request(String uri) { * return builder.withUrl(uri) * .withHeader("Content-type: application/json") * .withHeader("Authorization: Bearer") * .request(); * } * } * * private static class HttpBuilder { * * private String uri; * private List&lt;String&gt; headers; * * public HttpBuilder() { * this.headers = new ArrayList&lt;String&gt;(); * } * * public HttpBuilder withUrl(String uri) { * this.uri = uri; * return this; * } * * public HttpBuilder withHeader(String header) { * this.headers.add(header); * return this; * } * * public String request() { * return uri + headers.toString(); * } * } * </code></pre> * * The following test will succeed * * <pre><code> * &#064;Test * public void use_full_builder_with_terminating_method() { * HttpBuilder builder = mock(HttpBuilder.class, RETURNS_SELF); * HttpRequesterWithHeaders requester = new HttpRequesterWithHeaders(builder); * String response = "StatusCode: 200"; * * when(builder.request()).thenReturn(response); * * assertThat(requester.request("URI")).isEqualTo(response); * } * </code></pre> */ public static final Answer<Object> RETURNS_SELF = Answers.RETURNS_SELF; /** * Creates mock object of given class or interface. * <p> * See examples in javadoc for {@link Mockito} class * * @param classToMock class or interface to mock * @return mock object */ public static <T> T mock(Class<T> classToMock) { return mock(classToMock, withSettings().defaultAnswer(RETURNS_DEFAULTS)); } /** * Specifies mock name. Naming mocks can be helpful for debugging - the name is used in all verification errors. * <p> * Beware that naming mocks is not a solution for complex code which uses too many mocks or collaborators. * <b>If you have too many mocks then refactor the code</b> so that it's easy to test/debug without necessity of naming mocks. * <p> * <b>If you use <code>&#064;Mock</code> annotation then you've got naming mocks for free!</b> <code>&#064;Mock</code> uses field name as mock name. {@link Mock Read more.} * <p> * * See examples in javadoc for {@link Mockito} class * * @param classToMock class or interface to mock * @param name of the mock * @return mock object */ public static <T> T mock(Class<T> classToMock, String name) { return mock(classToMock, withSettings() .name(name) .defaultAnswer(RETURNS_DEFAULTS)); } /** * Returns a MockingDetails instance that enables inspecting a particular object for Mockito related information. * Can be used to find out if given object is a Mockito mock * or to find out if a given mock is a spy or mock. * <p> * In future Mockito versions MockingDetails may grow and provide other useful information about the mock, * e.g. invocations, stubbing info, etc. * * @param toInspect - object to inspect. null input is allowed. * @return A {@link org.mockito.MockingDetails} instance. 
* @since 1.9.5 */ public static MockingDetails mockingDetails(Object toInspect) { return MOCKITO_CORE.mockingDetails(toInspect); } /** * Creates a mock with a specified strategy for its answers to interactions. * It's quite an advanced feature and typically you don't need it to write decent tests. * However, it can be helpful when working with legacy systems. * <p> * It is the default answer so it will be used <b>only when you don't</b> stub the method call. * * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, RETURNS_SMART_NULLS); * Foo mockTwo = mock(Foo.class, new YourOwnAnswer()); * </code></pre> * * <p>See examples in javadoc for {@link Mockito} class</p> * * @param classToMock class or interface to mock * @param defaultAnswer default answer for unstubbed methods * * @return mock object */ public static <T> T mock(Class<T> classToMock, Answer defaultAnswer) { return mock(classToMock, withSettings().defaultAnswer(defaultAnswer)); } /** * Creates a mock with some non-standard settings. * <p> * The number of configuration points for a mock grows * so we need a fluent way to introduce new configuration without adding more and more overloaded Mockito.mock() methods. * Hence {@link MockSettings}. * <pre class="code"><code class="java"> * Listener mock = mock(Listener.class, withSettings() * .name("firstListener").defaultAnswer(RETURNS_SMART_NULLS)); * </code></pre> * <b>Use it carefully and occasionally</b>. What might be the reason your test needs non-standard mocks? * Is the code under test so complicated that it requires non-standard mocks? * Wouldn't you prefer to refactor the code under test so it is testable in a simple way? * <p> * See also {@link Mockito#withSettings()} * <p> * See examples in javadoc for {@link Mockito} class * * @param classToMock class or interface to mock * @param mockSettings additional mock settings * @return mock object */ public static <T> T mock(Class<T> classToMock, MockSettings mockSettings) { return MOCKITO_CORE.mock(classToMock, mockSettings); } /** * Creates a spy of the real object. The spy calls <b>real</b> methods unless they are stubbed. * <p> * Real spies should be used <b>carefully and occasionally</b>, for example when dealing with legacy code. * <p> * As usual you are going to read <b>the partial mock warning</b>: * Object oriented programming tackles complexity by dividing the complexity into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come in handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven & well-designed code.
* <p> * Example: * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //optionally, you can stub out some methods: * when(spy.size()).thenReturn(100); * * //using the spy calls <b>real</b> methods * spy.add("one"); * spy.add("two"); * * //prints "one" - the first element of a list * System.out.println(spy.get(0)); * * //size() method was stubbed - 100 is printed * System.out.println(spy.size()); * * //optionally, you can verify * verify(spy).add("one"); * verify(spy).add("two"); * </code></pre> * * <h4>Important gotcha on spying real objects!</h4> * <ol> * <li>Sometimes it's impossible or impractical to use {@link Mockito#when(Object)} for stubbing spies. * Therefore for spies it is recommended to always use <code>doReturn</code>|<code>Answer</code>|<code>Throw()</code>|<code>CallRealMethod</code> * family of methods for stubbing. Example: * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo"); * * //You have to use doReturn() for stubbing * doReturn("foo").when(spy).get(0); * </code></pre> * </li> * * <li>Mockito <b>*does not*</b> delegate calls to the passed real instance, instead it actually creates a copy of it. * So if you keep the real instance and interact with it, don't expect the spy to be aware of those interactions * and their effect on the real instance's state. * The corollary is that when an <b>*unstubbed*</b> method is called <b>*on the spy*</b> but <b>*not on the real instance*</b>, * you won't see any effects on the real instance.</li> * * <li>Watch out for final methods. * Mockito doesn't mock final methods so the bottom line is: when you spy on real objects + you try to stub a final method = trouble. * Also you won't be able to verify those methods either. * </li> * </ol> * <p> * See examples in javadoc for {@link Mockito} class * * <p>Note that the spy won't have any annotations of the spied type, because CGLIB won't rewrite them. * It may be troublesome for code that relies on the spy to have these annotations.</p> * * * @param object * to spy on * @return a spy of the real object */ public static <T> T spy(T object) { return MOCKITO_CORE.mock((Class<T>) object.getClass(), withSettings() .spiedInstance(object) .defaultAnswer(CALLS_REAL_METHODS)); } /** * Please refer to the documentation of {@link #spy(Object)}. * Overusing spies hints at code design smells. * <p> * This method, in contrast to the original {@link #spy(Object)}, creates a spy based on a class instead of an object. * Sometimes it is more convenient to create a spy based on the class and avoid providing an instance of a spied object. * This is particularly useful for spying on abstract classes because they cannot be instantiated. * See also {@link MockSettings#useConstructor()}.
* <p> * Examples: * <pre class="code"><code class="java"> * SomeAbstract spy = spy(SomeAbstract.class); * * //Robust API, via settings builder: * OtherAbstract spy = mock(OtherAbstract.class, withSettings() * .useConstructor().defaultAnswer(CALLS_REAL_METHODS)); * * //Mocking a non-static inner abstract class: * InnerAbstract spy = mock(InnerAbstract.class, withSettings() * .useConstructor().outerInstance(outerInstance).defaultAnswer(CALLS_REAL_METHODS)); * </code></pre> * * @param classToSpy the class to spy * @param <T> type of the spy * @return a spy of the provided class * @since 1.10.12 */ @Incubating public static <T> T spy(Class<T> classToSpy) { return MOCKITO_CORE.mock(classToSpy, withSettings() .useConstructor() .defaultAnswer(CALLS_REAL_METHODS)); } /** * Enables stubbing methods. Use it when you want the mock to return a particular value when a particular method is called. * <p> * Simply put: "<b>When</b> the x method is called <b>then</b> return y". * * <p> * Examples: * * <pre class="code"><code class="java"> * <b>when</b>(mock.someMethod()).<b>thenReturn</b>(10); * * //you can use flexible argument matchers, e.g: * when(mock.someMethod(<b>anyString()</b>)).thenReturn(10); * * //setting exception to be thrown: * when(mock.someMethod("some arg")).thenThrow(new RuntimeException()); * * //you can set different behavior for consecutive method calls. * //Last stubbing (e.g: thenReturn("foo")) determines the behavior of further consecutive calls. * when(mock.someMethod("some arg")) * .thenThrow(new RuntimeException()) * .thenReturn("foo"); * * //Alternative, shorter version for consecutive stubbing: * when(mock.someMethod("some arg")) * .thenReturn("one", "two"); * //is the same as: * when(mock.someMethod("some arg")) * .thenReturn("one") * .thenReturn("two"); * * //shorter version for consecutive method calls throwing exceptions: * when(mock.someMethod("some arg")) * .thenThrow(new RuntimeException(), new NullPointerException()); * * </code></pre> * * For stubbing void methods with throwables see: {@link Mockito#doThrow(Throwable...)} * <p> * Stubbing can be overridden: for example common stubbing can go to fixture * setup but the test methods can override it. * Please note that overriding stubbing is a potential code smell that points out too much stubbing. * <p> * Once stubbed, the method will always return the stubbed value regardless * of how many times it is called. * <p> * The last stubbing is more important - when you stub the same method with * the same arguments many times. * <p> * Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>. * Let's say you've stubbed <code>foo.bar()</code>. * If your code cares what <code>foo.bar()</code> returns then something else breaks (often before even <code>verify()</code> gets executed). * If your code doesn't care what <code>foo.bar()</code> returns then it should not be stubbed. * Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>. * * <p> * See examples in javadoc for {@link Mockito} class * @param methodCall method to be stubbed * @return OngoingStubbing object used to stub fluently. * <strong>Do not</strong> create a reference to this returned object. */ public static <T> OngoingStubbing<T> when(T methodCall) { return MOCKITO_CORE.when(methodCall); } /** * Verifies certain behavior <b>happened once</b>.
* <p> * Alias to <code>verify(mock, times(1))</code> E.g: * <pre class="code"><code class="java"> * verify(mock).someMethod("some arg"); * </code></pre> * Above is equivalent to: * <pre class="code"><code class="java"> * verify(mock, times(1)).someMethod("some arg"); * </code></pre> * <p> * Arguments passed are compared using <code>equals()</code> method. * Read about {@link ArgumentCaptor} or {@link ArgumentMatcher} to find out other ways of matching / asserting arguments passed. * <p> * Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>. * Let's say you've stubbed <code>foo.bar()</code>. * If your code cares what <code>foo.bar()</code> returns then something else breaks(often before even <code>verify()</code> gets executed). * If your code doesn't care what <code>get(0)</code> returns then it should not be stubbed. * Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>. * * <p> * See examples in javadoc for {@link Mockito} class * * @param mock to be verified * @return mock object itself */ public static <T> T verify(T mock) { return MOCKITO_CORE.verify(mock, times(1)); } /** * Verifies certain behavior happened at least once / exact number of times / never. E.g: * <pre class="code"><code class="java"> * verify(mock, times(5)).someMethod("was called five times"); * * verify(mock, atLeast(2)).someMethod("was called at least two times"); * * //you can use flexible argument matchers, e.g: * verify(mock, atLeastOnce()).someMethod(<b>anyString()</b>); * </code></pre> * * <b>times(1) is the default</b> and can be omitted * <p> * Arguments passed are compared using <code>equals()</code> method. * Read about {@link ArgumentCaptor} or {@link ArgumentMatcher} to find out other ways of matching / asserting arguments passed. * <p> * * @param mock to be verified * @param mode times(x), atLeastOnce() or never() * * @return mock object itself */ public static <T> T verify(T mock, VerificationMode mode) { return MOCKITO_CORE.verify(mock, mode); } /** * Smart Mockito users hardly use this feature because they know it could be a sign of poor tests. * Normally, you don't need to reset your mocks, just create new mocks for each test method. * <p> * Instead of <code>#reset()</code> please consider writing simple, small and focused test methods over lengthy, over-specified tests. * <b>First potential code smell is <code>reset()</code> in the middle of the test method.</b> This probably means you're testing too much. * Follow the whisper of your test methods: "Please keep us small & focused on single behavior". * There are several threads about it on mockito mailing list. * <p> * The only reason we added <code>reset()</code> method is to * make it possible to work with container-injected mocks. * For more information see the FAQ (<a href="https://github.com/mockito/mockito/wiki/FAQ">here</a>). * <p> * <b>Don't harm yourself.</b> <code>reset()</code> in the middle of the test method is a code smell (you're probably testing too much). * <pre class="code"><code class="java"> * List mock = mock(List.class); * when(mock.size()).thenReturn(10); * mock.add(1); * * reset(mock); * //at this point the mock forgot any interactions & stubbing * </code></pre> * * @param <T> The Type of the mocks * @param mocks to be reset */ public static <T> void reset(T ... mocks) { MOCKITO_CORE.reset(mocks); } /** * Use this method in order to only clear invocations, when stubbing is non-trivial. 
Use-cases can be: * <ul> * <li>You are using a dependency injection framework to inject your mocks.</li> * <li>The mock is used in a stateful scenario. For example a class is Singleton which depends on your mock.</li> * </ul> * * <b>Try to avoid this method at all costs. Only clear invocations if you are unable to efficiently test your program.</b> * @param <T> The type of the mocks * @param mocks The mocks to clear the invocations for */ public static <T> void clearInvocations(T ... mocks) { MOCKITO_CORE.clearInvocations(mocks); } /** * Checks if any of given mocks has any unverified interaction. * <p> * You can use this method after you verified your mocks - to make sure that nothing * else was invoked on your mocks. * <p> * See also {@link Mockito#never()} - it is more explicit and communicates the intent well. * <p> * Stubbed invocations (if called) are also treated as interactions. * <p> * A word of <b>warning</b>: * Some users who did a lot of classic, expect-run-verify mocking tend to use <code>verifyNoMoreInteractions()</code> very often, even in every test method. * <code>verifyNoMoreInteractions()</code> is not recommended to use in every test method. * <code>verifyNoMoreInteractions()</code> is a handy assertion from the interaction testing toolkit. Use it only when it's relevant. * Abusing it leads to overspecified, less maintainable tests. You can find further reading * <a href="http://monkeyisland.pl/2008/07/12/should-i-worry-about-the-unexpected/">here</a>. * <p> * This method will also detect unverified invocations that occurred before the test method, * for example: in <code>setUp()</code>, <code>&#064;Before</code> method or in constructor. * Consider writing nice code that makes interactions only in test methods. * * <p> * Example: * * <pre class="code"><code class="java"> * //interactions * mock.doSomething(); * mock.doSomethingUnexpected(); * * //verification * verify(mock).doSomething(); * * //following will fail because 'doSomethingUnexpected()' is unexpected * verifyNoMoreInteractions(mock); * * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified */ public static void verifyNoMoreInteractions(Object... mocks) { MOCKITO_CORE.verifyNoMoreInteractions(mocks); } /** * Verifies that no interactions happened on given mocks. * <pre class="code"><code class="java"> * verifyZeroInteractions(mockOne, mockTwo); * </code></pre> * This method will also detect invocations * that occurred before the test method, for example: in <code>setUp()</code>, <code>&#064;Before</code> method or in constructor. * Consider writing nice code that makes interactions only in test methods. * <p> * See also {@link Mockito#never()} - it is more explicit and communicates the intent well. * <p> * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified */ public static void verifyZeroInteractions(Object... mocks) { MOCKITO_CORE.verifyNoMoreInteractions(mocks); } /** * Use <code>doThrow()</code> when you want to stub the void method with an exception. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(new RuntimeException()).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doThrow(Throwable... 
toBeThrown) { return MOCKITO_CORE.stubber().doThrow(toBeThrown); } /** * Use <code>doThrow()</code> when you want to stub the void method with an exception. * <p> * A new exception instance will be created for each method invocation. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(RuntimeException.class).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ public static Stubber doThrow(Class<? extends Throwable> toBeThrown) { return MOCKITO_CORE.stubber().doThrow(toBeThrown); } /** * Same as {@link #doThrow(Class)} but sets consecutive exception classes to be thrown. Remember to use * <code>doThrow()</code> when you want to stub the void method to throw several exception of specified class. * <p> * A new exception instance will be created for each method invocation. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(RuntimeException.class, BigFailure.class).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @param toBeThrownNext next to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ // Additional method helps users of JDK7+ to hide heap pollution / unchecked generics array creation @SuppressWarnings ({"unchecked", "varargs"}) public static Stubber doThrow(Class<? extends Throwable> toBeThrown, Class<? extends Throwable>... toBeThrownNext) { return MOCKITO_CORE.stubber().doThrow(toBeThrown, toBeThrownNext); } /** * Use <code>doCallRealMethod()</code> when you want to call the real implementation of a method. * <p> * As usual you are going to read <b>the partial mock warning</b>: * Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven & well-designed code. * <p> * See also javadoc {@link Mockito#spy(Object)} to find out more about partial mocks. * <b>Mockito.spy() is a recommended way of creating partial mocks.</b> * The reason is it guarantees real methods are called against correctly constructed object because you're responsible for constructing the object passed to spy() method. 
* <p> * Example: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class); * doCallRealMethod().when(mock).someVoidMethod(); * * // this will call the real implementation of Foo.someVoidMethod() * mock.someVoidMethod(); * </code></pre> * <p> * See examples in javadoc for {@link Mockito} class * * @return stubber - to select a method for stubbing * @since 1.9.5 */ public static Stubber doCallRealMethod() { return MOCKITO_CORE.stubber().doCallRealMethod(); } /** * Use <code>doAnswer()</code> when you want to stub a void method with generic {@link Answer}. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doAnswer(new Answer() { * public Object answer(InvocationOnMock invocation) { * Object[] args = invocation.getArguments(); * Mock mock = invocation.getMock(); * return null; * }}) * .when(mock).someMethod(); * </code></pre> * <p> * See examples in javadoc for {@link Mockito} class * * @param answer to answer when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doAnswer(Answer answer) { return MOCKITO_CORE.stubber().doAnswer(answer); } /** * Use <code>doNothing()</code> for setting void methods to do nothing. <b>Beware that void methods on mocks do nothing by default!</b> * However, there are rare situations when doNothing() comes handy: * <p> * <ol> * <li>Stubbing consecutive calls on a void method: * <pre class="code"><code class="java"> * doNothing(). * doThrow(new RuntimeException()) * .when(mock).someVoidMethod(); * * //does nothing the first time: * mock.someVoidMethod(); * * //throws RuntimeException the next time: * mock.someVoidMethod(); * </code></pre> * </li> * <li>When you spy real objects and you want the void method to do nothing: * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //let's make clear() do nothing * doNothing().when(spy).clear(); * * spy.add("one"); * * //clear() does nothing, so the list still contains "one" * spy.clear(); * </code></pre> * </li> * </ol> * <p> * See examples in javadoc for {@link Mockito} class * * @return stubber - to select a method for stubbing */ public static Stubber doNothing() { return MOCKITO_CORE.stubber().doNothing(); } /** * Use <code>doReturn()</code> in those rare occasions when you cannot use {@link Mockito#when(Object)}. * <p> * <b>Beware that {@link Mockito#when(Object)} is always recommended for stubbing because it is argument type-safe * and more readable</b> (especially when stubbing consecutive calls). * <p> * Here are those rare occasions when doReturn() comes handy: * <p> * * <ol> * <li>When spying real objects and calling real methods on a spy brings side effects * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo"); * * //You have to use doReturn() for stubbing: * doReturn("foo").when(spy).get(0); * </code></pre> * </li> * * <li>Overriding a previous exception-stubbing: * <pre class="code"><code class="java"> * when(mock.foo()).thenThrow(new RuntimeException()); * * //Impossible: the exception-stubbed foo() method is called so RuntimeException is thrown. 
* when(mock.foo()).thenReturn("bar"); * * //You have to use doReturn() for stubbing: * doReturn("bar").when(mock).foo(); * </code></pre> * </li> * </ol> * * Above scenarios shows a tradeoff of Mockito's elegant syntax. Note that the scenarios are very rare, though. * Spying should be sporadic and overriding exception-stubbing is very rare. Not to mention that in general * overridding stubbing is a potential code smell that points out too much stubbing. * <p> * See examples in javadoc for {@link Mockito} class * * @param toBeReturned to be returned when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doReturn(Object toBeReturned) { return MOCKITO_CORE.stubber().doReturn(toBeReturned); } /** * Same as {@link #doReturn(Object)} but sets consecutive values to be returned. Remember to use * <code>doReturn()</code> in those rare occasions when you cannot use {@link Mockito#when(Object)}. * <p> * <b>Beware that {@link Mockito#when(Object)} is always recommended for stubbing because it is argument type-safe * and more readable</b> (especially when stubbing consecutive calls). * <p> * Here are those rare occasions when doReturn() comes handy: * <p> * * <ol> * <li>When spying real objects and calling real methods on a spy brings side effects * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo", "bar", "qix"); * * //You have to use doReturn() for stubbing: * doReturn("foo", "bar", "qix").when(spy).get(0); * </code></pre> * </li> * * <li>Overriding a previous exception-stubbing: * <pre class="code"><code class="java"> * when(mock.foo()).thenThrow(new RuntimeException()); * * //Impossible: the exception-stubbed foo() method is called so RuntimeException is thrown. * when(mock.foo()).thenReturn("bar", "foo", "qix"); * * //You have to use doReturn() for stubbing: * doReturn("bar", "foo", "qix").when(mock).foo(); * </code></pre> * </li> * </ol> * * Above scenarios shows a trade-off of Mockito's elegant syntax. Note that the scenarios are very rare, though. * Spying should be sporadic and overriding exception-stubbing is very rare. Not to mention that in general * overridding stubbing is a potential code smell that points out too much stubbing. * <p> * See examples in javadoc for {@link Mockito} class * * @param toBeReturned to be returned when the stubbed method is called * @param toBeReturnedNext to be returned in consecutive calls when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ @SuppressWarnings({"unchecked", "varargs"}) public static Stubber doReturn(Object toBeReturned, Object... toBeReturnedNext) { return MOCKITO_CORE.stubber().doReturn(toBeReturned, toBeReturnedNext); } /** * Creates {@link org.mockito.InOrder} object that allows verifying mocks in order. * * <pre class="code"><code class="java"> * InOrder inOrder = inOrder(firstMock, secondMock); * * inOrder.verify(firstMock).add("was called first"); * inOrder.verify(secondMock).add("was called second"); * </code></pre> * * Verification in order is flexible - <b>you don't have to verify all interactions</b> one-by-one * but only those that you are interested in testing in order. * <p> * Also, you can create InOrder object passing only mocks that are relevant for in-order verification. 
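* <p> * For instance (a minimal sketch - <code>thirdMock</code> is an illustrative name, not part of the example above): a mock that the tested code touches but whose ordering you don't care about can simply be left out of the <code>inOrder(..)</code> call: * <pre class="code"><code class="java"> * //thirdMock is also used by the tested code, but it is irrelevant for the ordering * InOrder inOrder = inOrder(firstMock, secondMock); * * inOrder.verify(firstMock).add("was called first"); * inOrder.verify(secondMock).add("was called second"); * //interactions with thirdMock do not affect this in-order verification * </code></pre>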
* <p> * <code>InOrder</code> verification is 'greedy', but you will hardly ever notice it. * If you want to find out more, read * <a href="https://github.com/mockito/mockito/wiki/Greedy-algorithm-of-verfication-InOrder">this wiki page</a>. * <p> * As of Mockito 1.8.4 you can verifyNoMoreInteractions() in order-sensitive way. Read more: {@link InOrder#verifyNoMoreInteractions()} * <p> * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified in order * * @return InOrder object to be used to verify in order */ public static InOrder inOrder(Object... mocks) { return MOCKITO_CORE.inOrder(mocks); } /** * Ignores stubbed methods of given mocks for the sake of verification. * Sometimes useful when coupled with <code>verifyNoMoreInteractions()</code> or verification <code>inOrder()</code>. * Helps avoid redundant verification of stubbed calls - typically we're not interested in verifying stubs. * <p> * <b>Warning</b>, <code>ignoreStubs()</code> might lead to overuse of <code>verifyNoMoreInteractions(ignoreStubs(...));</code> * Bear in mind that Mockito does not recommend bombarding every test with <code>verifyNoMoreInteractions()</code> * for the reasons outlined in javadoc for {@link Mockito#verifyNoMoreInteractions(Object...)} * In other words: all <b>*stubbed*</b> methods of given mocks are marked <b>*verified*</b> so that they don't get in the way during verifyNoMoreInteractions(). * <p> * This method <b>changes the input mocks</b>! This method returns input mocks just for convenience. * <p> * Ignored stubs will also be ignored for verification inOrder, including {@link org.mockito.InOrder#verifyNoMoreInteractions()}. * See the second example. * <p> * Example: * <pre class="code"><code class="java"> * //mocking lists for the sake of the example (if you mock List in real life you will burn in hell) * List mock1 = mock(List.class), mock2 = mock(List.class); * * //stubbing mocks: * when(mock1.get(0)).thenReturn(10); * when(mock2.get(0)).thenReturn(20); * * //using mocks by calling stubbed get(0) methods: * System.out.println(mock1.get(0)); //prints 10 * System.out.println(mock2.get(0)); //prints 20 * * //using mocks by calling clear() methods: * mock1.clear(); * mock2.clear(); * * //verification: * verify(mock1).clear(); * verify(mock2).clear(); * * //verifyNoMoreInteractions() fails because get() methods were not accounted for. * try { verifyNoMoreInteractions(mock1, mock2); } catch (NoInteractionsWanted e) {} * * //However, if we ignore stubbed methods then we can verifyNoMoreInteractions() * verifyNoMoreInteractions(ignoreStubs(mock1, mock2)); * * //Remember that ignoreStubs() <b>*changes*</b> the input mocks and returns them for convenience. * </code></pre> * Ignoring stubs can be used with <b>verification in order</b>: * <pre class="code"><code class="java"> * List list = mock(List.class); * when(list.get(0)).thenReturn("foo"); * * list.add(0); * System.out.println(list.get(0)); //we don't want to verify this * list.clear(); * * InOrder inOrder = inOrder(ignoreStubs(list)); * inOrder.verify(list).add(0); * inOrder.verify(list).clear(); * inOrder.verifyNoMoreInteractions(); * </code></pre> * * @since 1.9.0 * @param mocks input mocks that will be changed * @return the same mocks that were passed in as parameters */ public static Object[] ignoreStubs(Object... mocks) { return MOCKITO_CORE.ignoreStubs(mocks); } /** * Allows verifying exact number of invocations. 
E.g: * <pre class="code"><code class="java"> * verify(mock, times(2)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param wantedNumberOfInvocations wanted number of invocations * * @return verification mode */ public static VerificationMode times(int wantedNumberOfInvocations) { return VerificationModeFactory.times(wantedNumberOfInvocations); } /** * Alias to <code>times(0)</code>, see {@link Mockito#times(int)} * <p> * Verifies that an interaction did not happen. E.g: * <pre class="code"><code class="java"> * verify(mock, never()).someMethod(); * </code></pre> * * <p> * If you want to verify there were NO interactions with the mock * check out {@link Mockito#verifyZeroInteractions(Object...)} * or {@link Mockito#verifyNoMoreInteractions(Object...)} * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode never() { return times(0); } /** * Allows at-least-once verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atLeastOnce()).someMethod("some arg"); * </code></pre> * Alias to <code>atLeast(1)</code>. * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode atLeastOnce() { return VerificationModeFactory.atLeastOnce(); } /** * Allows at-least-x verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atLeast(3)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param minNumberOfInvocations minimum number of invocations * * @return verification mode */ public static VerificationMode atLeast(int minNumberOfInvocations) { return VerificationModeFactory.atLeast(minNumberOfInvocations); } /** * Allows at-most-x verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atMost(3)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param maxNumberOfInvocations max number of invocations * * @return verification mode */ public static VerificationMode atMost(int maxNumberOfInvocations) { return VerificationModeFactory.atMost(maxNumberOfInvocations); } /** * Allows non-greedy verification in order. For example * <pre class="code"><code class="java"> * inOrder.verify( mock, calls( 2 )).someMethod( "some arg" ); * </code></pre> * <ul> * <li>will not fail if the method is called 3 times, unlike times( 2 )</li> * <li>will not mark the third invocation as verified, unlike atLeast( 2 )</li> * </ul> * This verification mode can only be used with in-order verification. * @param wantedNumberOfInvocations number of invocations to verify * @return verification mode */ public static VerificationMode calls( int wantedNumberOfInvocations ){ return VerificationModeFactory.calls( wantedNumberOfInvocations ); } /** * Allows checking if the given method was the only one invoked. E.g: * <pre class="code"><code class="java"> * verify(mock, only()).someMethod(); * //above is a shorthand for the following 2 lines of code: * verify(mock).someMethod(); * verifyNoMoreInteractions(mock); * </code></pre> * * <p> * See also {@link Mockito#verifyNoMoreInteractions(Object...)} * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode only() { return VerificationModeFactory.only(); } /** * Allows verifying with timeout. 
It causes a verify to wait for a specified period of time for a desired * interaction rather than fails immediately if has not already happened. May be useful for testing in concurrent * conditions. * <p> * This differs from {@link Mockito#after after()} in that after() will wait the full period, unless * the final test result is known early (e.g. if a never() fails), whereas timeout() will stop early as soon * as verification passes, producing different behaviour when used with times(2), for example, which can pass * and then later fail. In that case, timeout would pass as soon as times(2) passes, whereas after would run until * times(2) failed, and then fail. * <p> * This feature should be used rarely - figure out a better way of testing your multi-threaded system. * <pre class="code"><code class="java"> * //passes when someMethod() is called within given time span * verify(mock, timeout(100)).someMethod(); * //above is an alias to: * verify(mock, timeout(100).times(1)).someMethod(); * * //passes as soon as someMethod() has been called 2 times before the given timeout * verify(mock, timeout(100).times(2)).someMethod(); * * //equivalent: this also passes as soon as someMethod() has been called 2 times before the given timeout * verify(mock, timeout(100).atLeast(2)).someMethod(); * * //verifies someMethod() within given time span using given verification mode * //useful only if you have your own custom verification modes. * verify(mock, new Timeout(100, yourOwnVerificationMode)).someMethod(); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param millis - time span in milliseconds * * @return verification mode */ public static VerificationWithTimeout timeout(long millis) { return new Timeout(millis, VerificationModeFactory.times(1)); } /** * Allows verifying over a given period. It causes a verify to wait for a specified period of time for a desired * interaction rather than failing immediately if has not already happened. May be useful for testing in concurrent * conditions. * <p> * This differs from {@link Mockito#timeout timeout()} in that after() will wait the full period, whereas timeout() * will stop early as soon as verification passes, producing different behaviour when used with times(2), for example, * which can pass and then later fail. In that case, timeout would pass as soon as times(2) passes, whereas after would * run the full time, which point it will fail, as times(2) has failed. * <p> * This feature should be used rarely - figure out a better way of testing your multi-threaded system. * <p> * Not yet implemented to work with InOrder verification. * <pre class="code"><code class="java"> * //passes after 100ms, if someMethod() has only been called once at that time. * verify(mock, after(100)).someMethod(); * //above is an alias to: * verify(mock, after(100).times(1)).someMethod(); * * //passes if someMethod() is called <b>*exactly*</b> 2 times after the given timespan * verify(mock, after(100).times(2)).someMethod(); * * //passes if someMethod() has not been called after the given timespan * verify(mock, after(100).never()).someMethod(); * * //verifies someMethod() after a given time span using given verification mode * //useful only if you have your own custom verification modes. 
* verify(mock, new After(100, yourOwnVerificationMode)).someMethod(); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param millis - time span in milliseconds * * @return verification mode */ public static VerificationAfterDelay after(long millis) { return new After(millis, VerificationModeFactory.times(1)); } /** * First of all, in case of any trouble, I encourage you to read the Mockito FAQ: <a href="https://github.com/mockito/mockito/wiki/FAQ">https://github.com/mockito/mockito/wiki/FAQ</a> * <p> * In case of questions you may also post to mockito mailing list: <a href="http://groups.google.com/group/mockito">http://groups.google.com/group/mockito</a> * <p> * <code>validateMockitoUsage()</code> <b>explicitly validates</b> the framework state to detect invalid use of Mockito. * However, this feature is optional <b>because Mockito validates the usage all the time...</b> but there is a gotcha so read on. * <p> * Examples of incorrect use: * <pre class="code"><code class="java"> * //Oops, thenReturn() part is missing: * when(mock.get()); * * //Oops, verified method call is inside verify() where it should be on the outside: * verify(mock.execute()); * * //Oops, missing method to verify: * verify(mock); * </code></pre> * * Mockito throws exceptions if you misuse it so that you know if your tests are written correctly. * The gotcha is that Mockito does the validation <b>next time</b> you use the framework (e.g. next time you verify, stub, call mock etc.). * But even though the exception might be thrown in the next test, * the exception <b>message contains a navigable stack trace element</b> with location of the defect. * Hence you can click and find the place where Mockito was misused. * <p> * Sometimes though, you might want to validate the framework usage explicitly. * For example, one of the users wanted to put <code>validateMockitoUsage()</code> in his <code>&#064;After</code> method * so that he knows immediately when he misused Mockito. * Without it, he would have known about it not sooner than <b>next time</b> he used the framework. * One more benefit of having <code>validateMockitoUsage()</code> in <code>&#064;After</code> is that jUnit runner and rule will always fail in the test method with defect * whereas ordinary 'next-time' validation might fail the <b>next</b> test method. * But even though JUnit might report next test as red, don't worry about it * and just click at navigable stack trace element in the exception message to instantly locate the place where you misused mockito. * <p> * <b>Both built-in runner: {@link MockitoJUnitRunner} and rule: {@link MockitoRule}</b> do validateMockitoUsage() after each test method. * <p> * Bear in mind that <b>usually you don't have to <code>validateMockitoUsage()</code></b> * and framework validation triggered on next-time basis should be just enough, * mainly because of enhanced exception message with clickable location of defect. * However, I would recommend validateMockitoUsage() if you already have sufficient test infrastructure * (like your own runner or base class for all tests) because adding a special action to <code>&#064;After</code> has zero cost. * <p> * See examples in javadoc for {@link Mockito} class */ public static void validateMockitoUsage() { MOCKITO_CORE.validateMockitoUsage(); } /** * Allows mock creation with additional mock settings. * <p> * Don't use it too often. * Consider writing simple tests that use simple mocks. 
* Repeat after me: simple tests push simple, KISSy, readable & maintainable code. * If you cannot write a test in a simple way - refactor the code under test. * <p> * Examples of mock settings: * <pre class="code"><code class="java"> * //Creates mock with different default answer & name * Foo mock = mock(Foo.class, withSettings() * .defaultAnswer(RETURNS_SMART_NULLS) * .name("cool mockie")); * * //Creates mock with different default answer, descriptive name and extra interfaces * Foo mock = mock(Foo.class, withSettings() * .defaultAnswer(RETURNS_SMART_NULLS) * .name("cool mockie") * .extraInterfaces(Bar.class)); * </code></pre> * {@link MockSettings} has been introduced for two reasons. * Firstly, to make it easy to add another mock settings when the demand comes. * Secondly, to enable combining different mock settings without introducing zillions of overloaded mock() methods. * <p> * See javadoc for {@link MockSettings} to learn about possible mock settings. * <p> * * @return mock settings instance with defaults. */ public static MockSettings withSettings() { return new MockSettingsImpl().defaultAnswer(RETURNS_DEFAULTS); } /** * Adds a description to be printed if verification fails. * <pre class="code"><code class="java"> * verify(mock, description("This will print on failure")).someMethod("some arg"); * </code></pre> * @param description The description to print on failure. * @return verification mode * @since 2.1.0 */ public static VerificationMode description(String description) { return times(1).description(description); } /** * This API will move soon to a different place. * See <a href="https://github.com/mockito/mockito/issues/577">issue 577</a>. */ @Deprecated static MockitoDebugger debug() { return new MockitoDebuggerImpl(); } /** * For advanced users or framework integrators. See {@link MockitoFramework} class. * * @since 2.1.0 */ @Incubating public static MockitoFramework framework() { return new DefaultMockitoFramework(); } }
src/main/java/org/mockito/Mockito.java
/* * Copyright (c) 2007 Mockito contributors * This program is made available under the terms of the MIT License. */ package org.mockito; import org.mockito.internal.framework.DefaultMockitoFramework; import org.mockito.internal.MockitoCore; import org.mockito.internal.creation.MockSettingsImpl; import org.mockito.internal.debugging.MockitoDebuggerImpl; import org.mockito.internal.stubbing.defaultanswers.ReturnsEmptyValues; import org.mockito.internal.stubbing.defaultanswers.ReturnsMoreEmptyValues; import org.mockito.internal.verification.VerificationModeFactory; import org.mockito.mock.SerializableMode; import org.mockito.runners.MockitoJUnitRunner; import org.mockito.stubbing.*; import org.mockito.verification.*; import org.mockito.junit.*; /** * <p align="left"><img src="logo.png" srcset="[email protected] 2x" alt="Mockito logo"/></p> * The Mockito library enables mock creation, verification and stubbing. * <p> * This javadoc content is also available on the <a href="http://mockito.org">http://mockito.org</a> web page. * All documentation is kept in javadocs because it guarantees consistency between what's on the web and what's in the source code. * It allows access to documentation straight from the IDE even if you work offline. * It motivates Mockito developers to keep documentation up-to-date with the code that they write, * every day, with every commit. * * <h1>Contents</h1> * * <b> * <a href="#0">0. Migrating to Mockito 2</a><br/> * <a href="#1">1. Let's verify some behaviour! </a><br/> * <a href="#2">2. How about some stubbing? </a><br/> * <a href="#3">3. Argument matchers </a><br/> * <a href="#4">4. Verifying exact number of invocations / at least once / never </a><br/> * <a href="#5">5. Stubbing void methods with exceptions </a><br/> * <a href="#6">6. Verification in order </a><br/> * <a href="#7">7. Making sure interaction(s) never happened on mock </a><br/> * <a href="#8">8. Finding redundant invocations </a><br/> * <a href="#9">9. Shorthand for mocks creation - <code>&#064;Mock</code> annotation </a><br/> * <a href="#10">10. Stubbing consecutive calls (iterator-style stubbing) </a><br/> * <a href="#11">11. Stubbing with callbacks </a><br/> * <a href="#12">12. <code>doReturn()</code>|<code>doThrow()</code>|<code>doAnswer()</code>|<code>doNothing()</code>|<code>doCallRealMethod()</code> family of methods</a><br/> * <a href="#13">13. Spying on real objects </a><br/> * <a href="#14">14. Changing default return values of unstubbed invocations (Since 1.7) </a><br/> * <a href="#15">15. Capturing arguments for further assertions (Since 1.8.0) </a><br/> * <a href="#16">16. Real partial mocks (Since 1.8.0) </a><br/> * <a href="#17">17. Resetting mocks (Since 1.8.0) </a><br/> * <a href="#18">18. Troubleshooting & validating framework usage (Since 1.8.0) </a><br/> * <a href="#19">19. Aliases for behavior driven development (Since 1.8.0) </a><br/> * <a href="#20">20. Serializable mocks (Since 1.8.1) </a><br/> * <a href="#21">21. New annotations: <code>&#064;Captor</code>, <code>&#064;Spy</code>, <code>&#064;InjectMocks</code> (Since 1.8.3) </a><br/> * <a href="#22">22. Verification with timeout (Since 1.8.5) </a><br/> * <a href="#23">23. Automatic instantiation of <code>&#064;Spies</code>, <code>&#064;InjectMocks</code> and constructor injection goodness (Since 1.9.0)</a><br/> * <a href="#24">24. One-liner stubs (Since 1.9.0)</a><br/> * <a href="#25">25. Verification ignoring stubs (Since 1.9.0)</a><br/> * <a href="#26">26. Mocking details (Since 1.9.5)</a><br/> * <a href="#27">27. 
Delegate calls to real instance (Since 1.9.5)</a><br/> * <a href="#28">28. <code>MockMaker</code> API (Since 1.9.5)</a><br/> * <a href="#29">29. BDD style verification (Since 1.10.0)</a><br/> * <a href="#30">30. Spying or mocking abstract classes (Since 1.10.12)</a><br/> * <a href="#31">31. Mockito mocks can be <em>serialized</em> / <em>deserialized</em> across classloaders (Since 1.10.0)</a></h3><br/> * <a href="#32">32. Better generic support with deep stubs (Since 1.10.0)</a></h3><br/> * <a href="#32">33. Mockito JUnit rule (Since 1.10.17)</a><br/> * <a href="#34">34. Switch <em>on</em> or <em>off</em> plugins (Since 1.10.15)</a><br/> * <a href="#35">35. (new) Custom verification failure message (Since 2.1.0)</a><br/> * <a href="#36">36. (new) Java 8 Lambda Matcher Support (Since 2.1.0)</a><br/> * <a href="#37">37. (new) Java 8 Custom Answer Support (Since 2.1.0)</a><br/> * </b> * * <h3 id="0">0. <a class="meaningful_link" href="#mockito2">Migrating to Mockito 2</a></h3> * * In order to continue improving Mockito and further improve the unit testing experience, we want you to upgrade to 2.1.0! * Mockito follows <a href="http://semver.org/">semantic versioning</a> and contains breaking changes only on major version upgrades. * In the lifecycle of a library, breaking changes are necessary * to roll out a set of brand new features that alter the existing behavior or even change the API. * For a comprehensive guide on the new release including incompatible changes, * see '<a href="https://github.com/mockito/mockito/wiki/What%27s-new-in-Mockito-2">What's new in Mockito 2</a>' wiki page. * We hope that you enjoy Mockito 2! * * <h3 id="1">1. <a class="meaningful_link" href="#verification">Let's verify some behaviour!</a></h3> * * The following examples mock a List, because most people are familiar with the interface (such as the * <code>add()</code>, <code>get()</code>, <code>clear()</code> methods). <br> * In reality, please don't mock the List class. Use a real instance instead. * * <pre class="code"><code class="java"> * //Let's import Mockito statically so that the code looks clearer * import static org.mockito.Mockito.*; * * //mock creation * List mockedList = mock(List.class); * * //using mock object * mockedList.add("one"); * mockedList.clear(); * * //verification * verify(mockedList).add("one"); * verify(mockedList).clear(); * </code></pre> * * <p> * Once created, a mock will remember all interactions. Then you can selectively * verify whatever interactions you are interested in. * * * * * <h3 id="2">2. <a class="meaningful_link" href="#stubbing">How about some stubbing?</a></h3> * * <pre class="code"><code class="java"> * //You can mock concrete classes, not just interfaces * LinkedList mockedList = mock(LinkedList.class); * * //stubbing * when(mockedList.get(0)).thenReturn("first"); * when(mockedList.get(1)).thenThrow(new RuntimeException()); * * //following prints "first" * System.out.println(mockedList.get(0)); * * //following throws runtime exception * System.out.println(mockedList.get(1)); * * //following prints "null" because get(999) was not stubbed * System.out.println(mockedList.get(999)); * * //Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b> * //If your code cares what get(0) returns, then something else breaks (often even before verify() gets executed). * //If your code doesn't care what get(0) returns, then it should not be stubbed. Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>. 
* verify(mockedList).get(0); * </code></pre> * * <ul> * <li> By default, for all methods that return a value, a mock will return either null, * a primitive/primitive wrapper value, or an empty collection, as appropriate. * For example 0 for an int/Integer and false for a boolean/Boolean. </li> * * <li> Stubbing can be overridden: for example common stubbing can go to * fixture setup but the test methods can override it. * Please note that overriding stubbing is a potential code smell that points out too much stubbing.</li> * * <li> Once stubbed, the method will always return a stubbed value, regardless * of how many times it is called. </li> * * <li> The last stubbing is the one that counts when you stub the same method with * the same arguments many times. * In other words: <b>the order of stubbing matters</b> but it is only meaningful rarely, * e.g. when stubbing exactly the same method calls or sometimes when argument matchers are used, etc.</li> * * </ul> * * * * <h3 id="3">3. <a class="meaningful_link" href="#argument_matchers">Argument matchers</a></h3> * * Mockito verifies argument values in natural Java style: by using an <code>equals()</code> method. * Sometimes, when extra flexibility is required, you might use argument matchers: * * <pre class="code"><code class="java"> * //stubbing using built-in anyInt() argument matcher * when(mockedList.get(anyInt())).thenReturn("element"); * * //stubbing using custom matcher (let's say isValid() returns your own matcher implementation): * when(mockedList.contains(argThat(isValid()))).thenReturn(true); * * //following prints "element" * System.out.println(mockedList.get(999)); * * //<b>you can also verify using an argument matcher</b> * verify(mockedList).get(anyInt()); * * //<b>argument matchers can also be written as Java 8 Lambdas</b> * verify(mockedList).add(someString -> someString.length() > 5); * * </code></pre> * * <p> * Argument matchers allow flexible verification or stubbing. * {@link ArgumentMatchers Click here} {@link org.mockito.hamcrest.MockitoHamcrest or here} to see more built-in matchers * and examples of <b>custom argument matchers / hamcrest matchers</b>. * <p> * For information solely on <b>custom argument matchers</b> check out javadoc for {@link ArgumentMatcher} class. * <p> * Be reasonable with using complicated argument matching. * The natural matching style using <code>equals()</code> with occasional <code>anyX()</code> matchers tends to give clean & simple tests. * Sometimes it's just better to refactor the code to allow <code>equals()</code> matching or even implement the <code>equals()</code> method to help out with testing. * <p> * Also, read <a href="#15">section 15</a> or javadoc for {@link ArgumentCaptor} class. * {@link ArgumentCaptor} is a special implementation of an argument matcher that captures argument values for further assertions. * <p> * <b>Warning on argument matchers:</b> * <p> * If you are using argument matchers, <b>all arguments</b> have to be provided * by matchers. * <p> The following example shows verification but the same applies to stubbing: * * <pre class="code"><code class="java"> * verify(mock).someMethod(anyInt(), anyString(), <b>eq("third argument")</b>); * //above is correct - eq() is also an argument matcher * * verify(mock).someMethod(anyInt(), anyString(), <b>"third argument"</b>); * //above is incorrect - exception will be thrown because third argument is given without an argument matcher. 
* </code></pre> * * <p> * Matcher methods like <code>anyObject()</code>, <code>eq()</code> <b>do not</b> return matchers. * Internally, they record a matcher on a stack and return a dummy value (usually null). * This implementation is due to static type safety imposed by the Java compiler. * The consequence is that you cannot use <code>anyObject()</code>, <code>eq()</code> methods outside of a verified/stubbed method. * * * * * <h3 id="4">4. <a class="meaningful_link" href="#exact_verification">Verifying exact number of invocations</a> / * <a class="meaningful_link" href="#at_least_verification">at least x</a> / never</h3> * * <pre class="code"><code class="java"> * //using mock * mockedList.add("once"); * * mockedList.add("twice"); * mockedList.add("twice"); * * mockedList.add("three times"); * mockedList.add("three times"); * mockedList.add("three times"); * * //following two verifications work exactly the same - times(1) is used by default * verify(mockedList).add("once"); * verify(mockedList, times(1)).add("once"); * * //exact number of invocations verification * verify(mockedList, times(2)).add("twice"); * verify(mockedList, times(3)).add("three times"); * * //verification using never(). never() is an alias to times(0) * verify(mockedList, never()).add("never happened"); * * //verification using atLeast()/atMost() * verify(mockedList, atLeastOnce()).add("three times"); * verify(mockedList, atLeast(2)).add("three times"); * verify(mockedList, atMost(5)).add("three times"); * * </code></pre> * * <p> * <b>times(1) is the default.</b> Therefore using times(1) explicitly can be * omitted. * * * * * <h3 id="5">5. <a class="meaningful_link" href="#stubbing_with_exceptions">Stubbing void methods with exceptions</a></h3> * * <pre class="code"><code class="java"> * doThrow(new RuntimeException()).when(mockedList).clear(); * * //following throws RuntimeException: * mockedList.clear(); * </code></pre> * * Read more about <code>doThrow()</code>|<code>doAnswer()</code> family of methods in <a href="#12">section 12</a>. * <p> * * <h3 id="6">6. <a class="meaningful_link" href="#in_order_verification">Verification in order</a></h3> * * <pre class="code"><code class="java"> * // A. Single mock whose methods must be invoked in a particular order * List singleMock = mock(List.class); * * //using a single mock * singleMock.add("was added first"); * singleMock.add("was added second"); * * //create an inOrder verifier for a single mock * InOrder inOrder = inOrder(singleMock); * * //following will make sure that add is first called with "was added first", then with "was added second" * inOrder.verify(singleMock).add("was added first"); * inOrder.verify(singleMock).add("was added second"); * * // B. Multiple mocks that must be used in a particular order * List firstMock = mock(List.class); * List secondMock = mock(List.class); * * //using mocks * firstMock.add("was called first"); * secondMock.add("was called second"); * * //create inOrder object passing any mocks that need to be verified in order * InOrder inOrder = inOrder(firstMock, secondMock); * * //following will make sure that firstMock was called before secondMock * inOrder.verify(firstMock).add("was called first"); * inOrder.verify(secondMock).add("was called second"); * * // Oh, and A + B can be mixed together at will * </code></pre> * * Verification in order is flexible - <b>you don't have to verify all * interactions</b> one-by-one but only those that you are interested in * testing in order. 
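* <p> * For example (a minimal sketch reusing <code>singleMock</code> from above; the extra interaction is illustrative only), interactions you don't mention are simply skipped: * <pre class="code"><code class="java"> * singleMock.add("was added first"); * singleMock.add("was added in the middle"); //we are not interested in this one * singleMock.add("was added last"); * * InOrder inOrder = inOrder(singleMock); * * //passes - the middle interaction is intentionally not verified * inOrder.verify(singleMock).add("was added first"); * inOrder.verify(singleMock).add("was added last"); * </code></pre>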
* <p> * Also, you can create an InOrder object passing only the mocks that are relevant for * in-order verification. * * * * * <h3 id="7">7. <a class="meaningful_link" href="#never_verification">Making sure interaction(s) never happened on mock</a></h3> * * <pre class="code"><code class="java"> * //using mocks - only mockOne is interacted * mockOne.add("one"); * * //ordinary verification * verify(mockOne).add("one"); * * //verify that method was never called on a mock * verify(mockOne, never()).add("two"); * * //verify that other mocks were not interacted * verifyZeroInteractions(mockTwo, mockThree); * * </code></pre> * * * * * <h3 id="8">8. <a class="meaningful_link" href="#finding_redundant_invocations">Finding redundant invocations</a></h3> * * <pre class="code"><code class="java"> * //using mocks * mockedList.add("one"); * mockedList.add("two"); * * verify(mockedList).add("one"); * * //following verification will fail * verifyNoMoreInteractions(mockedList); * </code></pre> * * A word of <b>warning</b>: * Some users who did a lot of classic, expect-run-verify mocking tend to use <code>verifyNoMoreInteractions()</code> very often, even in every test method. * <code>verifyNoMoreInteractions()</code> is not recommended to use in every test method. * <code>verifyNoMoreInteractions()</code> is a handy assertion from the interaction testing toolkit. Use it only when it's relevant. * Abusing it leads to <strong>overspecified</strong>, <strong>less maintainable</strong> tests. You can find further reading * <a href="http://monkeyisland.pl/2008/07/12/should-i-worry-about-the-unexpected/">here</a>. * * <p> * See also {@link Mockito#never()} - it is more explicit and * communicates the intent well. * <p> * * * * * <h3 id="9">9. <a class="meaningful_link" href="#mock_annotation">Shorthand for mocks creation - <code>&#064;Mock</code> annotation</a></h3> * * <ul> * <li>Minimizes repetitive mock creation code.</li> * <li>Makes the test class more readable.</li> * <li>Makes the verification error easier to read because the <b>field name</b> * is used to identify the mock.</li> * </ul> * * <pre class="code"><code class="java"> * public class ArticleManagerTest { * * &#064;Mock private ArticleCalculator calculator; * &#064;Mock private ArticleDatabase database; * &#064;Mock private UserProvider userProvider; * * private ArticleManager manager; * </code></pre> * * <b>Important!</b> This needs to be somewhere in the base class or a test * runner: * * <pre class="code"><code class="java"> * MockitoAnnotations.initMocks(testClass); * </code></pre> * * You can use built-in runner: {@link MockitoJUnitRunner} or a rule: {@link MockitoRule}. * <p> * Read more here: {@link MockitoAnnotations} * * * * * <h3 id="10">10. <a class="meaningful_link" href="#stubbing_consecutive_calls">Stubbing consecutive calls</a> (iterator-style stubbing)</h3> * * Sometimes we need to stub with different return value/exception for the same * method call. Typical use case could be mocking iterators. * Original version of Mockito did not have this feature to promote simple mocking. * For example, instead of iterators one could use {@link Iterable} or simply * collections. Those offer natural ways of stubbing (e.g. using real * collections). 
In rare scenarios stubbing consecutive calls could be useful, * though: * <p> * * <pre class="code"><code class="java"> * when(mock.someMethod("some arg")) * .thenThrow(new RuntimeException()) * .thenReturn("foo"); * * //First call: throws runtime exception: * mock.someMethod("some arg"); * * //Second call: prints "foo" * System.out.println(mock.someMethod("some arg")); * * //Any consecutive call: prints "foo" as well (last stubbing wins). * System.out.println(mock.someMethod("some arg")); * </code></pre> * * Alternative, shorter version of consecutive stubbing: * * <pre class="code"><code class="java"> * when(mock.someMethod("some arg")) * .thenReturn("one", "two", "three"); * </code></pre> * * * * * <h3 id="11">11. <a class="meaningful_link" href="#answer_stubs">Stubbing with callbacks</a></h3> * * Allows stubbing with generic {@link Answer} interface. * <p> * Yet another controversial feature which was not included in Mockito * originally. We recommend simply stubbing with <code>thenReturn()</code> or * <code>thenThrow()</code>, which should be enough to test/test-drive * any clean & simple code. However, if you do have a need to stub with the generic Answer interface, here is an example: * * <pre class="code"><code class="java"> * when(mock.someMethod(anyString())).thenAnswer(new Answer() { * Object answer(InvocationOnMock invocation) { * Object[] args = invocation.getArguments(); * Object mock = invocation.getMock(); * return "called with arguments: " + args; * } * }); * * //the following prints "called with arguments: foo" * System.out.println(mock.someMethod("foo")); * </code></pre> * * * * * <h3 id="12">12. <a class="meaningful_link" href="#do_family_methods_stubs"><code>doReturn()</code>|<code>doThrow()</code>| * <code>doAnswer()</code>|<code>doNothing()</code>|<code>doCallRealMethod()</code> family of methods</a></h3> * * Stubbing void methods requires a different approach from {@link Mockito#when(Object)} because the compiler does not * like void methods inside brackets... * <p> * Use <code>doThrow()</code> when you want to stub a void method with an exception: * <pre class="code"><code class="java"> * doThrow(new RuntimeException()).when(mockedList).clear(); * * //following throws RuntimeException: * mockedList.clear(); * </code></pre> * </p> * * <p> * You can use <code>doThrow()</code>, <code>doAnswer()</code>, <code>doNothing()</code>, <code>doReturn()</code> * and <code>doCallRealMethod()</code> in place of the corresponding call with <code>when()</code>, for any method. * It is necessary when you * <ul> * <li>stub void methods</li> * <li>stub methods on spy objects (see below)</li> * <li>stub the same method more than once, to change the behaviour of a mock in the middle of a test.</li> * </ul> * but you may prefer to use these methods in place of the alternative with <code>when()</code>, for all of your stubbing calls. * <p> * Read more about these methods: * <p> * {@link Mockito#doReturn(Object)} * <p> * {@link Mockito#doThrow(Throwable...)} * <p> * {@link Mockito#doThrow(Class)} * <p> * {@link Mockito#doAnswer(Answer)} * <p> * {@link Mockito#doNothing()} * <p> * {@link Mockito#doCallRealMethod()} * * * * * <h3 id="13">13. <a class="meaningful_link" href="#spy">Spying on real objects</a></h3> * * You can create spies of real objects. When you use the spy then the <b>real</b> methods are called * (unless a method was stubbed). * <p> * Real spies should be used <b>carefully and occasionally</b>, for example when dealing with legacy code. 
* * <p> * Spying on real objects can be associated with the "partial mocking" concept. * <b>Before the release 1.8</b>, Mockito spies were not real partial mocks. * The reason was that we thought partial mocks were a code smell. * At some point we found legitimate use cases for partial mocks * (3rd party interfaces, interim refactoring of legacy code, the full article is <a href= * "http://monkeyisland.pl/2009/01/13/subclass-and-override-vs-partial-mocking-vs-refactoring" * >here</a>) * <p> * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //optionally, you can stub out some methods: * when(spy.size()).thenReturn(100); * * //using the spy calls <b>*real*</b> methods * spy.add("one"); * spy.add("two"); * * //prints "one" - the first element of the list * System.out.println(spy.get(0)); * * //size() method was stubbed - 100 is printed * System.out.println(spy.size()); * * //optionally, you can verify * verify(spy).add("one"); * verify(spy).add("two"); * </code></pre> * * <h4>Important gotcha on spying real objects!</h4> * <ol> * <li>Sometimes it's impossible or impractical to use {@link Mockito#when(Object)} for stubbing spies. * Therefore when using spies please consider the <code>doReturn</code>|<code>Answer</code>|<code>Throw()</code> family of * methods for stubbing. Example: * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo"); * * //You have to use doReturn() for stubbing * doReturn("foo").when(spy).get(0); * </code></pre> * </li> * * <li>Mockito <b>*does not*</b> delegate calls to the passed real instance, instead it actually creates a copy of it. * So if you keep the real instance and interact with it, don't expect the spy to be aware of those interactions * and their effect on the real instance's state. * The corollary is that when an <b>*unstubbed*</b> method is called <b>*on the spy*</b> but <b>*not on the real instance*</b>, * you won't see any effects on the real instance. * </li> * * <li>Watch out for final methods. * Mockito doesn't mock final methods so the bottom line is: when you spy on real objects + you try to stub a final method = trouble. * You won't be able to verify those methods either. * </li> * </ol> * * * * * <h3 id="14">14. Changing <a class="meaningful_link" href="#defaultreturn">default return values of unstubbed invocations</a> (Since 1.7)</h3> * * You can create a mock with a specified strategy for its return values. * It's quite an advanced feature and typically you don't need it to write decent tests. * However, it can be helpful for working with <b>legacy systems</b>. * <p> * It is the default answer so it will be used <b>only when you don't</b> stub the method call. * * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, Mockito.RETURNS_SMART_NULLS); * Foo mockTwo = mock(Foo.class, new YourOwnAnswer()); * </code></pre> * * <p> * Read more about this interesting implementation of <i>Answer</i>: {@link Mockito#RETURNS_SMART_NULLS} * * * * * <h3 id="15">15. <a class="meaningful_link" href="#captors">Capturing arguments</a> for further assertions (Since 1.8.0)</h3> * * Mockito verifies argument values in natural Java style: by using an <code>equals()</code> method. * This is also the recommended way of matching arguments because it makes tests clean & simple. 
* In some situations though, it is helpful to assert on certain arguments after the actual verification. * For example: * <pre class="code"><code class="java"> * ArgumentCaptor&lt;Person&gt; argument = ArgumentCaptor.forClass(Person.class); * verify(mock).doSomething(argument.capture()); * assertEquals("John", argument.getValue().getName()); * </code></pre> * * <b>Warning:</b> it is recommended to use ArgumentCaptor with verification <b>but not</b> with stubbing. * Using ArgumentCaptor with stubbing may decrease test readability because captor is created outside of assert (aka verify or 'then') block. * Also it may reduce defect localization because if stubbed method was not called then no argument is captured. * <p> * In a way ArgumentCaptor is related to custom argument matchers (see javadoc for {@link ArgumentMatcher} class). * Both techniques can be used for making sure certain arguments where passed to mocks. * However, ArgumentCaptor may be a better fit if: * <ul> * <li>custom argument matcher is not likely to be reused</li> * <li>you just need it to assert on argument values to complete verification</li> * </ul> * Custom argument matchers via {@link ArgumentMatcher} are usually better for stubbing. * * * * * <h3 id="16">16. <a class="meaningful_link" href="#partial_mocks">Real partial mocks</a> (Since 1.8.0)</h3> * * Finally, after many internal debates & discussions on the mailing list, partial mock support was added to Mockito. * Previously we considered partial mocks as code smells. However, we found a legitimate use case for partial mocks - more reading: * <a href="http://monkeyisland.pl/2009/01/13/subclass-and-override-vs-partial-mocking-vs-refactoring">here</a> * <p> * <b>Before release 1.8</b> <code>spy()</code> was not producing real partial mocks and it was confusing for some users. * Read more about spying: <a href="#13">here</a> or in javadoc for {@link Mockito#spy(Object)} method. * <p> * <pre class="code"><code class="java"> * //you can create partial mock with spy() method: * List list = spy(new LinkedList()); * * //you can enable partial mock capabilities selectively on mocks: * Foo mock = mock(Foo.class); * //Be sure the real implementation is 'safe'. * //If real implementation throws exceptions or depends on specific state of the object then you're in trouble. * when(mock.someMethod()).thenCallRealMethod(); * </code></pre> * * As usual you are going to read <b>the partial mock warning</b>: * Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven & well-designed code. * * * * * <h3 id="17">17. <a class="meaningful_link" href="#resetting_mocks">Resetting mocks</a> (Since 1.8.0)</h3> * * Smart Mockito users hardly use this feature because they know it could be a sign of poor tests. * Normally, you don't need to reset your mocks, just create new mocks for each test method. * <p> * Instead of <code>reset()</code> please consider writing simple, small and focused test methods over lengthy, over-specified tests. 
* <b>First potential code smell is <code>reset()</code> in the middle of the test method.</b> This probably means you're testing too much. * Follow the whisper of your test methods: "Please keep us small & focused on single behavior". * There are several threads about it on mockito mailing list. * <p> * The only reason we added <code>reset()</code> method is to * make it possible to work with container-injected mocks. * For more information see FAQ (<a href="https://github.com/mockito/mockito/wiki/FAQ">here</a>). * <p> * <b>Don't harm yourself.</b> <code>reset()</code> in the middle of the test method is a code smell (you're probably testing too much). * <pre class="code"><code class="java"> * List mock = mock(List.class); * when(mock.size()).thenReturn(10); * mock.add(1); * * reset(mock); * //at this point the mock forgot any interactions & stubbing * </code></pre> * * * * * <h3 id="18">18. <a class="meaningful_link" href="#framework_validation">Troubleshooting & validating framework usage</a> (Since 1.8.0)</h3> * * First of all, in case of any trouble, I encourage you to read the Mockito FAQ: * <a href="https://github.com/mockito/mockito/wiki/FAQ">https://github.com/mockito/mockito/wiki/FAQ</a> * <p> * In case of questions you may also post to mockito mailing list: * <a href="http://groups.google.com/group/mockito">http://groups.google.com/group/mockito</a> * <p> * Next, you should know that Mockito validates if you use it correctly <b>all the time</b>. * However, there's a gotcha so please read the javadoc for {@link Mockito#validateMockitoUsage()} * * * * * <h3 id="19">19. <a class="meaningful_link" href="#bdd_mockito">Aliases for behavior driven development</a> (Since 1.8.0)</h3> * * Behavior Driven Development style of writing tests uses <b>//given //when //then</b> comments as fundamental parts of your test methods. * This is exactly how we write our tests and we warmly encourage you to do so! * <p> * Start learning about BDD here: <a href="http://en.wikipedia.org/wiki/Behavior_Driven_Development">http://en.wikipedia.org/wiki/Behavior_Driven_Development</a> * <p> * The problem is that current stubbing api with canonical role of <b>when</b> word does not integrate nicely with <b>//given //when //then</b> comments. * It's because stubbing belongs to <b>given</b> component of the test and not to the <b>when</b> component of the test. * Hence {@link BDDMockito} class introduces an alias so that you stub method calls with {@link BDDMockito#given(Object)} method. * Now it really nicely integrates with the <b>given</b> component of a BDD style test! * <p> * Here is how the test might look like: * <pre class="code"><code class="java"> * import static org.mockito.BDDMockito.*; * * Seller seller = mock(Seller.class); * Shop shop = new Shop(seller); * * public void shouldBuyBread() throws Exception { * //given * given(seller.askForBread()).willReturn(new Bread()); * * //when * Goods goods = shop.buyBread(); * * //then * assertThat(goods, containBread()); * } * </code></pre> * * * * * <h3 id="20">20. <a class="meaningful_link" href="#serializable_mocks">Serializable mocks</a> (Since 1.8.1)</h3> * * Mocks can be made serializable. With this feature you can use a mock in a place that requires dependencies to be serializable. * <p> * WARNING: This should be rarely used in unit testing. * <p> * The behaviour was implemented for a specific use case of a BDD spec that had an unreliable external dependency. 
This * was in a web environment and the objects from the external dependency were being serialized to pass between layers. * <p> * To create serializable mock use {@link MockSettings#serializable()}: * <pre class="code"><code class="java"> * List serializableMock = mock(List.class, withSettings().serializable()); * </code></pre> * <p> * The mock can be serialized assuming all the normal <a href='http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html'> * serialization requirements</a> are met by the class. * <p> * Making a real object spy serializable is a bit more effort as the spy(...) method does not have an overloaded version * which accepts MockSettings. No worries, you will hardly ever use it. * * <pre class="code"><code class="java"> * List&lt;Object&gt; list = new ArrayList&lt;Object&gt;(); * List&lt;Object&gt; spy = mock(ArrayList.class, withSettings() * .spiedInstance(list) * .defaultAnswer(CALLS_REAL_METHODS) * .serializable()); * </code></pre> * * * * * <h3 id="21">21. New annotations: <a class="meaningful_link" href="#captor_annotation"><code>&#064;Captor</code></a>, * <a class="meaningful_link" href="#spy_annotation"><code>&#064;Spy</code></a>, * <a class="meaningful_link" href="#injectmocks_annotation"><code>&#064;InjectMocks</code></a> (Since 1.8.3)</h3> * * <p> * Release 1.8.3 brings new annotations that may be helpful on occasion: * * <ul> * <li>&#064;{@link Captor} simplifies creation of {@link ArgumentCaptor} * - useful when the argument to capture is a nasty generic class and you want to avoid compiler warnings * <li>&#064;{@link Spy} - you can use it instead {@link Mockito#spy(Object)}. * <li>&#064;{@link InjectMocks} - injects mock or spy fields into tested object automatically. * </ul> * * <p> * Note that &#064;{@link InjectMocks} can also be used in combination with the &#064;{@link Spy} annotation, it means * that Mockito will inject mocks into the partial mock under test. This complexity is another good reason why you * should only use partial mocks as a last resort. See point 16 about partial mocks. * * <p> * All new annotations are <b>*only*</b> processed on {@link MockitoAnnotations#initMocks(Object)}. * Just like for &#064;{@link Mock} annotation you can use the built-in runner: {@link MockitoJUnitRunner} or rule: * {@link MockitoRule}. * <p> * * * * * <h3 id="22">22. <a class="meaningful_link" href="#verification_timeout">Verification with timeout</a> (Since 1.8.5)</h3> * <p> * Allows verifying with timeout. It causes a verify to wait for a specified period of time for a desired * interaction rather than fails immediately if had not already happened. May be useful for testing in concurrent * conditions. * <p> * This feature should be used rarely - figure out a better way of testing your multi-threaded system. * <p> * Not yet implemented to work with InOrder verification. * <p> * Examples: * <p> * <pre class="code"><code class="java"> * //passes when someMethod() is called within given time span * verify(mock, timeout(100)).someMethod(); * //above is an alias to: * verify(mock, timeout(100).times(1)).someMethod(); * * //passes when someMethod() is called <b>*exactly*</b> 2 times within given time span * verify(mock, timeout(100).times(2)).someMethod(); * * //passes when someMethod() is called <b>*at least*</b> 2 times within given time span * verify(mock, timeout(100).atLeast(2)).someMethod(); * * //verifies someMethod() within given time span using given verification mode * //useful only if you have your own custom verification modes. 
* verify(mock, new Timeout(100, yourOwnVerificationMode)).someMethod(); * </code></pre> * * * * * <h3 id="23">23. <a class="meaningful_link" href="#automatic_instantiation">Automatic instantiation of <code>&#064;Spies</code>, * <code>&#064;InjectMocks</code></a> and <a class="meaningful_link" href="#constructor_injection">constructor injection goodness</a> (Since 1.9.0)</h3> * * <p> * Mockito will now try to instantiate &#064;{@link Spy} and will instantiate &#064;{@link InjectMocks} fields * using <b>constructor</b> injection, <b>setter</b> injection, or <b>field</b> injection. * <p> * To take advantage of this feature you need to use {@link MockitoAnnotations#initMocks(Object)}, {@link MockitoJUnitRunner} * or {@link MockitoRule}. * <p> * Read more about available tricks and the rules of injection in the javadoc for {@link InjectMocks} * <pre class="code"><code class="java"> * //instead: * &#064;Spy BeerDrinker drinker = new BeerDrinker(); * //you can write: * &#064;Spy BeerDrinker drinker; * * //same applies to &#064;InjectMocks annotation: * &#064;InjectMocks LocalPub; * </code></pre> * * * * * <h3 id="24">24. <a class="meaningful_link" href="#one_liner_stub">One-liner stubs</a> (Since 1.9.0)</h3> * <p> * Mockito will now allow you to create mocks when stubbing. * Basically, it allows to create a stub in one line of code. * This can be helpful to keep test code clean. * For example, some boring stub can be created & stubbed at field initialization in a test: * <pre class="code"><code class="java"> * public class CarTest { * Car boringStubbedCar = when(mock(Car.class).shiftGear()).thenThrow(EngineNotStarted.class).getMock(); * * &#064;Test public void should... {} * </code></pre> * * * * * <h3 id="25">25. <a class="meaningful_link" href="#ignore_stubs_verification">Verification ignoring stubs</a> (Since 1.9.0)</h3> * <p> * Mockito will now allow to ignore stubbing for the sake of verification. * Sometimes useful when coupled with <code>verifyNoMoreInteractions()</code> or verification <code>inOrder()</code>. * Helps avoiding redundant verification of stubbed calls - typically we're not interested in verifying stubs. * <p> * <b>Warning</b>, <code>ignoreStubs()</code> might lead to overuse of verifyNoMoreInteractions(ignoreStubs(...)); * Bear in mind that Mockito does not recommend bombarding every test with <code>verifyNoMoreInteractions()</code> * for the reasons outlined in javadoc for {@link Mockito#verifyNoMoreInteractions(Object...)} * <p>Some examples: * <pre class="code"><code class="java"> * verify(mock).foo(); * verify(mockTwo).bar(); * * //ignores all stubbed methods: * verifyNoMoreInteractions(ignoreStubs(mock, mockTwo)); * * //creates InOrder that will ignore stubbed * InOrder inOrder = inOrder(ignoreStubs(mock, mockTwo)); * inOrder.verify(mock).foo(); * inOrder.verify(mockTwo).bar(); * inOrder.verifyNoMoreInteractions(); * </code></pre> * <p> * Advanced examples and more details can be found in javadoc for {@link Mockito#ignoreStubs(Object...)} * * * * * <h3 id="26">26. <a class="meaningful_link" href="#mocking_details">Mocking details</a> (Since 1.9.5)</h3> * <p> * To identify whether a particular object is a mock or a spy: * <pre class="code"><code class="java"> * Mockito.mockingDetails(someObject).isMock(); * Mockito.mockingDetails(someObject).isSpy(); * </code></pre> * Both the {@link MockingDetails#isMock} and {@link MockingDetails#isSpy()} methods return <code>boolean</code>. 
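 * For illustration, a minimal sketch of the typical results (a plain mock next to a spy):
 * <pre class="code"><code class="java">
 *   List mock = mock(List.class);
 *   List spy = spy(new LinkedList());
 *
 *   Mockito.mockingDetails(mock).isMock(); //true
 *   Mockito.mockingDetails(mock).isSpy();  //false
 *   Mockito.mockingDetails(spy).isMock();  //true - see the note below
 *   Mockito.mockingDetails(spy).isSpy();   //true
 * </code></pre>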
* As a spy is just a different kind of mock, <code>isMock()</code> returns true if the object is a spy. * In future Mockito versions MockingDetails may grow and provide other useful information about the mock, * e.g. invocations, stubbing info, etc. * * * * * <h3 id="27">27. <a class="meaningful_link" href="#delegating_call_to_real_instance">Delegate calls to real instance</a> (Since 1.9.5)</h3> * * <p>Useful for spies or partial mocks of objects <strong>that are difficult to mock or spy</strong> using the usual spy API. * Since Mockito 1.10.11, the delegate may or may not be of the same type as the mock. * If the type is different, a matching method needs to be found on delegate type otherwise an exception is thrown. * * Possible use cases for this feature: * <ul> * <li>Final classes but with an interface</li> * <li>Already custom proxied object</li> * <li>Special objects with a finalize method, i.e. to avoid executing it 2 times</li> * </ul> * * <p>The difference with the regular spy: * <ul> * <li> * The regular spy ({@link #spy(Object)}) contains <strong>all</strong> state from the spied instance * and the methods are invoked on the spy. The spied instance is only used at mock creation to copy the state from. * If you call a method on a regular spy and it internally calls other methods on this spy, those calls are remembered * for verifications, and they can be effectively stubbed. * </li> * <li> * The mock that delegates simply delegates all methods to the delegate. * The delegate is used all the time as methods are delegated onto it. * If you call a method on a mock that delegates and it internally calls other methods on this mock, * those calls are <strong>not</strong> remembered for verifications, stubbing does not have effect on them, too. * Mock that delegates is less powerful than the regular spy but it is useful when the regular spy cannot be created. * </li> * </ul> * * <p> * See more information in docs for {@link AdditionalAnswers#delegatesTo(Object)}. * * * * * <h3 id="28">28. <a class="meaningful_link" href="#mock_maker_plugin"><code>MockMaker</code> API</a> (Since 1.9.5)</h3> * <p>Driven by requirements and patches from Google Android guys Mockito now offers an extension point * that allows replacing the proxy generation engine. By default, Mockito uses <a href="https://github.com/raphw/byte-buddy">Byte Buddy</a> * to create dynamic proxies. * <p>The extension point is for advanced users that want to extend Mockito. For example, it is now possible * to use Mockito for Android testing with a help of <a href="https://github.com/crittercism/dexmaker">dexmaker</a>. * <p>For more details, motivations and examples please refer to * the docs for {@link org.mockito.plugins.MockMaker}. * * * * * <h3 id="29">29. <a class="meaningful_link" href="#BDD_behavior_verification">(new) BDD style verification</a> (Since 1.10.0)</h3> * * Enables Behavior Driven Development (BDD) style verification by starting verification with the BDD <b>then</b> keyword. * * <pre class="code"><code class="java"> * given(dog.bark()).willReturn(2); * * // when * ... * * then(person).should(times(2)).ride(bike); * </code></pre> * * For more information and an example see {@link BDDMockito#then(Object)}} * * * * * <h3 id="30">30. <a class="meaningful_link" href="#spying_abstract_classes">(new) Spying or mocking abstract classes (Since 1.10.12)</a></h3> * * It is now possible to conveniently spy on abstract classes. Note that overusing spies hints at code design smells (see {@link #spy(Object)}). 
 * <p>
 * Previously, spying was only possible on instances of objects.
 * The new API makes it possible to use a constructor when creating an instance of the mock.
 * This is particularly useful for mocking abstract classes because the user is no longer required to provide an instance of the abstract class.
 * At the moment, only a parameter-less constructor is supported; let us know if it is not enough.
 *
 * <pre class="code"><code class="java">
 * //convenience API, new overloaded spy() method:
 * SomeAbstract spy = spy(SomeAbstract.class);
 *
 * //Robust API, via settings builder:
 * OtherAbstract spy = mock(OtherAbstract.class, withSettings()
 *    .useConstructor().defaultAnswer(CALLS_REAL_METHODS));
 *
 * //Mocking a non-static inner abstract class:
 * InnerAbstract spy = mock(InnerAbstract.class, withSettings()
 *    .useConstructor().outerInstance(outerInstance).defaultAnswer(CALLS_REAL_METHODS));
 * </code></pre>
 *
 * For more information please see {@link MockSettings#useConstructor()}.
 *
 *
 *
 *
 * <h3 id="31">31. <a class="meaningful_link" href="#serilization_across_classloader">(new) Mockito mocks can be <em>serialized</em> / <em>deserialized</em> across classloaders (Since 1.10.0)</a></h3>
 *
 * Mockito introduces serialization across classloaders.
 *
 * Like with any other form of serialization, all types in the mock hierarchy have to be serializable, including answers.
 * As this serialization mode requires considerably more work, it is an opt-in setting.
 *
 * <pre class="code"><code class="java">
 * // use regular serialization
 * mock(Book.class, withSettings().serializable());
 *
 * // use serialization across classloaders
 * mock(Book.class, withSettings().serializable(ACROSS_CLASSLOADERS));
 * </code></pre>
 *
 * For more details see {@link MockSettings#serializable(SerializableMode)}.
 *
 *
 *
 *
 * <h3 id="32">32. <a class="meaningful_link" href="#better_generic_support_with_deep_stubs">(new) Better generic support with deep stubs (Since 1.10.0)</a></h3>
 *
 * Deep stubbing has been improved to find generic information if available in the class.
 * That means that classes like this can be used without having to mock the behavior.
 *
 * <pre class="code"><code class="java">
 * class Lines extends ArrayList&lt;Line&gt; {
 *     // ...
 * }
 *
 * lines = mock(Lines.class, RETURNS_DEEP_STUBS);
 *
 * // Now Mockito understands this is not an Object but a Line
 * Line line = lines.iterator().next();
 * </code></pre>
 *
 * Please note that in most scenarios a mock returning a mock is wrong.
 *
 *
 *
 *
 * <h3 id="33">33. <a class="meaningful_link" href="#mockito_junit_rule">(new) Mockito JUnit rule (Since 1.10.17)</a></h3>
 *
 * Mockito now offers a JUnit rule. Until now, there were two ways in JUnit to initialize fields annotated with Mockito annotations
 * such as <code>&#064;{@link Mock}</code>, <code>&#064;{@link Spy}</code>, <code>&#064;{@link InjectMocks}</code>, etc.
 *
 * <ul>
 * <li>Annotating the JUnit test class with a <code>&#064;{@link org.junit.runner.RunWith}({@link MockitoJUnitRunner}.class)</code></li>
 * <li>Invoking <code>{@link MockitoAnnotations#initMocks(Object)}</code> in the <code>&#064;{@link org.junit.Before}</code> method</li>
 * </ul>
 *
 * Now you can choose to use a rule:
 *
 * <pre class="code"><code class="java">
 * &#064;RunWith(YetAnotherRunner.class)
 * public class TheTest {
 *     &#064;Rule public MockitoRule mockito = MockitoJUnit.rule();
 *     // ...
 * }
 * </code></pre>
 *
 * For more information see {@link MockitoJUnit#rule()}.
 *
 *
 *
 *
 * <h3 id="34">34. <a class="meaningful_link" href="#plugin_switch">(new) Switch <em>on</em> or <em>off</em> plugins (Since 1.10.15)</a></h3>
<a class="meaningful_link" href="#plugin_switch">(new) Switch <em>on</em> or <em>off</em> plugins (Since 1.10.15)</a></h3> * * An incubating feature made it's way in mockito that will allow to toggle a mockito-plugin. * * More information here {@link org.mockito.plugins.PluginSwitch}. * * * <h3 id="35">35. <a class="meaningful_link" href="#BDD_behavior_verification">Custom verification failure message</a> (Since 2.1.0)</h3> * <p> * Allows specifying a custom message to be printed if verification fails. * <p> * Examples: * <p> * <pre class="code"><code class="java"> * * // will print a custom message on verification failure * verify(mock, description("This will print on failure")).someMethod(); * * // will work with any verification mode * verify(mock, times(2).description("someMethod should be called twice")).someMethod(); * </code></pre> * * <h3 id="36">36. <a class="meaningful_link" href="#Java_8_Lambda_Matching">Java 8 Lambda Matcher Support</a> (Since 2.1.0)</h3> * <p> * You can use Java 8 lambda expressions with {@link ArgumentMatcher} to reduce the dependency on {@link ArgumentCaptor}. * If you need to verify that the input to a function call on a mock was correct, then you would normally * use the {@link ArgumentCaptor} to find the operands used and then do subsequent assertions on them. While * for complex examples this can be useful, it's also long-winded.<p> * Writing a lambda to express the match is quite easy. The argument to your function, when used in conjunction * with argThat, will be passed to the ArgumentMatcher as a strongly typed object, so it is possible * to do anything with it. * <p> * Examples: * <p> * <pre class="code"><code class="java"> * * // verify a list only had strings of a certain length added to it * // note - this will only compile under Java 8 * verify(list, times(2)).add(argThat(string -> string.length() < 5)); * * // Java 7 equivalent - not as neat * verify(list, times(2)).add(argThat(new ArgumentMatcher<String>(){ * public boolean matches(String arg) { * return arg.length() < 5; * } * })); * * // more complex Java 8 example - where you can specify complex verification behaviour functionally * verify(target, times(1)).receiveComplexObject(argThat(obj -> obj.getSubObject().get(0).equals("expected"))); * * // this can also be used when defining the behaviour of a mock under different inputs * // in this case if the input list was fewer than 3 items the mock returns null * when(mock.someMethod(argThat(list -> list.size()<3))).willReturn(null); * </code></pre> * * <h3 id="37">37. <a class="meaningful_link" href="#Java_8_Custom_Answers">Java 8 Custom Answer Support</a> (Since 2.1.0)</h3> * <p> * As the {@link Answer} interface has just one method it is already possible to implement it in Java 8 using * a lambda expression for very simple situations. The more you need to use the parameters of the method call, * the more you need to typecast the arguments from {@link org.mockito.invocation.InvocationOnMock}. * * <p> * Examples: * <p> * <pre class="code"><code class="java"> * // answer by returning 12 every time * doAnswer(invocation -> 12).when(mock).doSomething(); * * // answer by using one of the parameters - converting into the right * // type as your go - in this case, returning the length of the second string parameter * // as the answer. This gets long-winded quickly, with casting of parameters. 
 * doAnswer(invocation -> ((String) invocation.getArgument(1)).length())
 *     .when(mock).doSomething(anyString(), anyString(), anyString());
 * </code></pre>
 *
 * For convenience it is possible to write custom answers/actions, which use the parameters to the method call,
 * as Java 8 lambdas. Even in Java 7 and lower these custom answers based on a typed interface can reduce boilerplate.
 * In particular, this approach will make it easier to test functions which use callbacks.
 *
 * The functions <code>answer</code> and <code>answerVoid</code> can be found in {@link AdditionalAnswers} to create the answer object
 * using the interfaces in {@link org.mockito.internal.stubbing.answers.AnswerFunctionalInterfaces}. Support is provided
 * for functions with up to 5 parameters.
 *
 * <p>
 * Examples:
 * <p>
 * <pre class="code"><code class="java">
 *
 * // Example interface to be mocked has a function like:
 * void execute(String operand, Callback callback);
 *
 * // the example callback has a function and the class under test
 * // will depend on the callback being invoked
 * void receive(String item);
 *
 * // Java 8 - style 1
 * doAnswer(AdditionalAnswers.<String, Callback>answerVoid((operand, callback) -> callback.receive("dummy")))
 *     .when(mock).execute(anyString(), any(Callback.class));
 *
 * // Java 8 - style 2 - assuming static import of AdditionalAnswers
 * doAnswer(answerVoid((String operand, Callback callback) -> callback.receive("dummy")))
 *     .when(mock).execute(anyString(), any(Callback.class));
 *
 * // Java 8 - style 3 - where the mocking function is a static member of the test class
 * private static void dummyCallbackImpl(String operation, Callback callback) {
 *     callback.receive("dummy");
 * }
 *
 * doAnswer(answerVoid(TestClass::dummyCallbackImpl))
 *     .when(mock).execute(anyString(), any(Callback.class));
 *
 * // Java 7
 * doAnswer(answerVoid(new AnswerFunctionalInterfaces.VoidAnswer2<String, Callback>() {
 *     public void answer(String operation, Callback callback) {
 *         callback.receive("dummy");
 *     }})).when(mock).execute(anyString(), any(Callback.class));
 *
 * // returning a value is possible with the answer() function
 * // and the non-void version of the functional interfaces
 * // so if the mock interface had a method like
 * boolean isSameString(String input1, String input2);
 *
 * // this could be mocked
 * // Java 8
 * doAnswer(AdditionalAnswers.<Boolean, String, String>answer((input1, input2) -> input1.equals(input2)))
 *     .when(mock).isSameString(anyString(), anyString());
 *
 * // Java 7
 * doAnswer(answer(new AnswerFunctionalInterfaces.Answer2<Boolean, String, String>() {
 *     public Boolean answer(String input1, String input2) {
 *         return input1.equals(input2);
 *     }})).when(mock).isSameString(anyString(), anyString());
 * </code></pre>
 */
@SuppressWarnings("unchecked")
public class Mockito extends ArgumentMatchers {

    static final MockitoCore MOCKITO_CORE = new MockitoCore();

    /**
     * The default <code>Answer</code> of every mock <b>if</b> the mock was not stubbed.
     * Typically it just returns some empty value.
     * <p>
     * {@link Answer} can be used to define the return values of unstubbed invocations.
     * <p>
     * This implementation first tries the global configuration.
     * If there is no global configuration then it uses {@link ReturnsEmptyValues} (returns zeros, empty collections, nulls, etc.)
     */
    public static final Answer<Object> RETURNS_DEFAULTS = Answers.RETURNS_DEFAULTS;

    /**
     * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)}.
     * <p>
     * {@link Answer} can be used to define the return values of unstubbed invocations.
* <p> * This implementation can be helpful when working with legacy code. * Unstubbed methods often return null. If your code uses the object returned by an unstubbed call you get a NullPointerException. * This implementation of Answer <b>returns SmartNull instead of null</b>. * <code>SmartNull</code> gives nicer exception message than NPE because it points out the line where unstubbed method was called. You just click on the stack trace. * <p> * <code>ReturnsSmartNulls</code> first tries to return ordinary return values (see {@link ReturnsMoreEmptyValues}) * then it tries to return SmartNull. If the return type is final then plain null is returned. * <p> * <code>ReturnsSmartNulls</code> will be probably the default return values strategy in Mockito 3.0.0 * <p> * Example: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, RETURNS_SMART_NULLS); * * //calling unstubbed method here: * Stuff stuff = mock.getStuff(); * * //using object returned by unstubbed call: * stuff.doSomething(); * * //Above doesn't yield NullPointerException this time! * //Instead, SmartNullPointerException is thrown. * //Exception's cause links to unstubbed <i>mock.getStuff()</i> - just click on the stack trace. * </code></pre> */ public static final Answer<Object> RETURNS_SMART_NULLS = Answers.RETURNS_SMART_NULLS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)} * <p> * {@link Answer} can be used to define the return values of unstubbed invocations. * <p> * This implementation can be helpful when working with legacy code. * <p> * ReturnsMocks first tries to return ordinary return values (see {@link ReturnsMoreEmptyValues}) * then it tries to return mocks. If the return type cannot be mocked (e.g. is final) then plain null is returned. * <p> */ public static final Answer<Object> RETURNS_MOCKS = Answers.RETURNS_MOCKS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)}. * <p> * Example that shows how deep stub works: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, RETURNS_DEEP_STUBS); * * // note that we're stubbing a chain of methods here: getBar().getName() * when(mock.getBar().getName()).thenReturn("deep"); * * // note that we're chaining method calls: getBar().getName() * assertEquals("deep", mock.getBar().getName()); * </code></pre> * </p> * * <p> * <strong>WARNING: </strong> * This feature should rarely be required for regular clean code! Leave it for legacy code. * Mocking a mock to return a mock, to return a mock, (...), to return something meaningful * hints at violation of Law of Demeter or mocking a value object (a well known anti-pattern). * </p> * * <p> * Good quote I've seen one day on the web: <strong>every time a mock returns a mock a fairy dies</strong>. * </p> * * <p> * Please note that this answer will return existing mocks that matches the stub. This * behavior is ok with deep stubs and allows verification to work on the last mock of the chain. * <pre class="code"><code class="java"> * when(mock.getBar(anyString()).getThingy().getName()).thenReturn("deep"); * * mock.getBar("candy bar").getThingy().getName(); * * assertSame(mock.getBar(anyString()).getThingy().getName(), mock.getBar(anyString()).getThingy().getName()); * verify(mock.getBar("candy bar").getThingy()).getName(); * verify(mock.getBar(anyString()).getThingy()).getName(); * </code></pre> * </p> * * <p> * Verification only works with the last mock in the chain. You can use verification modes. 
* <pre class="code"><code class="java"> * when(person.getAddress(anyString()).getStreet().getName()).thenReturn("deep"); * when(person.getAddress(anyString()).getStreet(Locale.ITALIAN).getName()).thenReturn("deep"); * when(person.getAddress(anyString()).getStreet(Locale.CHINESE).getName()).thenReturn("deep"); * * person.getAddress("the docks").getStreet().getName(); * person.getAddress("the docks").getStreet().getLongName(); * person.getAddress("the docks").getStreet(Locale.ITALIAN).getName(); * person.getAddress("the docks").getStreet(Locale.CHINESE).getName(); * * // note that we are actually referring to the very last mock in the stubbing chain. * InOrder inOrder = inOrder( * person.getAddress("the docks").getStreet(), * person.getAddress("the docks").getStreet(Locale.CHINESE), * person.getAddress("the docks").getStreet(Locale.ITALIAN) * ); * inOrder.verify(person.getAddress("the docks").getStreet(), times(1)).getName(); * inOrder.verify(person.getAddress("the docks").getStreet()).getLongName(); * inOrder.verify(person.getAddress("the docks").getStreet(Locale.ITALIAN), atLeast(1)).getName(); * inOrder.verify(person.getAddress("the docks").getStreet(Locale.CHINESE)).getName(); * </code></pre> * </p> * * <p> * How deep stub work internally? * <pre class="code"><code class="java"> * //this: * Foo mock = mock(Foo.class, RETURNS_DEEP_STUBS); * when(mock.getBar().getName(), "deep"); * * //is equivalent of * Foo foo = mock(Foo.class); * Bar bar = mock(Bar.class); * when(foo.getBar()).thenReturn(bar); * when(bar.getName()).thenReturn("deep"); * </code></pre> * </p> * * <p> * This feature will not work when any return type of methods included in the chain cannot be mocked * (for example: is a primitive or a final class). This is because of java type system. * </p> */ public static final Answer<Object> RETURNS_DEEP_STUBS = Answers.RETURNS_DEEP_STUBS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)} * <p> * {@link Answer} can be used to define the return values of unstubbed invocations. * <p> * This implementation can be helpful when working with legacy code. * When this implementation is used, unstubbed methods will delegate to the real implementation. * This is a way to create a partial mock object that calls real methods by default. * <p> * As usual you are going to read <b>the partial mock warning</b>: * Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven & well-designed code. * <p> * Example: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, CALLS_REAL_METHODS); * * // this calls the real implementation of Foo.getSomething() * value = mock.getSomething(); * * when(mock.getSomething()).thenReturn(fakeValue); * * // now fakeValue is returned * value = mock.getSomething(); * </code></pre> */ public static final Answer<Object> CALLS_REAL_METHODS = Answers.CALLS_REAL_METHODS; /** * Optional <code>Answer</code> to be used with {@link Mockito#mock(Class, Answer)}. 
* * Allows Builder mocks to return itself whenever a method is invoked that returns a Type equal * to the class or a superclass. * * <p><b>Keep in mind this answer uses the return type of a method. * If this type is assignable to the class of the mock, it will return the mock. * Therefore if you have a method returning a superclass (for example {@code Object}) it will match and return the mock.</b></p> * * Consider a HttpBuilder used in a HttpRequesterWithHeaders. * * <pre class="code"><code class="java"> * public class HttpRequesterWithHeaders { * * private HttpBuilder builder; * * public HttpRequesterWithHeaders(HttpBuilder builder) { * this.builder = builder; * } * * public String request(String uri) { * return builder.withUrl(uri) * .withHeader("Content-type: application/json") * .withHeader("Authorization: Bearer") * .request(); * } * } * * private static class HttpBuilder { * * private String uri; * private List&lt;String&gt; headers; * * public HttpBuilder() { * this.headers = new ArrayList&lt;String&gt;(); * } * * public HttpBuilder withUrl(String uri) { * this.uri = uri; * return this; * } * * public HttpBuilder withHeader(String header) { * this.headers.add(header); * return this; * } * * public String request() { * return uri + headers.toString(); * } * } * </code></pre> * * The following test will succeed * * <pre><code> * &#064;Test * public void use_full_builder_with_terminating_method() { * HttpBuilder builder = mock(HttpBuilder.class, RETURNS_SELF); * HttpRequesterWithHeaders requester = new HttpRequesterWithHeaders(builder); * String response = "StatusCode: 200"; * * when(builder.request()).thenReturn(response); * * assertThat(requester.request("URI")).isEqualTo(response); * } * </code></pre> */ public static final Answer<Object> RETURNS_SELF = Answers.RETURNS_SELF; /** * Creates mock object of given class or interface. * <p> * See examples in javadoc for {@link Mockito} class * * @param classToMock class or interface to mock * @return mock object */ public static <T> T mock(Class<T> classToMock) { return mock(classToMock, withSettings().defaultAnswer(RETURNS_DEFAULTS)); } /** * Specifies mock name. Naming mocks can be helpful for debugging - the name is used in all verification errors. * <p> * Beware that naming mocks is not a solution for complex code which uses too many mocks or collaborators. * <b>If you have too many mocks then refactor the code</b> so that it's easy to test/debug without necessity of naming mocks. * <p> * <b>If you use <code>&#064;Mock</code> annotation then you've got naming mocks for free!</b> <code>&#064;Mock</code> uses field name as mock name. {@link Mock Read more.} * <p> * * See examples in javadoc for {@link Mockito} class * * @param classToMock class or interface to mock * @param name of the mock * @return mock object */ public static <T> T mock(Class<T> classToMock, String name) { return mock(classToMock, withSettings() .name(name) .defaultAnswer(RETURNS_DEFAULTS)); } /** * Returns a MockingDetails instance that enables inspecting a particular object for Mockito related information. * Can be used to find out if given object is a Mockito mock * or to find out if a given mock is a spy or mock. * <p> * In future Mockito versions MockingDetails may grow and provide other useful information about the mock, * e.g. invocations, stubbing info, etc. * * @param toInspect - object to inspect. null input is allowed. * @return A {@link org.mockito.MockingDetails} instance. 
* @since 1.9.5 */ public static MockingDetails mockingDetails(Object toInspect) { return MOCKITO_CORE.mockingDetails(toInspect); } /** * Creates mock with a specified strategy for its answers to interactions. * It's quite an advanced feature and typically you don't need it to write decent tests. * However it can be helpful when working with legacy systems. * <p> * It is the default answer so it will be used <b>only when you don't</b> stub the method call. * * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class, RETURNS_SMART_NULLS); * Foo mockTwo = mock(Foo.class, new YourOwnAnswer()); * </code></pre> * * <p>See examples in javadoc for {@link Mockito} class</p> * * @param classToMock class or interface to mock * @param defaultAnswer default answer for unstubbed methods * * @return mock object */ public static <T> T mock(Class<T> classToMock, Answer defaultAnswer) { return mock(classToMock, withSettings().defaultAnswer(defaultAnswer)); } /** * Creates a mock with some non-standard settings. * <p> * The number of configuration points for a mock grows * so we need a fluent way to introduce new configuration without adding more and more overloaded Mockito.mock() methods. * Hence {@link MockSettings}. * <pre class="code"><code class="java"> * Listener mock = mock(Listener.class, withSettings() * .name("firstListner").defaultBehavior(RETURNS_SMART_NULLS)); * ); * </code></pre> * <b>Use it carefully and occasionally</b>. What might be reason your test needs non-standard mocks? * Is the code under test so complicated that it requires non-standard mocks? * Wouldn't you prefer to refactor the code under test so it is testable in a simple way? * <p> * See also {@link Mockito#withSettings()} * <p> * See examples in javadoc for {@link Mockito} class * * @param classToMock class or interface to mock * @param mockSettings additional mock settings * @return mock object */ public static <T> T mock(Class<T> classToMock, MockSettings mockSettings) { return MOCKITO_CORE.mock(classToMock, mockSettings); } /** * Creates a spy of the real object. The spy calls <b>real</b> methods unless they are stubbed. * <p> * Real spies should be used <b>carefully and occasionally</b>, for example when dealing with legacy code. * <p> * As usual you are going to read <b>the partial mock warning</b>: * Object oriented programming tackles complexity by dividing the complexity into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven & well-designed code. 
 * <p>
 * Example:
 *
 * <pre class="code"><code class="java">
 *   List list = new LinkedList();
 *   List spy = spy(list);
 *
 *   //optionally, you can stub out some methods:
 *   when(spy.size()).thenReturn(100);
 *
 *   //using the spy calls <b>real</b> methods
 *   spy.add("one");
 *   spy.add("two");
 *
 *   //prints "one" - the first element of a list
 *   System.out.println(spy.get(0));
 *
 *   //size() method was stubbed - 100 is printed
 *   System.out.println(spy.size());
 *
 *   //optionally, you can verify
 *   verify(spy).add("one");
 *   verify(spy).add("two");
 * </code></pre>
 *
 * <h4>Important gotcha on spying real objects!</h4>
 * <ol>
 * <li>Sometimes it's impossible or impractical to use {@link Mockito#when(Object)} for stubbing spies.
 * Therefore for spies it is recommended to always use the <code>doReturn</code>|<code>Answer</code>|<code>Throw()</code>|<code>CallRealMethod</code>
 * family of methods for stubbing. Example:
 *
 * <pre class="code"><code class="java">
 *   List list = new LinkedList();
 *   List spy = spy(list);
 *
 *   //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty)
 *   when(spy.get(0)).thenReturn("foo");
 *
 *   //You have to use doReturn() for stubbing
 *   doReturn("foo").when(spy).get(0);
 * </code></pre>
 * </li>
 *
 * <li>Mockito <b>*does not*</b> delegate calls to the passed real instance, instead it actually creates a copy of it.
 * So if you keep the real instance and interact with it, don't expect the spy to be aware of those interactions
 * and their effect on the real instance's state.
 * The corollary is that when an <b>*unstubbed*</b> method is called <b>*on the spy*</b> but <b>*not on the real instance*</b>,
 * you won't see any effects on the real instance.</li>
 *
 * <li>Watch out for final methods.
 * Mockito doesn't mock final methods so the bottom line is: when you spy on real objects + you try to stub a final method = trouble.
 * You won't be able to verify those methods either.
 * </li>
 * </ol>
 * <p>
 * See examples in javadoc for {@link Mockito} class
 *
 * <p>Note that the spy won't have any annotations of the spied type, because CGLIB won't rewrite them.
 * It may be troublesome for code that relies on the spy to have these annotations.</p>
 *
 *
 * @param object
 *            to spy on
 * @return a spy of the real object
 */
public static <T> T spy(T object) {
    return MOCKITO_CORE.mock((Class<T>) object.getClass(), withSettings()
            .spiedInstance(object)
            .defaultAnswer(CALLS_REAL_METHODS));
}

/**
 * Please refer to the documentation of {@link #spy(Object)}.
 * Overusing spies hints at code design smells.
 * <p>
 * This method, in contrast to the original {@link #spy(Object)}, creates a spy based on a class instead of an object.
 * Sometimes it is more convenient to create a spy based on the class and avoid providing an instance of the spied object.
 * This is particularly useful for spying on abstract classes because they cannot be instantiated.
 * See also {@link MockSettings#useConstructor()}.
 * <p>
 * Examples:
 * <pre class="code"><code class="java">
 *   SomeAbstract spy = spy(SomeAbstract.class);
 *
 *   //Robust API, via settings builder:
 *   OtherAbstract spy = mock(OtherAbstract.class, withSettings()
 *      .useConstructor().defaultAnswer(CALLS_REAL_METHODS));
 *
 *   //Mocking a non-static inner abstract class:
 *   InnerAbstract spy = mock(InnerAbstract.class, withSettings()
 *      .useConstructor().outerInstance(outerInstance).defaultAnswer(CALLS_REAL_METHODS));
 * </code></pre>
 *
 * @param classToSpy the class to spy
 * @param <T> type of the spy
 * @return a spy of the provided class
 * @since 1.10.12
 */
@Incubating
public static <T> T spy(Class<T> classToSpy) {
    return MOCKITO_CORE.mock(classToSpy, withSettings()
            .useConstructor()
            .defaultAnswer(CALLS_REAL_METHODS));
}

/**
 * Enables stubbing methods. Use it when you want the mock to return a particular value when a particular method is called.
 * <p>
 * Simply put: "<b>When</b> the x method is called <b>then</b> return y".
 *
 * <p>
 * Examples:
 *
 * <pre class="code"><code class="java">
 * <b>when</b>(mock.someMethod()).<b>thenReturn</b>(10);
 *
 * //you can use flexible argument matchers, e.g:
 * when(mock.someMethod(<b>anyString()</b>)).thenReturn(10);
 *
 * //setting exception to be thrown:
 * when(mock.someMethod("some arg")).thenThrow(new RuntimeException());
 *
 * //you can set different behavior for consecutive method calls.
 * //Last stubbing (e.g: thenReturn("foo")) determines the behavior of further consecutive calls.
 * when(mock.someMethod("some arg"))
 *    .thenThrow(new RuntimeException())
 *    .thenReturn("foo");
 *
 * //Alternative, shorter version for consecutive stubbing:
 * when(mock.someMethod("some arg"))
 *    .thenReturn("one", "two");
 * //is the same as:
 * when(mock.someMethod("some arg"))
 *    .thenReturn("one")
 *    .thenReturn("two");
 *
 * //shorter version for consecutive method calls throwing exceptions:
 * when(mock.someMethod("some arg"))
 *    .thenThrow(new RuntimeException(), new NullPointerException());
 *
 * </code></pre>
 *
 * For stubbing void methods with throwables see: {@link Mockito#doThrow(Throwable...)}
 * <p>
 * Stubbing can be overridden: for example common stubbing can go to fixture
 * setup but the test methods can override it.
 * Please note that overriding stubbing is a potential code smell that points out too much stubbing.
 * <p>
 * Once stubbed, the method will always return the stubbed value regardless
 * of how many times it is called.
 * <p>
 * The last stubbing is more important - when you stub the same method with
 * the same arguments many times.
 * <p>
 * Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>.
 * Let's say you've stubbed <code>foo.bar()</code>.
 * If your code cares what <code>foo.bar()</code> returns then something else breaks (often before even <code>verify()</code> gets executed).
 * If your code doesn't care what <code>foo.bar()</code> returns then it should not be stubbed.
 * Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>.
 *
 * <p>
 * See examples in javadoc for {@link Mockito} class
 * @param methodCall method to be stubbed
 * @return OngoingStubbing object used to stub fluently.
 *         <strong>Do not</strong> create a reference to this returned object.
 */
public static <T> OngoingStubbing<T> when(T methodCall) {
    return MOCKITO_CORE.when(methodCall);
}

/**
 * Verifies certain behavior <b>happened once</b>.
* <p> * Alias to <code>verify(mock, times(1))</code> E.g: * <pre class="code"><code class="java"> * verify(mock).someMethod("some arg"); * </code></pre> * Above is equivalent to: * <pre class="code"><code class="java"> * verify(mock, times(1)).someMethod("some arg"); * </code></pre> * <p> * Arguments passed are compared using <code>equals()</code> method. * Read about {@link ArgumentCaptor} or {@link ArgumentMatcher} to find out other ways of matching / asserting arguments passed. * <p> * Although it is possible to verify a stubbed invocation, usually <b>it's just redundant</b>. * Let's say you've stubbed <code>foo.bar()</code>. * If your code cares what <code>foo.bar()</code> returns then something else breaks(often before even <code>verify()</code> gets executed). * If your code doesn't care what <code>get(0)</code> returns then it should not be stubbed. * Not convinced? See <a href="http://monkeyisland.pl/2008/04/26/asking-and-telling">here</a>. * * <p> * See examples in javadoc for {@link Mockito} class * * @param mock to be verified * @return mock object itself */ public static <T> T verify(T mock) { return MOCKITO_CORE.verify(mock, times(1)); } /** * Verifies certain behavior happened at least once / exact number of times / never. E.g: * <pre class="code"><code class="java"> * verify(mock, times(5)).someMethod("was called five times"); * * verify(mock, atLeast(2)).someMethod("was called at least two times"); * * //you can use flexible argument matchers, e.g: * verify(mock, atLeastOnce()).someMethod(<b>anyString()</b>); * </code></pre> * * <b>times(1) is the default</b> and can be omitted * <p> * Arguments passed are compared using <code>equals()</code> method. * Read about {@link ArgumentCaptor} or {@link ArgumentMatcher} to find out other ways of matching / asserting arguments passed. * <p> * * @param mock to be verified * @param mode times(x), atLeastOnce() or never() * * @return mock object itself */ public static <T> T verify(T mock, VerificationMode mode) { return MOCKITO_CORE.verify(mock, mode); } /** * Smart Mockito users hardly use this feature because they know it could be a sign of poor tests. * Normally, you don't need to reset your mocks, just create new mocks for each test method. * <p> * Instead of <code>#reset()</code> please consider writing simple, small and focused test methods over lengthy, over-specified tests. * <b>First potential code smell is <code>reset()</code> in the middle of the test method.</b> This probably means you're testing too much. * Follow the whisper of your test methods: "Please keep us small & focused on single behavior". * There are several threads about it on mockito mailing list. * <p> * The only reason we added <code>reset()</code> method is to * make it possible to work with container-injected mocks. * For more information see the FAQ (<a href="https://github.com/mockito/mockito/wiki/FAQ">here</a>). * <p> * <b>Don't harm yourself.</b> <code>reset()</code> in the middle of the test method is a code smell (you're probably testing too much). * <pre class="code"><code class="java"> * List mock = mock(List.class); * when(mock.size()).thenReturn(10); * mock.add(1); * * reset(mock); * //at this point the mock forgot any interactions & stubbing * </code></pre> * * @param <T> The Type of the mocks * @param mocks to be reset */ public static <T> void reset(T ... mocks) { MOCKITO_CORE.reset(mocks); } /** * Use this method in order to only clear invocations, when stubbing is non-trivial. 
Use-cases can be: * <ul> * <li>You are using a dependency injection framework to inject your mocks.</li> * <li>The mock is used in a stateful scenario. For example a class is Singleton which depends on your mock.</li> * </ul> * * <b>Try to avoid this method at all costs. Only clear invocations if you are unable to efficiently test your program.</b> * @param <T> The type of the mocks * @param mocks The mocks to clear the invocations for */ public static <T> void clearInvocations(T ... mocks) { MOCKITO_CORE.clearInvocations(mocks); } /** * Checks if any of given mocks has any unverified interaction. * <p> * You can use this method after you verified your mocks - to make sure that nothing * else was invoked on your mocks. * <p> * See also {@link Mockito#never()} - it is more explicit and communicates the intent well. * <p> * Stubbed invocations (if called) are also treated as interactions. * <p> * A word of <b>warning</b>: * Some users who did a lot of classic, expect-run-verify mocking tend to use <code>verifyNoMoreInteractions()</code> very often, even in every test method. * <code>verifyNoMoreInteractions()</code> is not recommended to use in every test method. * <code>verifyNoMoreInteractions()</code> is a handy assertion from the interaction testing toolkit. Use it only when it's relevant. * Abusing it leads to overspecified, less maintainable tests. You can find further reading * <a href="http://monkeyisland.pl/2008/07/12/should-i-worry-about-the-unexpected/">here</a>. * <p> * This method will also detect unverified invocations that occurred before the test method, * for example: in <code>setUp()</code>, <code>&#064;Before</code> method or in constructor. * Consider writing nice code that makes interactions only in test methods. * * <p> * Example: * * <pre class="code"><code class="java"> * //interactions * mock.doSomething(); * mock.doSomethingUnexpected(); * * //verification * verify(mock).doSomething(); * * //following will fail because 'doSomethingUnexpected()' is unexpected * verifyNoMoreInteractions(mock); * * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified */ public static void verifyNoMoreInteractions(Object... mocks) { MOCKITO_CORE.verifyNoMoreInteractions(mocks); } /** * Verifies that no interactions happened on given mocks. * <pre class="code"><code class="java"> * verifyZeroInteractions(mockOne, mockTwo); * </code></pre> * This method will also detect invocations * that occurred before the test method, for example: in <code>setUp()</code>, <code>&#064;Before</code> method or in constructor. * Consider writing nice code that makes interactions only in test methods. * <p> * See also {@link Mockito#never()} - it is more explicit and communicates the intent well. * <p> * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified */ public static void verifyZeroInteractions(Object... mocks) { MOCKITO_CORE.verifyNoMoreInteractions(mocks); } /** * Use <code>doThrow()</code> when you want to stub the void method with an exception. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(new RuntimeException()).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doThrow(Throwable... 
toBeThrown) { return MOCKITO_CORE.stubber().doThrow(toBeThrown); } /** * Use <code>doThrow()</code> when you want to stub the void method with an exception. * <p> * A new exception instance will be created for each method invocation. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(RuntimeException.class).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ public static Stubber doThrow(Class<? extends Throwable> toBeThrown) { return MOCKITO_CORE.stubber().doThrow(toBeThrown); } /** * Same as {@link #doThrow(Class)} but sets consecutive exception classes to be thrown. Remember to use * <code>doThrow()</code> when you want to stub the void method to throw several exception of specified class. * <p> * A new exception instance will be created for each method invocation. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(RuntimeException.class, BigFailure.class).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @param toBeThrownNext next to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ // Additional method helps users of JDK7+ to hide heap pollution / unchecked generics array creation @SuppressWarnings ({"unchecked", "varargs"}) public static Stubber doThrow(Class<? extends Throwable> toBeThrown, Class<? extends Throwable>... toBeThrownNext) { return MOCKITO_CORE.stubber().doThrow(toBeThrown, toBeThrownNext); } /** * Use <code>doCallRealMethod()</code> when you want to call the real implementation of a method. * <p> * As usual you are going to read <b>the partial mock warning</b>: * Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven & well-designed code. * <p> * See also javadoc {@link Mockito#spy(Object)} to find out more about partial mocks. * <b>Mockito.spy() is a recommended way of creating partial mocks.</b> * The reason is it guarantees real methods are called against correctly constructed object because you're responsible for constructing the object passed to spy() method. 
* <p> * Example: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class); * doCallRealMethod().when(mock).someVoidMethod(); * * // this will call the real implementation of Foo.someVoidMethod() * mock.someVoidMethod(); * </code></pre> * <p> * See examples in javadoc for {@link Mockito} class * * @return stubber - to select a method for stubbing * @since 1.9.5 */ public static Stubber doCallRealMethod() { return MOCKITO_CORE.stubber().doCallRealMethod(); } /** * Use <code>doAnswer()</code> when you want to stub a void method with generic {@link Answer}. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doAnswer(new Answer() { * public Object answer(InvocationOnMock invocation) { * Object[] args = invocation.getArguments(); * Mock mock = invocation.getMock(); * return null; * }}) * .when(mock).someMethod(); * </code></pre> * <p> * See examples in javadoc for {@link Mockito} class * * @param answer to answer when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doAnswer(Answer answer) { return MOCKITO_CORE.stubber().doAnswer(answer); } /** * Use <code>doNothing()</code> for setting void methods to do nothing. <b>Beware that void methods on mocks do nothing by default!</b> * However, there are rare situations when doNothing() comes handy: * <p> * <ol> * <li>Stubbing consecutive calls on a void method: * <pre class="code"><code class="java"> * doNothing(). * doThrow(new RuntimeException()) * .when(mock).someVoidMethod(); * * //does nothing the first time: * mock.someVoidMethod(); * * //throws RuntimeException the next time: * mock.someVoidMethod(); * </code></pre> * </li> * <li>When you spy real objects and you want the void method to do nothing: * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //let's make clear() do nothing * doNothing().when(spy).clear(); * * spy.add("one"); * * //clear() does nothing, so the list still contains "one" * spy.clear(); * </code></pre> * </li> * </ol> * <p> * See examples in javadoc for {@link Mockito} class * * @return stubber - to select a method for stubbing */ public static Stubber doNothing() { return MOCKITO_CORE.stubber().doNothing(); } /** * Use <code>doReturn()</code> in those rare occasions when you cannot use {@link Mockito#when(Object)}. * <p> * <b>Beware that {@link Mockito#when(Object)} is always recommended for stubbing because it is argument type-safe * and more readable</b> (especially when stubbing consecutive calls). * <p> * Here are those rare occasions when doReturn() comes handy: * <p> * * <ol> * <li>When spying real objects and calling real methods on a spy brings side effects * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo"); * * //You have to use doReturn() for stubbing: * doReturn("foo").when(spy).get(0); * </code></pre> * </li> * * <li>Overriding a previous exception-stubbing: * <pre class="code"><code class="java"> * when(mock.foo()).thenThrow(new RuntimeException()); * * //Impossible: the exception-stubbed foo() method is called so RuntimeException is thrown. 
* when(mock.foo()).thenReturn("bar"); * * //You have to use doReturn() for stubbing: * doReturn("bar").when(mock).foo(); * </code></pre> * </li> * </ol> * * Above scenarios shows a tradeoff of Mockito's elegant syntax. Note that the scenarios are very rare, though. * Spying should be sporadic and overriding exception-stubbing is very rare. Not to mention that in general * overridding stubbing is a potential code smell that points out too much stubbing. * <p> * See examples in javadoc for {@link Mockito} class * * @param toBeReturned to be returned when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doReturn(Object toBeReturned) { return MOCKITO_CORE.stubber().doReturn(toBeReturned); } /** * Same as {@link #doReturn(Object)} but sets consecutive values to be returned. Remember to use * <code>doReturn()</code> in those rare occasions when you cannot use {@link Mockito#when(Object)}. * <p> * <b>Beware that {@link Mockito#when(Object)} is always recommended for stubbing because it is argument type-safe * and more readable</b> (especially when stubbing consecutive calls). * <p> * Here are those rare occasions when doReturn() comes handy: * <p> * * <ol> * <li>When spying real objects and calling real methods on a spy brings side effects * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo", "bar", "qix"); * * //You have to use doReturn() for stubbing: * doReturn("foo", "bar", "qix").when(spy).get(0); * </code></pre> * </li> * * <li>Overriding a previous exception-stubbing: * <pre class="code"><code class="java"> * when(mock.foo()).thenThrow(new RuntimeException()); * * //Impossible: the exception-stubbed foo() method is called so RuntimeException is thrown. * when(mock.foo()).thenReturn("bar", "foo", "qix"); * * //You have to use doReturn() for stubbing: * doReturn("bar", "foo", "qix").when(mock).foo(); * </code></pre> * </li> * </ol> * * Above scenarios shows a trade-off of Mockito's elegant syntax. Note that the scenarios are very rare, though. * Spying should be sporadic and overriding exception-stubbing is very rare. Not to mention that in general * overridding stubbing is a potential code smell that points out too much stubbing. * <p> * See examples in javadoc for {@link Mockito} class * * @param toBeReturned to be returned when the stubbed method is called * @param toBeReturnedNext to be returned in consecutive calls when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ @SuppressWarnings({"unchecked", "varargs"}) public static Stubber doReturn(Object toBeReturned, Object... toBeReturnedNext) { return MOCKITO_CORE.stubber().doReturn(toBeReturned, toBeReturnedNext); } /** * Creates {@link org.mockito.InOrder} object that allows verifying mocks in order. * * <pre class="code"><code class="java"> * InOrder inOrder = inOrder(firstMock, secondMock); * * inOrder.verify(firstMock).add("was called first"); * inOrder.verify(secondMock).add("was called second"); * </code></pre> * * Verification in order is flexible - <b>you don't have to verify all interactions</b> one-by-one * but only those that you are interested in testing in order. * <p> * Also, you can create InOrder object passing only mocks that are relevant for in-order verification. 
* <p> * <code>InOrder</code> verification is 'greedy', but you will hardly ever notice it. * If you want to find out more, read * <a href="https://github.com/mockito/mockito/wiki/Greedy-algorithm-of-verfication-InOrder">this wiki page</a>. * <p> * As of Mockito 1.8.4 you can verifyNoMoreInvocations() in order-sensitive way. Read more: {@link InOrder#verifyNoMoreInteractions()} * <p> * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified in order * * @return InOrder object to be used to verify in order */ public static InOrder inOrder(Object... mocks) { return MOCKITO_CORE.inOrder(mocks); } /** * Ignores stubbed methods of given mocks for the sake of verification. * Sometimes useful when coupled with <code>verifyNoMoreInteractions()</code> or verification <code>inOrder()</code>. * Helps avoiding redundant verification of stubbed calls - typically we're not interested in verifying stubs. * <p> * <b>Warning</b>, <code>ignoreStubs()</code> might lead to overuse of <code>verifyNoMoreInteractions(ignoreStubs(...));</code> * Bear in mind that Mockito does not recommend bombarding every test with <code>verifyNoMoreInteractions()</code> * for the reasons outlined in javadoc for {@link Mockito#verifyNoMoreInteractions(Object...)} * Other words: all <b>*stubbed*</b> methods of given mocks are marked <b>*verified*</b> so that they don't get in a way during verifyNoMoreInteractions(). * <p> * This method <b>changes the input mocks</b>! This method returns input mocks just for convenience. * <p> * Ignored stubs will also be ignored for verification inOrder, including {@link org.mockito.InOrder#verifyNoMoreInteractions()}. * See the second example. * <p> * Example: * <pre class="code"><code class="java"> * //mocking lists for the sake of the example (if you mock List in real you will burn in hell) * List mock1 = mock(List.class), mock2 = mock(List.class); * * //stubbing mocks: * when(mock1.get(0)).thenReturn(10); * when(mock2.get(0)).thenReturn(20); * * //using mocks by calling stubbed get(0) methods: * System.out.println(mock1.get(0)); //prints 10 * System.out.println(mock2.get(0)); //prints 20 * * //using mocks by calling clear() methods: * mock1.clear(); * mock2.clear(); * * //verification: * verify(mock1).clear(); * verify(mock2).clear(); * * //verifyNoMoreInteractions() fails because get() methods were not accounted for. * try { verifyNoMoreInteractions(mock1, mock2); } catch (NoInteractionsWanted e); * * //However, if we ignore stubbed methods then we can verifyNoMoreInteractions() * verifyNoMoreInteractions(ignoreStubs(mock1, mock2)); * * //Remember that ignoreStubs() <b>*changes*</b> the input mocks and returns them for convenience. * </code></pre> * Ignoring stubs can be used with <b>verification in order</b>: * <pre class="code"><code class="java"> * List list = mock(List.class); * when(mock.get(0)).thenReturn("foo"); * * list.add(0); * System.out.println(list.get(0)); //we don't want to verify this * list.clear(); * * InOrder inOrder = inOrder(ignoreStubs(list)); * inOrder.verify(list).add(0); * inOrder.verify(list).clear(); * inOrder.verifyNoMoreInteractions(); * </code></pre> * * @since 1.9.0 * @param mocks input mocks that will be changed * @return the same mocks that were passed in as parameters */ public static Object[] ignoreStubs(Object... mocks) { return MOCKITO_CORE.ignoreStubs(mocks); } /** * Allows verifying exact number of invocations. 
E.g: * <pre class="code"><code class="java"> * verify(mock, times(2)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param wantedNumberOfInvocations wanted number of invocations * * @return verification mode */ public static VerificationMode times(int wantedNumberOfInvocations) { return VerificationModeFactory.times(wantedNumberOfInvocations); } /** * Alias to <code>times(0)</code>, see {@link Mockito#times(int)} * <p> * Verifies that interaction did not happen. E.g: * <pre class="code"><code class="java"> * verify(mock, never()).someMethod(); * </code></pre> * * <p> * If you want to verify there were NO interactions with the mock * check out {@link Mockito#verifyZeroInteractions(Object...)} * or {@link Mockito#verifyNoMoreInteractions(Object...)} * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode never() { return times(0); } /** * Allows at-least-once verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atLeastOnce()).someMethod("some arg"); * </code></pre> * Alias to <code>atLeast(1)</code>. * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode atLeastOnce() { return VerificationModeFactory.atLeastOnce(); } /** * Allows at-least-x verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atLeast(3)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param minNumberOfInvocations minimum number of invocations * * @return verification mode */ public static VerificationMode atLeast(int minNumberOfInvocations) { return VerificationModeFactory.atLeast(minNumberOfInvocations); } /** * Allows at-most-x verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atMost(3)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param maxNumberOfInvocations max number of invocations * * @return verification mode */ public static VerificationMode atMost(int maxNumberOfInvocations) { return VerificationModeFactory.atMost(maxNumberOfInvocations); } /** * Allows non-greedy verification in order. For example * <pre class="code"><code class="java"> * inOrder.verify( mock, calls( 2 )).someMethod( "some arg" ); * </code></pre> * <ul> * <li>will not fail if the method is called 3 times, unlike times( 2 )</li> * <li>will not mark the third invocation as verified, unlike atLeast( 2 )</li> * </ul> * This verification mode can only be used with in order verification. * @param wantedNumberOfInvocations number of invocations to verify * @return verification mode */ public static VerificationMode calls( int wantedNumberOfInvocations ){ return VerificationModeFactory.calls( wantedNumberOfInvocations ); } /** * Allows checking if given method was the only one invoked. E.g: * <pre class="code"><code class="java"> * verify(mock, only()).someMethod(); * //above is a shorthand for following 2 lines of code: * verify(mock).someMethod(); * verifyNoMoreInvocations(mock); * </code></pre> * * <p> * See also {@link Mockito#verifyNoMoreInteractions(Object...)} * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode only() { return VerificationModeFactory.only(); } /** * Allows verifying with timeout. 
It causes a verify to wait for a specified period of time for a desired * interaction rather than fails immediately if has not already happened. May be useful for testing in concurrent * conditions. * <p> * This differs from {@link Mockito#after after()} in that after() will wait the full period, unless * the final test result is known early (e.g. if a never() fails), whereas timeout() will stop early as soon * as verification passes, producing different behaviour when used with times(2), for example, which can pass * and then later fail. In that case, timeout would pass as soon as times(2) passes, whereas after would run until * times(2) failed, and then fail. * <p> * This feature should be used rarely - figure out a better way of testing your multi-threaded system. * <pre class="code"><code class="java"> * //passes when someMethod() is called within given time span * verify(mock, timeout(100)).someMethod(); * //above is an alias to: * verify(mock, timeout(100).times(1)).someMethod(); * * //passes as soon as someMethod() has been called 2 times before the given timeout * verify(mock, timeout(100).times(2)).someMethod(); * * //equivalent: this also passes as soon as someMethod() has been called 2 times before the given timeout * verify(mock, timeout(100).atLeast(2)).someMethod(); * * //verifies someMethod() within given time span using given verification mode * //useful only if you have your own custom verification modes. * verify(mock, new Timeout(100, yourOwnVerificationMode)).someMethod(); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param millis - time span in milliseconds * * @return verification mode */ public static VerificationWithTimeout timeout(long millis) { return new Timeout(millis, VerificationModeFactory.times(1)); } /** * Allows verifying over a given period. It causes a verify to wait for a specified period of time for a desired * interaction rather than failing immediately if has not already happened. May be useful for testing in concurrent * conditions. * <p> * This differs from {@link Mockito#timeout timeout()} in that after() will wait the full period, whereas timeout() * will stop early as soon as verification passes, producing different behaviour when used with times(2), for example, * which can pass and then later fail. In that case, timeout would pass as soon as times(2) passes, whereas after would * run the full time, which point it will fail, as times(2) has failed. * <p> * This feature should be used rarely - figure out a better way of testing your multi-threaded system. * <p> * Not yet implemented to work with InOrder verification. * <pre class="code"><code class="java"> * //passes after 100ms, if someMethod() has only been called once at that time. * verify(mock, after(100)).someMethod(); * //above is an alias to: * verify(mock, after(100).times(1)).someMethod(); * * //passes if someMethod() is called <b>*exactly*</b> 2 times after the given timespan * verify(mock, after(100).times(2)).someMethod(); * * //passes if someMethod() has not been called after the given timespan * verify(mock, after(100).never()).someMethod(); * * //verifies someMethod() after a given time span using given verification mode * //useful only if you have your own custom verification modes. 
* verify(mock, new After(100, yourOwnVerificationMode)).someMethod(); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param millis - time span in milliseconds * * @return verification mode */ public static VerificationAfterDelay after(long millis) { return new After(millis, VerificationModeFactory.times(1)); } /** * First of all, in case of any trouble, I encourage you to read the Mockito FAQ: <a href="https://github.com/mockito/mockito/wiki/FAQ">https://github.com/mockito/mockito/wiki/FAQ</a> * <p> * In case of questions you may also post to mockito mailing list: <a href="http://groups.google.com/group/mockito">http://groups.google.com/group/mockito</a> * <p> * <code>validateMockitoUsage()</code> <b>explicitly validates</b> the framework state to detect invalid use of Mockito. * However, this feature is optional <b>because Mockito validates the usage all the time...</b> but there is a gotcha so read on. * <p> * Examples of incorrect use: * <pre class="code"><code class="java"> * //Oops, thenReturn() part is missing: * when(mock.get()); * * //Oops, verified method call is inside verify() where it should be on the outside: * verify(mock.execute()); * * //Oops, missing method to verify: * verify(mock); * </code></pre> * * Mockito throws exceptions if you misuse it so that you know if your tests are written correctly. * The gotcha is that Mockito does the validation <b>next time</b> you use the framework (e.g. next time you verify, stub, call mock etc.). * But even though the exception might be thrown in the next test, * the exception <b>message contains a navigable stack trace element</b> with location of the defect. * Hence you can click and find the place where Mockito was misused. * <p> * Sometimes though, you might want to validate the framework usage explicitly. * For example, one of the users wanted to put <code>validateMockitoUsage()</code> in his <code>&#064;After</code> method * so that he knows immediately when he misused Mockito. * Without it, he would have known about it not sooner than <b>next time</b> he used the framework. * One more benefit of having <code>validateMockitoUsage()</code> in <code>&#064;After</code> is that jUnit runner and rule will always fail in the test method with defect * whereas ordinary 'next-time' validation might fail the <b>next</b> test method. * But even though JUnit might report next test as red, don't worry about it * and just click at navigable stack trace element in the exception message to instantly locate the place where you misused mockito. * <p> * <b>Both built-in runner: {@link MockitoJUnitRunner} and rule: {@link MockitoRule}</b> do validateMockitoUsage() after each test method. * <p> * Bear in mind that <b>usually you don't have to <code>validateMockitoUsage()</code></b> * and framework validation triggered on next-time basis should be just enough, * mainly because of enhanced exception message with clickable location of defect. * However, I would recommend validateMockitoUsage() if you already have sufficient test infrastructure * (like your own runner or base class for all tests) because adding a special action to <code>&#064;After</code> has zero cost. * <p> * See examples in javadoc for {@link Mockito} class */ public static void validateMockitoUsage() { MOCKITO_CORE.validateMockitoUsage(); } /** * Allows mock creation with additional mock settings. * <p> * Don't use it too often. * Consider writing simple tests that use simple mocks. 
* Repeat after me: simple tests push simple, KISSy, readable & maintainable code. * If you cannot write a test in a simple way - refactor the code under test. * <p> * Examples of mock settings: * <pre class="code"><code class="java"> * //Creates mock with different default answer & name * Foo mock = mock(Foo.class, withSettings() * .defaultAnswer(RETURNS_SMART_NULLS) * .name("cool mockie")); * * //Creates mock with different default answer, descriptive name and extra interfaces * Foo mock = mock(Foo.class, withSettings() * .defaultAnswer(RETURNS_SMART_NULLS) * .name("cool mockie") * .extraInterfaces(Bar.class)); * </code></pre> * {@link MockSettings} has been introduced for two reasons. * Firstly, to make it easy to add another mock settings when the demand comes. * Secondly, to enable combining different mock settings without introducing zillions of overloaded mock() methods. * <p> * See javadoc for {@link MockSettings} to learn about possible mock settings. * <p> * * @return mock settings instance with defaults. */ public static MockSettings withSettings() { return new MockSettingsImpl().defaultAnswer(RETURNS_DEFAULTS); } /** * Adds a description to be printed if verification fails. * <pre class="code"><code class="java"> * verify(mock, description("This will print on failure")).someMethod("some arg"); * </code></pre> * @param description The description to print on failure. * @return verification mode * @since 2.1.0 */ public static VerificationMode description(String description) { return times(1).description(description); } /** * This API will move soon to a different place. * See <a href="https://github.com/mockito/mockito/issues/577">issue 577</a>. */ @Deprecated static MockitoDebugger debug() { return new MockitoDebuggerImpl(); } /** * For advanced users or framework integrators. See {@link MockitoFramework} class. * * @since 2.1.0 */ @Incubating public static MockitoFramework framework() { return new DefaultMockitoFramework(); } }
Cleaned up Mockito javadoc: Removed 'new' tags altogether as the version number already is a better indicator for recent changes
src/main/java/org/mockito/Mockito.java
Cleaned up Mockito javadoc: Removed 'new' tags altogether as the version number already is a better indicator for recent changes
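The Mockito record above documents the doThrow/doReturn/doNothing stubbers and the times/never/calls/inOrder verification modes. As a minimal, hedged sketch (not part of the commit), the test below exercises a few of those entry points against a hypothetical AccountDao interface; everything other than the Mockito API itself is invented for illustration.

import static org.mockito.Mockito.*;

import java.util.List;
import org.junit.Test;
import org.mockito.InOrder;

public class MockitoUsageSketchTest {

    // hypothetical collaborator, stands in for "mock" in the javadoc examples
    interface AccountDao {
        String load(int id);
        void flush();
    }

    @Test
    public void stubbingAndVerificationSketch() {
        AccountDao dao = mock(AccountDao.class);

        // doThrow(Class, Class...): consecutive exception classes for a void method
        doThrow(IllegalStateException.class, RuntimeException.class).when(dao).flush();

        // doReturn(Object, Object...): consecutive return values
        doReturn("first", "second").when(dao).load(1);

        dao.load(1);
        dao.load(1);

        // plain verification modes
        verify(dao, times(2)).load(1);
        verify(dao, never()).load(2);

        // in-order verification; calls() is only valid inside an InOrder
        List list = mock(List.class);
        InOrder inOrder = inOrder(dao, list);
        inOrder.verify(dao, calls(2)).load(1);
    }
}

As the javadoc above stresses, when(...).thenReturn(...) remains the preferred form; doReturn()/doThrow() are mainly for spies, void methods, and overriding earlier exception stubbing.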
Java
mit
8b6940b77a10209f47d4171bf7428014a8334aff
0
peterLaurence/TileView
package com.qozix.tileview.tiles; import android.os.Process; import android.support.annotation.NonNull; import android.util.Log; import java.io.InterruptedIOException; import java.lang.ref.WeakReference; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; public class TileRenderPoolExecutor extends ThreadPoolExecutor { private static final int KEEP_ALIVE_TIME = 1; private static final TimeUnit KEEP_ALIVE_TIME_UNIT = TimeUnit.SECONDS; private static final int CORE_POOL_SIZE = Runtime.getRuntime().availableProcessors(); private static final int MAXIMUM_POOL_SIZE = Runtime.getRuntime().availableProcessors(); private WeakReference<TileCanvasViewGroup> mTileCanvasViewGroupWeakReference; private List<TileRenderTask> mExecutingTileRenderTasks; private List<TileRenderTask> mCancelledTileRenderTasks; public TileRenderPoolExecutor() { super( CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME, KEEP_ALIVE_TIME_UNIT, new LinkedBlockingDeque<Runnable>() ); mExecutingTileRenderTasks = (List) Collections.synchronizedList( new LinkedList<>() ); mCancelledTileRenderTasks = (List) Collections.synchronizedList( new LinkedList<>() ); } public void cancel() { if( getQueue().size() > 0 || getActiveCount() > 0 ) { Log.d( "DEBUG", "queue 0, active count 0, dispatch render cancelled event" ); TileCanvasViewGroup tileCanvasViewGroup = mTileCanvasViewGroupWeakReference.get(); if( tileCanvasViewGroup != null ) { tileCanvasViewGroup.onRenderTaskCancelled(); Log.d( "DEBUG", "render cancelled event dispatched" ); } } //getQueue().clear(); mCancelledTileRenderTasks.addAll( mExecutingTileRenderTasks ); mExecutingTileRenderTasks.clear(); stopCancelledTasks(); } @Override public void shutdown() { cancel(); super.shutdown(); } @Override @NonNull public List<Runnable> shutdownNow() { cancel(); return super.shutdownNow(); } private void stopCancelledTasks() { for( TileRenderTask tileRenderTask : mCancelledTileRenderTasks ) { tileRenderTask.cancel(); } mCancelledTileRenderTasks.clear(); } public void queue( TileCanvasViewGroup tileCanvasViewGroup, List<Tile> renderList ) { synchronized( this ) { Log.d( "DEBUG", "TileRenderPoolExecutor.queue, before intersection: " + mExecutingTileRenderTasks.size() + ", " + mCancelledTileRenderTasks.size() ); List<TileRenderTask> completeTasks = new LinkedList<>(); for( TileRenderTask tileRenderTask : mExecutingTileRenderTasks ) { if( tileRenderTask.runnable.isDone()){ completeTasks.add( tileRenderTask ); } else { if( renderList.contains( tileRenderTask.tile ) ) { renderList.remove( tileRenderTask.tile ); } else { mCancelledTileRenderTasks.add( tileRenderTask ); } } } mExecutingTileRenderTasks.removeAll( completeTasks ); mExecutingTileRenderTasks.removeAll( mCancelledTileRenderTasks ); Log.d( "DEBUG", "TileRenderPoolExecutor.queue, after intersection: " + mExecutingTileRenderTasks.size() + ", " + mCancelledTileRenderTasks.size() ); stopCancelledTasks(); if( renderList.size() > 0 ) { mTileCanvasViewGroupWeakReference = new WeakReference<>( tileCanvasViewGroup ); tileCanvasViewGroup.onRenderTaskPreExecute(); for( Tile tile : renderList ) { if( isShutdownOrTerminating() ) { return; } TileRenderTask task = new TileRenderTask(); task.tile = tile; task.runnable = new TileRenderRunnable(); task.runnable.setTile( tile ); task.runnable.setTileCanvasViewGroup( tileCanvasViewGroup ); 
task.runnable.setTileRenderPoolExecutor( this ); task.future = submit( task.runnable ); mExecutingTileRenderTasks.add( task ); } } } } private TileRenderTask getTaskByTile( Tile tile ) { if( tile != null ) { for( TileRenderTask tileRenderTask : mExecutingTileRenderTasks ) { if( tile.equals( tileRenderTask.tile ) ) { return tileRenderTask; } } } return null; } public void removeTaskFromCurrentlyExecutingList( TileRenderTask tileRenderTask ) { synchronized( this ) { Log.d( "DEBUG", "should be removing task: " + mExecutingTileRenderTasks.size() ); mExecutingTileRenderTasks.remove( tileRenderTask ); Log.d( "DEBUG", "task should have been removed: " + mExecutingTileRenderTasks.size() ); } } public boolean isShutdownOrTerminating() { return isShutdown() || isTerminating() || isTerminated(); } @Override protected void afterExecute( Runnable runnable, Throwable throwable ) { synchronized( this ) { Log.d( "DEBUG", "afterExecute" ); super.afterExecute( runnable, throwable ); if( getQueue().size() == 0 && getActiveCount() == 1 ) { mExecutingTileRenderTasks.clear(); TileCanvasViewGroup tileCanvasViewGroup = mTileCanvasViewGroupWeakReference.get(); if( tileCanvasViewGroup != null ) { tileCanvasViewGroup.onRenderTaskPostExecute(); Log.d( "DEBUG", "afterExecute should send onRenderTaskPostExecute" ); Log.d( "DEBUG", "executing tasks: " + mExecutingTileRenderTasks.size() ); } } } } private static class TileRenderTask { public Future future; public TileRenderRunnable runnable; public Tile tile; public void cancel() { future.cancel( true ); runnable.cancel(); tile.destroy( true ); } } private static class TileRenderRunnable implements Runnable { private WeakReference<TileRenderPoolExecutor> mTileRenderPoolExecutorWeakReference; private WeakReference<TileCanvasViewGroup> mTileCanvasViewGroupWeakReference; private WeakReference<Tile> mTileWeakReference; private volatile boolean mCancelled = false; private volatile boolean mComplete = false; public TileRenderRunnable() { } public void cancel() { mCancelled = true; } public void markComplete() { mComplete = true; /* TileRenderPoolExecutor tileRenderPoolExecutor = getTileRenderPoolExecutor(); if( tileRenderPoolExecutor != null ) { Tile tile = getTile(); if( tile != null ) { TileRenderTask tileRenderTask = tileRenderPoolExecutor.getTaskByTile( tile ); tileRenderPoolExecutor.removeTaskFromCurrentlyExecutingList( tileRenderTask ); } } */ } public boolean isComplete(){ return mComplete; } public boolean isDone(){ return mCancelled || mComplete; } public void setTileRenderPoolExecutor( TileRenderPoolExecutor tileRenderPoolExecutor ) { mTileRenderPoolExecutorWeakReference = new WeakReference<>( tileRenderPoolExecutor ); } public TileRenderPoolExecutor getTileRenderPoolExecutor() { if( mTileRenderPoolExecutorWeakReference != null ) { return mTileRenderPoolExecutorWeakReference.get(); } return null; } public void setTileCanvasViewGroup( TileCanvasViewGroup tileCanvasViewGroup ) { mTileCanvasViewGroupWeakReference = new WeakReference<>( tileCanvasViewGroup ); } public TileCanvasViewGroup getTileCanvasViewGroup() { if( mTileCanvasViewGroupWeakReference != null ) { return mTileCanvasViewGroupWeakReference.get(); } return null; } public void setTile( Tile tile ) { mTileWeakReference = new WeakReference<>( tile ); } public Tile getTile() { if( mTileWeakReference != null ) { return mTileWeakReference.get(); } return null; } public boolean renderTile() { if( mCancelled ) { return false; } Process.setThreadPriority( Process.THREAD_PRIORITY_BACKGROUND ); final Thread thread = 
Thread.currentThread(); if( thread.isInterrupted() ) { return false; } TileCanvasViewGroup tileCanvasViewGroup = getTileCanvasViewGroup(); if( tileCanvasViewGroup == null ) { return false; } if( tileCanvasViewGroup.getRenderIsCancelled() ) { return false; } Tile tile = getTile(); if( tile == null ) { return false; } try { tileCanvasViewGroup.generateTileBitmap( tile ); } catch( InterruptedIOException e ) { Thread.currentThread().interrupt(); return false; } catch( Exception e ) { return false; } if( mCancelled || tile.getBitmap() == null || thread.isInterrupted() || tileCanvasViewGroup.getRenderIsCancelled() ) { tile.destroy( true ); return false; } tileCanvasViewGroup.addTileToCurrentTileCanvasView( tile ); return true; } @Override public void run() { renderTile(); markComplete(); } } }
tileview/src/main/java/com/qozix/tileview/tiles/TileRenderPoolExecutor.java
package com.qozix.tileview.tiles; import android.os.Process; import android.support.annotation.NonNull; import android.util.Log; import java.io.InterruptedIOException; import java.lang.ref.WeakReference; import java.util.LinkedList; import java.util.List; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; public class TileRenderPoolExecutor extends ThreadPoolExecutor { private static final int KEEP_ALIVE_TIME = 1; private static final TimeUnit KEEP_ALIVE_TIME_UNIT = TimeUnit.SECONDS; private static final int CORE_POOL_SIZE = Runtime.getRuntime().availableProcessors(); private static final int MAXIMUM_POOL_SIZE = Runtime.getRuntime().availableProcessors(); private WeakReference<TileCanvasViewGroup> mTileCanvasViewGroupWeakReference; private List<TileRenderTask> mExecutingTileRenderTasks = new LinkedList<>(); private List<TileRenderTask> mCancelledTileRenderTasks = new LinkedList<>(); public TileRenderPoolExecutor() { super( CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME, KEEP_ALIVE_TIME_UNIT, new LinkedBlockingDeque<Runnable>() ); } public void cancel() { if( getQueue().size() > 0 || getActiveCount() > 0 ) { Log.d( "DEBUG", "queue 0, active count 0, dispatch render cancelled event" ); TileCanvasViewGroup tileCanvasViewGroup = mTileCanvasViewGroupWeakReference.get(); if( tileCanvasViewGroup != null ) { tileCanvasViewGroup.onRenderTaskCancelled(); Log.d( "DEBUG", "render cancelled event dispatched" ); } } //getQueue().clear(); mCancelledTileRenderTasks.addAll( mExecutingTileRenderTasks ); mExecutingTileRenderTasks.clear(); stopCancelledTasks(); } @Override public void shutdown() { cancel(); super.shutdown(); } @Override @NonNull public List<Runnable> shutdownNow() { cancel(); return super.shutdownNow(); } private void stopCancelledTasks() { for( TileRenderTask tileRenderTask : mCancelledTileRenderTasks ) { tileRenderTask.cancel(); } mCancelledTileRenderTasks.clear(); } public void queue( TileCanvasViewGroup tileCanvasViewGroup, List<Tile> renderList ) { Log.d( "DEBUG", "TileRenderPoolExecutor.queue, before intersection: " + mExecutingTileRenderTasks.size() + ", " + mCancelledTileRenderTasks.size() ); for( TileRenderTask tileRenderTask : mExecutingTileRenderTasks ) { if( renderList.contains( tileRenderTask.tile ) ) { renderList.remove( tileRenderTask.tile ); } else { mCancelledTileRenderTasks.add( tileRenderTask ); } } Log.d( "DEBUG", "TileRenderPoolExecutor.queue, after intersection: " + mExecutingTileRenderTasks.size() + ", " + mCancelledTileRenderTasks.size() ); stopCancelledTasks(); if( renderList.size() > 0 ) { mTileCanvasViewGroupWeakReference = new WeakReference<>( tileCanvasViewGroup ); tileCanvasViewGroup.onRenderTaskPreExecute(); for( Tile tile : renderList ) { if( isShutdownOrTerminating() ) { return; } TileRenderTask task = new TileRenderTask(); task.tile = tile; task.runnable = new TileRenderRunnable(); task.runnable.setTile( tile ); task.runnable.setTileCanvasViewGroup( tileCanvasViewGroup ); task.runnable.setTileRenderPoolExecutor( this ); task.future = submit( task.runnable ); mExecutingTileRenderTasks.add( task ); } } } private TileRenderTask getTaskByTile( Tile tile ) { if( tile != null ) { for( TileRenderTask tileRenderTask : mExecutingTileRenderTasks ) { if( tile.equals( tileRenderTask.tile ) ) { return tileRenderTask; } } } return null; } public void removeTaskFromCurrentlyExecutingList( TileRenderTask tileRenderTask ) { Log.d( "DEBUG", "should be 
removing task: " + mExecutingTileRenderTasks.size() ); mExecutingTileRenderTasks.remove( tileRenderTask ); Log.d( "DEBUG", "task should have been removed: " + mExecutingTileRenderTasks.size() ); } public boolean isShutdownOrTerminating() { return isShutdown() || isTerminating() || isTerminated(); } @Override protected void afterExecute( Runnable runnable, Throwable throwable ) { synchronized(this) { Log.d( "DEBUG", "afterExecute" ); super.afterExecute( runnable, throwable ); if( getQueue().size() == 0 && getActiveCount() == 1 ) { mExecutingTileRenderTasks.clear(); TileCanvasViewGroup tileCanvasViewGroup = mTileCanvasViewGroupWeakReference.get(); if( tileCanvasViewGroup != null ) { tileCanvasViewGroup.onRenderTaskPostExecute(); Log.d( "DEBUG", "afterExecute should send onRenderTaskPostExecute" ); Log.d( "DEBUG", "executing tasks: " + mExecutingTileRenderTasks.size() ); } } } } private static class TileRenderTask { public Future future; public TileRenderRunnable runnable; public Tile tile; public void cancel() { future.cancel( true ); runnable.cancel(); tile.destroy( true ); } } private static class TileRenderRunnable implements Runnable { private WeakReference<TileRenderPoolExecutor> mTileRenderPoolExecutorWeakReference; private WeakReference<TileCanvasViewGroup> mTileCanvasViewGroupWeakReference; private WeakReference<Tile> mTileWeakReference; private volatile boolean mCancelled = false; public TileRenderRunnable() { } public void markComplete() { TileRenderPoolExecutor tileRenderPoolExecutor = getTileRenderPoolExecutor(); if( tileRenderPoolExecutor != null ) { Tile tile = getTile(); if( tile != null ) { TileRenderTask tileRenderTask = tileRenderPoolExecutor.getTaskByTile( tile ); tileRenderPoolExecutor.removeTaskFromCurrentlyExecutingList( tileRenderTask ); } } } public void setTileRenderPoolExecutor( TileRenderPoolExecutor tileRenderPoolExecutor ) { mTileRenderPoolExecutorWeakReference = new WeakReference<>( tileRenderPoolExecutor ); } public TileRenderPoolExecutor getTileRenderPoolExecutor() { if( mTileRenderPoolExecutorWeakReference != null ) { return mTileRenderPoolExecutorWeakReference.get(); } return null; } public void setTileCanvasViewGroup( TileCanvasViewGroup tileCanvasViewGroup ) { mTileCanvasViewGroupWeakReference = new WeakReference<>( tileCanvasViewGroup ); } public TileCanvasViewGroup getTileCanvasViewGroup() { if( mTileCanvasViewGroupWeakReference != null ) { return mTileCanvasViewGroupWeakReference.get(); } return null; } public void setTile( Tile tile ) { mTileWeakReference = new WeakReference<>( tile ); } public Tile getTile() { if( mTileWeakReference != null ) { return mTileWeakReference.get(); } return null; } public void cancel() { mCancelled = true; } public boolean renderTile() { if( mCancelled ) { return false; } Process.setThreadPriority( Process.THREAD_PRIORITY_BACKGROUND ); final Thread thread = Thread.currentThread(); if( thread.isInterrupted() ) { return false; } TileCanvasViewGroup tileCanvasViewGroup = getTileCanvasViewGroup(); if( tileCanvasViewGroup == null ) { return false; } if( tileCanvasViewGroup.getRenderIsCancelled() ) { return false; } Tile tile = getTile(); if( tile == null ) { return false; } try { tileCanvasViewGroup.generateTileBitmap( tile ); } catch( InterruptedIOException e ) { Thread.currentThread().interrupt(); return false; } catch( Exception e ) { return false; } if( mCancelled || tile.getBitmap() == null || thread.isInterrupted() || tileCanvasViewGroup.getRenderIsCancelled() ) { tile.destroy( true ); return false; } 
tileCanvasViewGroup.addTileToCurrentTileCanvasView( tile ); return true; } @Override public void run() { boolean rendered = renderTile(); Log.d( "DEBUG", "was rendered:" + rendered); if( rendered ) { markComplete(); } } } }
working
tileview/src/main/java/com/qozix/tileview/tiles/TileRenderPoolExecutor.java
working
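The TileView commit above ("working") makes TileRenderPoolExecutor track in-flight TileRenderTasks so that a newly queued batch can skip tiles that are already rendering and cancel the ones that are no longer wanted. A generic sketch of that pattern, stripped of the Android-specific types, might look like the following; TrackingExecutor, render(), and the String keys are illustrative stand-ins, not part of the library.

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class TrackingExecutor {

    private static final class TrackedTask {
        final String key;
        final Future<?> future;
        TrackedTask(String key, Future<?> future) { this.key = key; this.future = future; }
    }

    private final ExecutorService delegate =
        Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    private final List<TrackedTask> executing = new LinkedList<>();

    // Queue a batch of work. "keys" must be a mutable list: keys already running are
    // removed from it, running keys absent from the new batch are cancelled, and the
    // remainder is submitted and tracked.
    public synchronized void queue(List<String> keys) {
        List<TrackedTask> stale = new LinkedList<>();
        for (TrackedTask task : executing) {
            if (!keys.remove(task.key)) {   // not wanted any more
                stale.add(task);
            }
        }
        executing.removeAll(stale);
        for (TrackedTask task : stale) {
            task.future.cancel(true);       // interrupt stale work
        }
        for (String key : keys) {
            executing.add(new TrackedTask(key,
                delegate.submit(() -> { render(key); markComplete(key); })));
        }
    }

    // Placeholder for the per-key work (generating a tile bitmap in the original);
    // real work should check Thread.currentThread().isInterrupted() periodically.
    private void render(String key) {
    }

    // Called when a task finishes so it stops being tracked, mirroring markComplete()
    // in the commit above.
    public synchronized void markComplete(String key) {
        executing.removeIf(t -> t.key.equals(key));
    }
}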
Java
mit
d415aafd6c7ba2a19f4539b0ae535ae6da9ea6d0
0
horrorho/InflatableDonkey
/* * The MIT License * * Copyright 2015 Ahseya. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.github.horrorho.inflatabledonkey.args; import java.time.format.DateTimeFormatter; import java.util.Optional; import net.jcip.annotations.Immutable; /** * Configuration properties. * * @author Ahseya */ @Immutable public enum Property { APP_NAME("InflatableDonkey"), ARGS_HELP, ARGS_TOKEN, ARGS_VERSION, AUTHENTICATION_APPLEID, AUTHENTICATION_PASSWORD, AUTHENTICATION_TOKEN, PATH_PROTOC("protoc"), PATH_CHUNK_STORE("chunks"), PATH_CHUNK_STORE_SUBSPLIT("3"), SELECT_DEVICE_INDEX("0"), SELECT_SNAPSHOT_INDEX("0"), SELECT_MANIFEST_INDEX("0"), PROPERTIES_RESOURCE("/inflatable_donkey.properties"); public static DateTimeFormatter commandLineInputDateTimeFormatter() { return DateTimeFormatter.ISO_DATE; } public static DateTimeFormatter outputDateTimeFormatter() { return DateTimeFormatter.RFC_1123_DATE_TIME; } private final String defaultValue; private Property() { this(null); } private Property(String defaultValue) { this.defaultValue = defaultValue; } public String defaultValue() { return defaultValue; } public Optional<Integer> intValue() { try { return Optional.of(Integer.parseInt(defaultValue)); } catch (NumberFormatException ex) { return Optional.empty(); } } }
src/main/java/com/github/horrorho/inflatabledonkey/args/Property.java
/* * The MIT License * * Copyright 2015 Ahseya. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.github.horrorho.inflatabledonkey.args; import java.time.format.DateTimeFormatter; import java.util.Optional; import net.jcip.annotations.Immutable; /** * Configuration properties. * * @author Ahseya */ @Immutable public enum Property { APP_NAME("InflatableDonkey"), ARGS_HELP, ARGS_TOKEN, ARGS_VERSION, AUTHENTICATION_APPLEID, AUTHENTICATION_PASSWORD, AUTHENTICATION_TOKEN, DECRYPTION_BLOCK_LENGTH("4096"), PATH_PROTOC("protoc"), PATH_CHUNK_STORE("chunks"), PATH_CHUNK_STORE_SUBSPLIT("3"), SELECT_DEVICE_INDEX("0"), SELECT_SNAPSHOT_INDEX("0"), SELECT_MANIFEST_INDEX("0"), PROPERTIES_RESOURCE("/inflatable_donkey.properties"); public static DateTimeFormatter commandLineInputDateTimeFormatter() { return DateTimeFormatter.ISO_DATE; } public static DateTimeFormatter outputDateTimeFormatter() { return DateTimeFormatter.RFC_1123_DATE_TIME; } private final String defaultValue; private Property() { this(null); } private Property(String defaultValue) { this.defaultValue = defaultValue; } public String defaultValue() { return defaultValue; } public Optional<Integer> intValue() { try { return Optional.of(Integer.parseInt(defaultValue)); } catch (NumberFormatException ex) { return Optional.empty(); } } }
Hardwired block decrypter block length: 0x1000
src/main/java/com/github/horrorho/inflatabledonkey/args/Property.java
Hardwired block decrypter block length: 0x1000
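The InflatableDonkey commit above drops DECRYPTION_BLOCK_LENGTH("4096") from the Property enum and, per the message, hardwires the block length to 0x1000 elsewhere. A small sketch of how the remaining defaults resolve through defaultValue() and the Optional-returning intValue() is shown below; PropertyDefaultsSketch is illustrative only and assumes the enum is on the classpath.

import com.github.horrorho.inflatabledonkey.args.Property;
import java.util.Optional;

public class PropertyDefaultsSketch {
    public static void main(String[] args) {
        // plain string default
        String chunkDir = Property.PATH_CHUNK_STORE.defaultValue();              // "chunks"

        // numeric default parsed via Optional<Integer>
        int subsplit = Property.PATH_CHUNK_STORE_SUBSPLIT.intValue().orElse(3);  // 3

        // non-numeric (or null) defaults yield Optional.empty()
        Optional<Integer> notNumeric = Property.APP_NAME.intValue();

        System.out.println(chunkDir + " / " + subsplit + " / " + notNumeric.isPresent());
    }
}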
Java
epl-1.0
586669d346884e7cce9f96b143a6b30cc72f89cc
0
gnodet/wikitext
/******************************************************************************* * Copyright (c) 2004, 2008 Tasktop Technologies and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Tasktop Technologies - initial API and implementation *******************************************************************************/ package org.eclipse.mylyn.tests; import junit.framework.Test; import junit.framework.TestSuite; import org.eclipse.mylyn.tasks.tests.TasksUtilTest; /** * @author Mik Kersten */ public class AllHeadlessStandaloneTests { public static Test suite() { TestSuite suite = new TestSuite("Tests not requiring Eclipse Workbench"); // disabled due to failure: bug 257972 // suite.addTestSuite(ContextExternalizerTest.class); // suite.addTestSuite(DegreeOfInterestTest.class); // suite.addTestSuite(ContextTest.class); // suite.addTestSuite(TaskListStandaloneTest.class); suite.addTestSuite(TasksUtilTest.class); suite.addTest(org.eclipse.mylyn.wikitext.tests.HeadlessStandaloneTests.suite()); return suite; } }
org.eclipse.mylyn.tests/src/org/eclipse/mylyn/tests/AllHeadlessStandaloneTests.java
/******************************************************************************* * Copyright (c) 2004, 2008 Tasktop Technologies and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Tasktop Technologies - initial API and implementation *******************************************************************************/ package org.eclipse.mylyn.tests; import junit.framework.Test; import junit.framework.TestSuite; import org.eclipse.mylyn.context.tests.ContextTest; import org.eclipse.mylyn.context.tests.DegreeOfInterestTest; import org.eclipse.mylyn.tasks.tests.TasksUtilTest; /** * @author Mik Kersten */ public class AllHeadlessStandaloneTests { public static Test suite() { TestSuite suite = new TestSuite("Tests not requiring Eclipse Workbench"); // $JUnit-BEGIN$ // suite.addTestSuite(ContextExternalizerTest.class); suite.addTestSuite(DegreeOfInterestTest.class); suite.addTestSuite(ContextTest.class); suite.addTestSuite(TasksUtilTest.class); // XXX: refactor // suite.addTestSuite(TaskListStandaloneTest.class); // $JUnit-END$ suite.addTest(org.eclipse.mylyn.wikitext.tests.HeadlessStandaloneTests.suite()); return suite; } }
NEW - bug 257972: fix standalone tests https://bugs.eclipse.org/bugs/show_bug.cgi?id=257972
org.eclipse.mylyn.tests/src/org/eclipse/mylyn/tests/AllHeadlessStandaloneTests.java
NEW - bug 257972: fix standalone tests https://bugs.eclipse.org/bugs/show_bug.cgi?id=257972
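The Mylyn commit above trims the JUnit 3 composite suite, commenting out the context tests that were failing under bug 257972. For reference, a minimal sketch of the same suite() aggregation pattern is given below; AllFastTests and SampleTest are invented names, not part of the Mylyn code.

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

public class AllFastTests {

    public static class SampleTest extends TestCase {
        public void testSomething() {
            assertTrue(1 + 1 == 2);
        }
    }

    public static Test suite() {
        TestSuite suite = new TestSuite("Fast tests");
        suite.addTestSuite(SampleTest.class);                    // add a TestCase class
        suite.addTest(new TestSuite(SampleTest.class, "More"));  // or nest another suite
        return suite;
    }
}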
Java
lgpl-2.1
7a7d3b3ae5678a0f81793bb86896b7d1a1fb49e7
0
juanmjacobs/kettle,juanmjacobs/kettle,cwarden/kettle,juanmjacobs/kettle,cwarden/kettle,cwarden/kettle
/********************************************************************** ** ** ** This code belongs to the KETTLE project. ** ** ** ** Kettle, from version 2.2 on, is released into the public domain ** ** under the Lesser GNU Public License (LGPL). ** ** ** ** For more details, please read the document LICENSE.txt, included ** ** in this project ** ** ** ** http://www.kettle.be ** ** [email protected] ** ** ** **********************************************************************/ package org.pentaho.di.core; import java.util.ArrayList; import java.util.Enumeration; import java.util.Hashtable; import java.util.regex.Pattern; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaAndData; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.core.xml.XMLInterface; import org.pentaho.di.repository.Repository; import org.w3c.dom.Node; /** This class describes a condition in a general meaning. A condition can either be<p> <p> 1) Atomic (a=10, B='aa')<p> 2) Composite ( NOT Condition1 AND Condition2 OR Condition3 )<p> <p> If the nr of atomic conditions is 0, the condition is atomic, otherwise it's Composit.<p> Precedence doesn't exist. Conditions are evaluated in the order in which they are found.<p> A condition can be negated or not.<p> <p> @author Matt @since 8-06-2004 */ public class Condition implements Cloneable, XMLInterface { public static final String[] operators = new String[] { "-", "OR", "AND", "NOT", "OR NOT", "AND NOT", "XOR" }; public static final int OPERATOR_NONE = 0; public static final int OPERATOR_OR = 1; public static final int OPERATOR_AND = 2; public static final int OPERATOR_NOT = 3; public static final int OPERATOR_OR_NOT = 4; public static final int OPERATOR_AND_NOT = 5; public static final int OPERATOR_XOR = 6; public static final String[] functions = new String[] { "=", "<>", "<", "<=", ">", ">=", "REGEXP", "IS NULL", "IS NOT NULL", "IN LIST", "CONTAINS", "STARTS WITH", "ENDS WITH" }; public static final int FUNC_EQUAL = 0; public static final int FUNC_NOT_EQUAL = 1; public static final int FUNC_SMALLER = 2; public static final int FUNC_SMALLER_EQUAL = 3; public static final int FUNC_LARGER = 4; public static final int FUNC_LARGER_EQUAL = 5; public static final int FUNC_REGEXP = 6; public static final int FUNC_NULL = 7; public static final int FUNC_NOT_NULL = 8; public static final int FUNC_IN_LIST = 9; public static final int FUNC_CONTAINS = 10; public static final int FUNC_STARTS_WITH = 11; public static final int FUNC_ENDS_WITH = 12; // // These parameters allow for: // value = othervalue // value = 'A' // NOT value = othervalue // private long id; private boolean negate; private int operator; private String left_valuename; private int function; private String right_valuename; private ValueMetaAndData right_exact; private long id_right_exact; private int left_fieldnr; private int right_fieldnr; private ArrayList<Condition> list; private String right_string; public Condition() { list = new ArrayList<Condition>(); this.operator = OPERATOR_NONE; this.negate = false; left_fieldnr = -2; right_fieldnr = -2; id=-1L; } public Condition(String valuename, int function, String valuename2, ValueMetaAndData exact) { this(); this.left_valuename = valuename; this.function = function; this.right_valuename = valuename2; this.right_exact = exact; clearFieldPositions(); } public 
Condition(int operator, String valuename, int function, String valuename2, ValueMetaAndData exact) { this(); this.operator = operator; this.left_valuename = valuename; this.function = function; this.right_valuename = valuename2; this.right_exact = exact; clearFieldPositions(); } public Condition(boolean negate, String valuename, int function, String valuename2, ValueMetaAndData exact) { this(valuename, function, valuename2, exact); this.negate = negate; } /** * Returns the database ID of this Condition if a repository was used before. * * @return the ID of the db connection. */ public long getID() { return id; } /** * Set the database ID for this Condition in the repository. * @param id The ID to set on this condition. * */ public void setID(long id) { this.id = id; } public Object clone() { Condition retval = null; retval = new Condition(); retval.negate = negate; retval.operator = operator; if (isComposite()) { for (int i=0;i<nrConditions();i++) { Condition c = getCondition(i); Condition cCopy = (Condition)c.clone(); retval.addCondition(cCopy); } } else { retval.negate = negate; retval.left_valuename = left_valuename; retval.operator = operator; retval.right_valuename = right_valuename; retval.function = function; if (right_exact!=null) { retval.right_exact = (ValueMetaAndData) right_exact.clone(); } else { retval.right_exact = null; } } return retval; } public void setOperator(int operator) { this.operator = operator; } public int getOperator() { return operator; } public String getOperatorDesc() { return Const.rightPad(operators[operator], 7); } public static final int getOperator(String description) { if (description==null) return OPERATOR_NONE; for (int i=1;i<operators.length;i++) { if (operators[i].equalsIgnoreCase(Const.trim(description))) return i; } return OPERATOR_NONE; } public static final String[] getOperators() { String retval[] = new String[operators.length-1]; for (int i=1;i<operators.length;i++) { retval[i-1] = operators[i]; } return retval; } public static final String[] getRealOperators() { return new String[] { "OR", "AND", "OR NOT", "AND NOT", "XOR" }; } public void setLeftValuename(String left_valuename) { this.left_valuename = left_valuename; } public String getLeftValuename() { return left_valuename; } public int getFunction() { return function; } public void setFunction( int function ) { this.function = function; } public String getFunctionDesc() { return functions[function]; } public static final int getFunction(String description) { for (int i=1;i<functions.length;i++) { if (functions[i].equalsIgnoreCase(Const.trim(description))) return i; } return FUNC_EQUAL; } public void setRightValuename(String right_valuename) { this.right_valuename = right_valuename; } public String getRightValuename() { return right_valuename; } public void setRightExact(ValueMetaAndData right_exact) { this.right_exact = right_exact; } public ValueMetaAndData getRightExact() { return right_exact; } public String getRightExactString() { if (right_exact == null) return null; return right_exact.toString(); } /** * Get the id of the RightExact Value in the repository * @return The id of the RightExact Value in the repository */ public long getRightExactID() { return id_right_exact; } /** * Set the database ID for the RightExact Value in the repository. * @param id_right_exact The ID to set on this Value. 
* */ public void setRightExactID(long id_right_exact) { this.id_right_exact = id_right_exact; } public boolean isAtomic() { return list.size()==0; } public boolean isComposite() { return list.size()!=0; } public boolean isNegated() { return negate; } public void setNegated(boolean negate) { this.negate = negate; } public void negate() { setNegated(!isNegated()); } /** * A condition is empty when the condition is atomic and no left field is specified. */ public boolean isEmpty() { return (isAtomic() && left_valuename==null); } /** * We cache the position of a value in a row. * If ever we want to change the rowtype, we need to * clear these cached field positions... */ public void clearFieldPositions() { left_fieldnr = -1; right_fieldnr = -1; } // // Evaluate the condition... // public boolean evaluate(RowMetaInterface rowMeta, Object[] r) { // Start of evaluate boolean retval = false; // If we have 0 items in the list, evaluate the current condition // Otherwise, evaluate all sub-conditions // try { if (isAtomic()) { // Get fieldnrs left value // // Check out the fieldnrs if we don't have them... if (left_valuename!=null && left_valuename.length()>0 && left_fieldnr<-1) left_fieldnr = rowMeta.indexOfValue(left_valuename); // Get fieldnrs right value // if (right_valuename!=null && right_valuename.length()>0 && right_fieldnr<-1) right_fieldnr = rowMeta.indexOfValue(right_valuename); // Get fieldnrs left field ValueMetaInterface fieldMeta = null; Object field=null; if (left_fieldnr>=0) { fieldMeta = rowMeta.getValueMeta(left_fieldnr); field = r[left_fieldnr]; // JIRA PDI-38 // if (field==null) // { // throw new KettleException("Unable to find field ["+left_valuename+"] in the input row!"); // } } else return false; //no fields to evaluate // Get fieldnrs right exact ValueMetaInterface fieldMeta2 = right_exact!=null ? right_exact.getValueMeta() : null; Object field2 = right_exact!=null ? 
right_exact.getValueData() : null; if (field2==null && right_fieldnr>=0) { fieldMeta2 = rowMeta.getValueMeta(right_fieldnr); field2 = r[right_fieldnr]; // JIRA PDI-38 // if (field2==null) // { // throw new KettleException("Unable to find field ["+right_valuename+"] in the input row!"); // } } // if (field==null) // { // throw new KettleException("Unable to find value for field ["+left_valuename+"] in the input row!"); // } // This condition goes too as field2 can indeed be null, just not fieldMeta2 // if (field2==null && function!=FUNC_NULL && function!=FUNC_NOT_NULL) // { // throw new KettleException("Unable to find value for field ["+right_valuename+"] in the input row!"); // } // Evaluate switch(function) { case FUNC_EQUAL : retval = (fieldMeta.compare(field, fieldMeta2, field2)==0); break; case FUNC_NOT_EQUAL : retval = (fieldMeta.compare(field, fieldMeta2, field2)!=0); break; case FUNC_SMALLER : retval = (fieldMeta.compare(field, fieldMeta2, field2)< 0); break; case FUNC_SMALLER_EQUAL : retval = (fieldMeta.compare(field, fieldMeta2, field2)<=0); break; case FUNC_LARGER : retval = (fieldMeta.compare(field, fieldMeta2, field2)> 0); break; case FUNC_LARGER_EQUAL : retval = (fieldMeta.compare(field, fieldMeta2, field2)>=0); break; case FUNC_REGEXP : if (fieldMeta.isNull(field) || field2==null) { retval = false; } else { retval = Pattern.matches(fieldMeta2.getString(field2), fieldMeta.getString(field)); } break; case FUNC_NULL : retval = (fieldMeta.isNull(field)); break; case FUNC_NOT_NULL : retval = (!fieldMeta.isNull(field)); break; case FUNC_IN_LIST : String list[] = Const.splitString(fieldMeta2.getString(field2), ';'); retval = Const.indexOfString(fieldMeta.getCompatibleString(field), list)>=0; // Compatible string doesn't pad with 0's etc. break; case FUNC_CONTAINS : retval = fieldMeta.getString(field)!=null?fieldMeta.getString(field).indexOf(fieldMeta2.getString(field2))>=0:false; break; case FUNC_STARTS_WITH : retval = fieldMeta.getString(field)!=null?fieldMeta.getString(field).startsWith(fieldMeta2.getString(field2)):false; break; case FUNC_ENDS_WITH : String string = fieldMeta.getString(field); if (!Const.isEmpty(string)) { if (right_string==null && field2!=null) right_string=fieldMeta2.getString(field2); if (right_string!=null) { retval = string.endsWith(fieldMeta2.getString(field2)); } else { retval = false; } } else { retval = false; } // retval = fieldMeta.getString(field)!=null?fieldMeta.getString(field).endsWith(fieldMeta2.getString(field2)):false; break; default: break; } // Only NOT makes sense, the rest doesn't, so ignore!!!! // Optionally negate // if (isNegated()) retval=!retval; } else { // Composite : get first Condition cb0 = list.get(0); retval = cb0.evaluate(rowMeta, r); // Loop over the conditions listed below. 
// for (int i=1;i<list.size();i++) { // Composite : evaluate #i // Condition cb = list.get(i); boolean cmp = cb.evaluate(rowMeta, r); switch (cb.getOperator()) { case Condition.OPERATOR_OR : retval = retval || cmp; break; case Condition.OPERATOR_AND : retval = retval && cmp; break; case Condition.OPERATOR_OR_NOT : retval = retval || ( !cmp ); break; case Condition.OPERATOR_AND_NOT : retval = retval && ( !cmp ); break; case Condition.OPERATOR_XOR : retval = retval ^ cmp; break; default: break; } } // Composite: optionally negate if (isNegated()) retval=!retval; } } catch(Exception e) { throw new RuntimeException("Unexpected error evaluation condition ["+toString()+"]", e); } return retval; } public void addCondition(Condition cb) { if (isAtomic() && getLeftValuename()!=null) { /* Copy current atomic setup... * */ Condition current = new Condition(getLeftValuename(), getFunction(), getRightValuename(), getRightExact()); current.setNegated(isNegated()); setNegated(false); list.add(current); } else // Set default operator if not on first position... if (isComposite() && list.size()>0 && cb.getOperator()==OPERATOR_NONE) { cb.setOperator(OPERATOR_AND); } list.add( cb ); } public void addCondition(int idx, Condition cb) { if (isAtomic() && getLeftValuename()!=null) { /* Copy current atomic setup... * */ Condition current = new Condition(getLeftValuename(), getFunction(), getRightValuename(), getRightExact()); current.setNegated(isNegated()); setNegated(false); list.add(current); } else // Set default operator if not on first position... if (isComposite() && idx>0 && cb.getOperator()==OPERATOR_NONE) { cb.setOperator(OPERATOR_AND); } list.add(idx, cb ); } public void removeCondition(int nr) { if (isComposite()) { Condition c = list.get(nr); list.remove(nr); // Nothing left or only one condition left: move it to the parent: make it atomic. boolean moveUp = isAtomic() || nrConditions()==1; if (nrConditions()==1) c=getCondition(0); if (moveUp) { setLeftValuename(c.getLeftValuename()); setFunction(c.getFunction()); setRightValuename(c.getRightValuename()); setRightExact(c.getRightExact()); setNegated(c.isNegated()); } } } public int nrConditions() { return list.size(); } public Condition getCondition(int i) { return list.get(i); } public void setCondition(int i, Condition subCondition) { list.set(i, subCondition); } public String toString() { return toString(0, true, true); } public String toString(int level, boolean show_negate, boolean show_operator) { String retval=""; if (isAtomic()) { //retval+="<ATOMIC "+level+", "+show_negate+", "+show_operator+">"; for (int i=0;i<level;i++) retval+=" "; if (show_operator && getOperator()!=OPERATOR_NONE) { retval += getOperatorDesc()+" "; } else { retval+=" "; } // Atomic is negated? if (isNegated() && ( show_negate || level>0 )) { retval+="NOT ( "; } else { retval+=" "; } retval+=left_valuename+" "+getFunctionDesc(); if (function != FUNC_NULL && function != FUNC_NOT_NULL) { if ( right_valuename != null ) { retval+=" "+right_valuename; } else { retval+=" ["+( getRightExactString()==null?"":getRightExactString() )+"]"; } } if (isNegated() && ( show_negate || level>0 )) retval+=" )"; retval+=Const.CR; } else { //retval+="<COMP "+level+", "+show_negate+", "+show_operator+">"; // Group is negated? 
if (isNegated() && (show_negate || level>0)) { for (int i=0;i<level;i++) retval+=" "; retval+="NOT"+Const.CR; } // Group is preceded by an operator: if (getOperator()!=OPERATOR_NONE && (show_operator || level>0)) { for (int i=0;i<level;i++) retval+=" "; retval+=getOperatorDesc()+Const.CR; } for (int i=0;i<level;i++) retval+=" "; retval+="("+Const.CR; for (int i=0;i<list.size();i++) { Condition cb = list.get(i); retval+=cb.toString(level+1, true, i>0); } for (int i=0;i<level;i++) retval+=" "; retval+=")"+Const.CR; } return retval; } public String getXML() { return getXML(0); } public String getXML(int level) { String retval=""; String indent1 = Const.rightPad(" ", level); String indent2 = Const.rightPad(" ", level+1); String indent3 = Const.rightPad(" ", level+2); retval+= indent1+"<condition>"+Const.CR; retval+=indent2+XMLHandler.addTagValue("negated", isNegated()); if (getOperator()!=OPERATOR_NONE) { retval+=indent2+XMLHandler.addTagValue("operator", Const.rtrim(getOperatorDesc())); } if (isAtomic()) { retval+=indent2+XMLHandler.addTagValue("leftvalue", getLeftValuename()); retval+=indent2+XMLHandler.addTagValue("function", getFunctionDesc()); retval+=indent2+XMLHandler.addTagValue("rightvalue", getRightValuename()); if (getRightExact()!=null) { retval+=indent2+getRightExact().getXML(); } } else { retval+=indent2+"<conditions>"+Const.CR; for (int i=0;i<nrConditions();i++) { Condition c = getCondition(i); retval+=c.getXML(level+2); } retval+=indent3+"</conditions>"+Const.CR; } retval+=indent2+"</condition>"+Const.CR; return retval; } /** * Build a new condition using an XML Document Node * @param condnode * @throws KettleXMLException */ public Condition(Node condnode) throws KettleXMLException { this(); list = new ArrayList<Condition>(); try { String str_negated = XMLHandler.getTagValue(condnode, "negated"); setNegated( "Y".equalsIgnoreCase(str_negated) ); String str_operator = XMLHandler.getTagValue(condnode, "operator"); setOperator( getOperator( str_operator ) ); Node conditions = XMLHandler.getSubNode(condnode, "conditions"); int nrconditions = XMLHandler.countNodes(conditions, "condition"); if (nrconditions==0) // ATOMIC! { setLeftValuename( XMLHandler.getTagValue(condnode, "leftvalue") ); setFunction( getFunction(XMLHandler.getTagValue(condnode, "function") ) ); setRightValuename( XMLHandler.getTagValue(condnode, "rightvalue") ); Node exactnode = XMLHandler.getSubNode(condnode, ValueMetaAndData.XML_TAG); if (exactnode!=null) { ValueMetaAndData exact = new ValueMetaAndData(exactnode); setRightExact(exact); } } else { for (int i=0;i<nrconditions;i++) { Node subcondnode = XMLHandler.getSubNodeByNr(conditions, "condition", i); Condition c = new Condition(subcondnode); addCondition(c); } } } catch(Exception e) { throw new KettleXMLException("Unable to create condition using xml: "+Const.CR+condnode, e); } } /** * * Read a condition from the repository. * @param rep The repository to read from * @param id_condition The condition id * @throws KettleException if something goes wrong. 
*/ public Condition(Repository rep, long id_condition) throws KettleException { this(); list = new ArrayList<Condition>(); try { RowMetaAndData r = rep.getCondition(id_condition); if (r!=null) { negate = r.getBoolean("NEGATED", false); operator = getOperator( r.getString("OPERATOR", null) ); id = r.getInteger("ID_CONDITION", -1L); long subids[] = rep.getSubConditionIDs(id); if (subids.length==0) { left_valuename = r.getString("LEFT_NAME", null); function = getFunction( r.getString("CONDITION_FUNCTION", null) ); right_valuename = r.getString("RIGHT_NAME", null); long id_value = r.getInteger("ID_VALUE_RIGHT", -1L); if (id_value>0) { ValueMetaAndData v = new ValueMetaAndData(rep, id_value); right_exact = v; } } else { for (int i=0;i<subids.length;i++) { addCondition( new Condition(rep, subids[i]) ); } } } else { throw new KettleException("Condition with id_condition="+id_condition+" could not be found in the repository"); } } catch(KettleException dbe) { throw new KettleException("Error loading condition from the repository (id_condition="+id_condition+")", dbe); } } public long saveRep(Repository rep) throws KettleException { return saveRep(0L, rep); } public long saveRep(long id_condition_parent, Repository rep) throws KettleException { try { id = rep.insertCondition( id_condition_parent, this ); for (int i=0;i<nrConditions();i++) { Condition subc = getCondition(i); subc.saveRep(getID(), rep); } return getID(); } catch(KettleException dbe) { throw new KettleException("Error saving condition to the repository.", dbe); } } public String[] getUsedFields() { Hashtable<String,String> fields = new Hashtable<String,String>(); getUsedFields(fields); String retval[] = new String[fields.size()]; Enumeration<String> keys = fields.keys(); int i=0; while (keys.hasMoreElements()) { retval[i] = (String)keys.nextElement(); i++; } return retval; } public void getUsedFields(Hashtable<String,String> fields) { if (isAtomic()) { if (getLeftValuename()!=null) fields.put(getLeftValuename(), "-"); if (getRightValuename()!=null) fields.put(getRightValuename(), "-"); } else { for (int i=0;i<nrConditions();i++) { Condition subc = getCondition(i); subc.getUsedFields(fields); } } } }
src/org/pentaho/di/core/Condition.java
/********************************************************************** ** ** ** This code belongs to the KETTLE project. ** ** ** ** Kettle, from version 2.2 on, is released into the public domain ** ** under the Lesser GNU Public License (LGPL). ** ** ** ** For more details, please read the document LICENSE.txt, included ** ** in this project ** ** ** ** http://www.kettle.be ** ** [email protected] ** ** ** **********************************************************************/ package org.pentaho.di.core; import java.util.ArrayList; import java.util.Enumeration; import java.util.Hashtable; import java.util.regex.Pattern; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaAndData; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.core.xml.XMLInterface; import org.pentaho.di.repository.Repository; import org.w3c.dom.Node; /** This class describes a condition in a general meaning. A condition can either be<p> <p> 1) Atomic (a=10, B='aa')<p> 2) Composite ( NOT Condition1 AND Condition2 OR Condition3 )<p> <p> If the nr of atomic conditions is 0, the condition is atomic, otherwise it's Composit.<p> Precedence doesn't exist. Conditions are evaluated in the order in which they are found.<p> A condition can be negated or not.<p> <p> @author Matt @since 8-06-2004 */ public class Condition implements Cloneable, XMLInterface { public static final String[] operators = new String[] { "-", "OR", "AND", "NOT", "OR NOT", "AND NOT", "XOR" }; public static final int OPERATOR_NONE = 0; public static final int OPERATOR_OR = 1; public static final int OPERATOR_AND = 2; public static final int OPERATOR_NOT = 3; public static final int OPERATOR_OR_NOT = 4; public static final int OPERATOR_AND_NOT = 5; public static final int OPERATOR_XOR = 6; public static final String[] functions = new String[] { "=", "<>", "<", "<=", ">", ">=", "REGEXP", "IS NULL", "IS NOT NULL", "IN LIST", "CONTAINS", "STARTS WITH", "ENDS WITH" }; public static final int FUNC_EQUAL = 0; public static final int FUNC_NOT_EQUAL = 1; public static final int FUNC_SMALLER = 2; public static final int FUNC_SMALLER_EQUAL = 3; public static final int FUNC_LARGER = 4; public static final int FUNC_LARGER_EQUAL = 5; public static final int FUNC_REGEXP = 6; public static final int FUNC_NULL = 7; public static final int FUNC_NOT_NULL = 8; public static final int FUNC_IN_LIST = 9; public static final int FUNC_CONTAINS = 10; public static final int FUNC_STARTS_WITH = 11; public static final int FUNC_ENDS_WITH = 12; // // These parameters allow for: // value = othervalue // value = 'A' // NOT value = othervalue // private long id; private boolean negate; private int operator; private String left_valuename; private int function; private String right_valuename; private ValueMetaAndData right_exact; private long id_right_exact; private int left_fieldnr; private int right_fieldnr; private ArrayList<Condition> list; private String right_string; public Condition() { list = new ArrayList<Condition>(); this.operator = OPERATOR_NONE; this.negate = false; left_fieldnr = -2; right_fieldnr = -2; id=-1L; } public Condition(String valuename, int function, String valuename2, ValueMetaAndData exact) { this(); this.left_valuename = valuename; this.function = function; this.right_valuename = valuename2; this.right_exact = exact; clearFieldPositions(); } public 
Condition(int operator, String valuename, int function, String valuename2, ValueMetaAndData exact) { this(); this.operator = operator; this.left_valuename = valuename; this.function = function; this.right_valuename = valuename2; this.right_exact = exact; clearFieldPositions(); } public Condition(boolean negate, String valuename, int function, String valuename2, ValueMetaAndData exact) { this(valuename, function, valuename2, exact); this.negate = negate; } /** * Returns the database ID of this Condition if a repository was used before. * * @return the ID of the db connection. */ public long getID() { return id; } /** * Set the database ID for this Condition in the repository. * @param id The ID to set on this condition. * */ public void setID(long id) { this.id = id; } public Object clone() { Condition retval = null; retval = new Condition(); retval.negate = negate; retval.operator = operator; if (isComposite()) { for (int i=0;i<nrConditions();i++) { Condition c = getCondition(i); Condition cCopy = (Condition)c.clone(); retval.addCondition(cCopy); } } else { retval.negate = negate; retval.left_valuename = left_valuename; retval.operator = operator; retval.right_valuename = right_valuename; retval.function = function; if (right_exact!=null) { retval.right_exact = (ValueMetaAndData) right_exact.clone(); } else { retval.right_exact = null; } } return retval; } public void setOperator(int operator) { this.operator = operator; } public int getOperator() { return operator; } public String getOperatorDesc() { return Const.rightPad(operators[operator], 7); } public static final int getOperator(String description) { if (description==null) return OPERATOR_NONE; for (int i=1;i<operators.length;i++) { if (operators[i].equalsIgnoreCase(Const.trim(description))) return i; } return OPERATOR_NONE; } public static final String[] getOperators() { String retval[] = new String[operators.length-1]; for (int i=1;i<operators.length;i++) { retval[i-1] = operators[i]; } return retval; } public static final String[] getRealOperators() { return new String[] { "OR", "AND", "OR NOT", "AND NOT", "XOR" }; } public void setLeftValuename(String left_valuename) { this.left_valuename = left_valuename; } public String getLeftValuename() { return left_valuename; } public int getFunction() { return function; } public void setFunction( int function ) { this.function = function; } public String getFunctionDesc() { return functions[function]; } public static final int getFunction(String description) { for (int i=1;i<functions.length;i++) { if (functions[i].equalsIgnoreCase(Const.trim(description))) return i; } return FUNC_EQUAL; } public void setRightValuename(String right_valuename) { this.right_valuename = right_valuename; } public String getRightValuename() { return right_valuename; } public void setRightExact(ValueMetaAndData right_exact) { this.right_exact = right_exact; } public ValueMetaAndData getRightExact() { return right_exact; } public String getRightExactString() { if (right_exact == null) return null; return right_exact.toString(); } /** * Get the id of the RightExact Value in the repository * @return The id of the RightExact Value in the repository */ public long getRightExactID() { return id_right_exact; } /** * Set the database ID for the RightExact Value in the repository. * @param id_right_exact The ID to set on this Value. 
* */ public void setRightExactID(long id_right_exact) { this.id_right_exact = id_right_exact; } public boolean isAtomic() { return list.size()==0; } public boolean isComposite() { return list.size()!=0; } public boolean isNegated() { return negate; } public void setNegated(boolean negate) { this.negate = negate; } public void negate() { setNegated(!isNegated()); } /** * A condition is empty when the condition is atomic and no left field is specified. */ public boolean isEmpty() { return (isAtomic() && left_valuename==null); } /** * We cache the position of a value in a row. * If ever we want to change the rowtype, we need to * clear these cached field positions... */ public void clearFieldPositions() { left_fieldnr = -1; right_fieldnr = -1; } // // Evaluate the condition... // public boolean evaluate(RowMetaInterface rowMeta, Object[] r) { // Start of evaluate boolean retval = false; // If we have 0 items in the list, evaluate the current condition // Otherwise, evaluate all sub-conditions // try { if (isAtomic()) { // Get fieldnrs left value // // Check out the fieldnrs if we don't have them... if (left_valuename!=null && left_valuename.length()>0 && left_fieldnr<-1) left_fieldnr = rowMeta.indexOfValue(left_valuename); // Get fieldnrs right value // if (right_valuename!=null && right_valuename.length()>0 && right_fieldnr<-1) right_fieldnr = rowMeta.indexOfValue(right_valuename); // Get fieldnrs left field ValueMetaInterface fieldMeta = null; Object field=null; if (left_fieldnr>=0) { fieldMeta = rowMeta.getValueMeta(left_fieldnr); field = r[left_fieldnr]; // JIRA PDI-38 // if (field==null) // { // throw new KettleException("Unable to find field ["+left_valuename+"] in the input row!"); // } } else return false; //no fields to evaluate // Get fieldnrs right exact ValueMetaInterface fieldMeta2 = right_exact!=null ? right_exact.getValueMeta() : null; Object field2 = right_exact!=null ? 
right_exact.getValueData() : null; if (field2==null && right_fieldnr>=0) { fieldMeta2 = rowMeta.getValueMeta(right_fieldnr); field2 = r[right_fieldnr]; // JIRA PDI-38 // if (field2==null) // { // throw new KettleException("Unable to find field ["+right_valuename+"] in the input row!"); // } } // if (field==null) // { // throw new KettleException("Unable to find value for field ["+left_valuename+"] in the input row!"); // } // This condition goes too as field2 can indeed be null, just not fieldMeta2 // if (field2==null && function!=FUNC_NULL && function!=FUNC_NOT_NULL) // { // throw new KettleException("Unable to find value for field ["+right_valuename+"] in the input row!"); // } // Evaluate switch(function) { case FUNC_EQUAL : retval = (fieldMeta.compare(field, fieldMeta2, field2)==0); break; case FUNC_NOT_EQUAL : retval = (fieldMeta.compare(field, fieldMeta2, field2)!=0); break; case FUNC_SMALLER : retval = (fieldMeta.compare(field, fieldMeta2, field2)< 0); break; case FUNC_SMALLER_EQUAL : retval = (fieldMeta.compare(field, fieldMeta2, field2)<=0); break; case FUNC_LARGER : retval = (fieldMeta.compare(field, fieldMeta2, field2)> 0); break; case FUNC_LARGER_EQUAL : retval = (fieldMeta.compare(field, fieldMeta2, field2)>=0); break; case FUNC_REGEXP : if (fieldMeta.isNull(field) || field2==null) { retval = false; } else { retval = Pattern.matches(fieldMeta2.getString(field2), fieldMeta.getString(field)); } break; case FUNC_NULL : retval = (fieldMeta.isNull(field)); break; case FUNC_NOT_NULL : retval = (!fieldMeta.isNull(field)); break; case FUNC_IN_LIST : String list[] = Const.splitString(fieldMeta2.getString(field2), ';'); retval = Const.indexOfString(fieldMeta.getString(field), list)>=0; break; case FUNC_CONTAINS : retval = fieldMeta.getString(field)!=null?fieldMeta.getString(field).indexOf(fieldMeta2.getString(field2))>=0:false; break; case FUNC_STARTS_WITH : retval = fieldMeta.getString(field)!=null?fieldMeta.getString(field).startsWith(fieldMeta2.getString(field2)):false; break; case FUNC_ENDS_WITH : String string = fieldMeta.getString(field); if (!Const.isEmpty(string)) { if (right_string==null && field2!=null) right_string=fieldMeta2.getString(field2); if (right_string!=null) { retval = string.endsWith(fieldMeta2.getString(field2)); } else { retval = false; } } else { retval = false; } // retval = fieldMeta.getString(field)!=null?fieldMeta.getString(field).endsWith(fieldMeta2.getString(field2)):false; break; default: break; } // Only NOT makes sense, the rest doesn't, so ignore!!!! // Optionally negate // if (isNegated()) retval=!retval; } else { // Composite : get first Condition cb0 = list.get(0); retval = cb0.evaluate(rowMeta, r); // Loop over the conditions listed below. // for (int i=1;i<list.size();i++) { // Composite : evaluate #i // Condition cb = list.get(i); boolean cmp = cb.evaluate(rowMeta, r); switch (cb.getOperator()) { case Condition.OPERATOR_OR : retval = retval || cmp; break; case Condition.OPERATOR_AND : retval = retval && cmp; break; case Condition.OPERATOR_OR_NOT : retval = retval || ( !cmp ); break; case Condition.OPERATOR_AND_NOT : retval = retval && ( !cmp ); break; case Condition.OPERATOR_XOR : retval = retval ^ cmp; break; default: break; } } // Composite: optionally negate if (isNegated()) retval=!retval; } } catch(Exception e) { throw new RuntimeException("Unexpected error evaluation condition ["+toString()+"]", e); } return retval; } public void addCondition(Condition cb) { if (isAtomic() && getLeftValuename()!=null) { /* Copy current atomic setup... 
* */ Condition current = new Condition(getLeftValuename(), getFunction(), getRightValuename(), getRightExact()); current.setNegated(isNegated()); setNegated(false); list.add(current); } else // Set default operator if not on first position... if (isComposite() && list.size()>0 && cb.getOperator()==OPERATOR_NONE) { cb.setOperator(OPERATOR_AND); } list.add( cb ); } public void addCondition(int idx, Condition cb) { if (isAtomic() && getLeftValuename()!=null) { /* Copy current atomic setup... * */ Condition current = new Condition(getLeftValuename(), getFunction(), getRightValuename(), getRightExact()); current.setNegated(isNegated()); setNegated(false); list.add(current); } else // Set default operator if not on first position... if (isComposite() && idx>0 && cb.getOperator()==OPERATOR_NONE) { cb.setOperator(OPERATOR_AND); } list.add(idx, cb ); } public void removeCondition(int nr) { if (isComposite()) { Condition c = list.get(nr); list.remove(nr); // Nothing left or only one condition left: move it to the parent: make it atomic. boolean moveUp = isAtomic() || nrConditions()==1; if (nrConditions()==1) c=getCondition(0); if (moveUp) { setLeftValuename(c.getLeftValuename()); setFunction(c.getFunction()); setRightValuename(c.getRightValuename()); setRightExact(c.getRightExact()); setNegated(c.isNegated()); } } } public int nrConditions() { return list.size(); } public Condition getCondition(int i) { return list.get(i); } public void setCondition(int i, Condition subCondition) { list.set(i, subCondition); } public String toString() { return toString(0, true, true); } public String toString(int level, boolean show_negate, boolean show_operator) { String retval=""; if (isAtomic()) { //retval+="<ATOMIC "+level+", "+show_negate+", "+show_operator+">"; for (int i=0;i<level;i++) retval+=" "; if (show_operator && getOperator()!=OPERATOR_NONE) { retval += getOperatorDesc()+" "; } else { retval+=" "; } // Atomic is negated? if (isNegated() && ( show_negate || level>0 )) { retval+="NOT ( "; } else { retval+=" "; } retval+=left_valuename+" "+getFunctionDesc(); if (function != FUNC_NULL && function != FUNC_NOT_NULL) { if ( right_valuename != null ) { retval+=" "+right_valuename; } else { retval+=" ["+( getRightExactString()==null?"":getRightExactString() )+"]"; } } if (isNegated() && ( show_negate || level>0 )) retval+=" )"; retval+=Const.CR; } else { //retval+="<COMP "+level+", "+show_negate+", "+show_operator+">"; // Group is negated? 
if (isNegated() && (show_negate || level>0)) { for (int i=0;i<level;i++) retval+=" "; retval+="NOT"+Const.CR; } // Group is preceded by an operator: if (getOperator()!=OPERATOR_NONE && (show_operator || level>0)) { for (int i=0;i<level;i++) retval+=" "; retval+=getOperatorDesc()+Const.CR; } for (int i=0;i<level;i++) retval+=" "; retval+="("+Const.CR; for (int i=0;i<list.size();i++) { Condition cb = list.get(i); retval+=cb.toString(level+1, true, i>0); } for (int i=0;i<level;i++) retval+=" "; retval+=")"+Const.CR; } return retval; } public String getXML() { return getXML(0); } public String getXML(int level) { String retval=""; String indent1 = Const.rightPad(" ", level); String indent2 = Const.rightPad(" ", level+1); String indent3 = Const.rightPad(" ", level+2); retval+= indent1+"<condition>"+Const.CR; retval+=indent2+XMLHandler.addTagValue("negated", isNegated()); if (getOperator()!=OPERATOR_NONE) { retval+=indent2+XMLHandler.addTagValue("operator", Const.rtrim(getOperatorDesc())); } if (isAtomic()) { retval+=indent2+XMLHandler.addTagValue("leftvalue", getLeftValuename()); retval+=indent2+XMLHandler.addTagValue("function", getFunctionDesc()); retval+=indent2+XMLHandler.addTagValue("rightvalue", getRightValuename()); if (getRightExact()!=null) { retval+=indent2+getRightExact().getXML(); } } else { retval+=indent2+"<conditions>"+Const.CR; for (int i=0;i<nrConditions();i++) { Condition c = getCondition(i); retval+=c.getXML(level+2); } retval+=indent3+"</conditions>"+Const.CR; } retval+=indent2+"</condition>"+Const.CR; return retval; } /** * Build a new condition using an XML Document Node * @param condnode * @throws KettleXMLException */ public Condition(Node condnode) throws KettleXMLException { this(); list = new ArrayList<Condition>(); try { String str_negated = XMLHandler.getTagValue(condnode, "negated"); setNegated( "Y".equalsIgnoreCase(str_negated) ); String str_operator = XMLHandler.getTagValue(condnode, "operator"); setOperator( getOperator( str_operator ) ); Node conditions = XMLHandler.getSubNode(condnode, "conditions"); int nrconditions = XMLHandler.countNodes(conditions, "condition"); if (nrconditions==0) // ATOMIC! { setLeftValuename( XMLHandler.getTagValue(condnode, "leftvalue") ); setFunction( getFunction(XMLHandler.getTagValue(condnode, "function") ) ); setRightValuename( XMLHandler.getTagValue(condnode, "rightvalue") ); Node exactnode = XMLHandler.getSubNode(condnode, ValueMetaAndData.XML_TAG); if (exactnode!=null) { ValueMetaAndData exact = new ValueMetaAndData(exactnode); setRightExact(exact); } } else { for (int i=0;i<nrconditions;i++) { Node subcondnode = XMLHandler.getSubNodeByNr(conditions, "condition", i); Condition c = new Condition(subcondnode); addCondition(c); } } } catch(Exception e) { throw new KettleXMLException("Unable to create condition using xml: "+Const.CR+condnode, e); } } /** * * Read a condition from the repository. * @param rep The repository to read from * @param id_condition The condition id * @throws KettleException if something goes wrong. 
*/ public Condition(Repository rep, long id_condition) throws KettleException { this(); list = new ArrayList<Condition>(); try { RowMetaAndData r = rep.getCondition(id_condition); if (r!=null) { negate = r.getBoolean("NEGATED", false); operator = getOperator( r.getString("OPERATOR", null) ); id = r.getInteger("ID_CONDITION", -1L); long subids[] = rep.getSubConditionIDs(id); if (subids.length==0) { left_valuename = r.getString("LEFT_NAME", null); function = getFunction( r.getString("CONDITION_FUNCTION", null) ); right_valuename = r.getString("RIGHT_NAME", null); long id_value = r.getInteger("ID_VALUE_RIGHT", -1L); if (id_value>0) { ValueMetaAndData v = new ValueMetaAndData(rep, id_value); right_exact = v; } } else { for (int i=0;i<subids.length;i++) { addCondition( new Condition(rep, subids[i]) ); } } } else { throw new KettleException("Condition with id_condition="+id_condition+" could not be found in the repository"); } } catch(KettleException dbe) { throw new KettleException("Error loading condition from the repository (id_condition="+id_condition+")", dbe); } } public long saveRep(Repository rep) throws KettleException { return saveRep(0L, rep); } public long saveRep(long id_condition_parent, Repository rep) throws KettleException { try { id = rep.insertCondition( id_condition_parent, this ); for (int i=0;i<nrConditions();i++) { Condition subc = getCondition(i); subc.saveRep(getID(), rep); } return getID(); } catch(KettleException dbe) { throw new KettleException("Error saving condition to the repository.", dbe); } } public String[] getUsedFields() { Hashtable<String,String> fields = new Hashtable<String,String>(); getUsedFields(fields); String retval[] = new String[fields.size()]; Enumeration<String> keys = fields.keys(); int i=0; while (keys.hasMoreElements()) { retval[i] = (String)keys.nextElement(); i++; } return retval; } public void getUsedFields(Hashtable<String,String> fields) { if (isAtomic()) { if (getLeftValuename()!=null) fields.put(getLeftValuename(), "-"); if (getRightValuename()!=null) fields.put(getRightValuename(), "-"); } else { for (int i=0;i<nrConditions();i++) { Condition subc = getCondition(i); subc.getUsedFields(fields); } } } }
PDI-352 - Condition now also uses compatible String conversion to make sure it works as before. git-svn-id: 51b39fcfd0d3a6ea7caa15377cad4af13b9d2664@5595 5fb7f6ec-07c1-534a-b4ca-9155e429e800
src/org/pentaho/di/core/Condition.java
PDI-352 - Condition now also uses compatible String conversion to make sure it works as before.
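For context, the PDI-352 change above only touches the FUNC_IN_LIST branch of Condition.evaluate(): the left-hand value is rendered with getCompatibleString() instead of getString(), so the comparison uses the unpadded, backward-compatible form ("Compatible string doesn't pad with 0's etc."). The following stand-alone sketch illustrates the behaviour the commit is after without using the Kettle ValueMeta classes at all; the class name and values are made up for illustration only.

import java.util.Arrays;

// Simplified illustration of why the IN LIST check switches to a "compatible"
// (unformatted) string: a padded rendering such as "0012.00" never matches a
// plain list entry like "12", while the unpadded rendering does.
public class InListCompatibilityDemo {
    public static void main(String[] args) {
        String[] list = "1;2;12;25".split(";");   // right-hand side of IN LIST

        String formatted  = "0012.00";            // padded/formatted rendering
        String compatible = "12";                 // unpadded, "compatible" rendering

        System.out.println(Arrays.asList(list).contains(formatted));   // false
        System.out.println(Arrays.asList(list).contains(compatible));  // true
    }
}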
Java
apache-2.0
8a81ac06aa4dd8fe78c910110e9893c3a656c680
0
bd-dev-mobileum/presto,bd-dev-mobileum/presto,bd-dev-mobileum/presto,bd-dev-mobileum/presto,bd-dev-mobileum/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.tree; /** * A reference to an execution engine channel. * <p> * This is used to replace a {@link QualifiedNameReference} with a direct reference to the physical * channel and field to avoid unnecessary lookups in a symbol->channel map during evaluation */ public class InputReference extends Expression { private final int channel; public InputReference(int channel) { this.channel = channel; } public int getChannel() { return channel; } @Override public <R, C> R accept(AstVisitor<R, C> visitor, C context) { return visitor.visitInputReference(this, context); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } InputReference that = (InputReference) o; return channel == that.channel; } @Override public int hashCode() { return channel; } }
presto-parser/src/main/java/com/facebook/presto/sql/tree/InputReference.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.tree; import static com.google.common.base.Preconditions.checkNotNull; /** * A reference to an execution engine channel. * <p> * This is used to replace a {@link QualifiedNameReference} with a direct reference to the physical * channel and field to avoid unnecessary lookups in a symbol->channel map during evaluation */ public class InputReference extends Expression { private final int channel; public InputReference(int channel) { checkNotNull(channel, "channel is null"); this.channel = channel; } public Integer getChannel() { return channel; } @Override public <R, C> R accept(AstVisitor<R, C> visitor, C context) { return visitor.visitInputReference(this, context); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } InputReference that = (InputReference) o; return channel == that.channel; } @Override public int hashCode() { return channel; } }
Fix InputChannel to return a primitive
presto-parser/src/main/java/com/facebook/presto/sql/tree/InputReference.java
Fix InputChannel to return a primitive
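The diff above removes a checkNotNull() call on a primitive int (a primitive can never be null, so the check only forced a pointless boxing) and changes getChannel() to return int instead of Integer. A minimal sketch of the resulting shape, not the Presto source itself:

// Stand-in class illustrating the change: no null check on a primitive
// constructor argument, and a primitive return type to avoid autoboxing
// on every call to the getter.
public class InputRef {
    private final int channel;

    public InputRef(int channel) {
        // No checkNotNull here: an int cannot be null.
        this.channel = channel;
    }

    public int getChannel() {   // was Integer in the old version
        return channel;
    }

    public static void main(String[] args) {
        System.out.println(new InputRef(3).getChannel()); // 3
    }
}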
Java
apache-2.0
8b04f2796085b6ae31f2092ba01d16c1b446ab75
0
juanitodread/mobile-lab,juanitodread/mobile-lab,juanitodread/mobile-lab
/** * GeoQuiz * * Copyright 2015 juanitodread * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.juanitodread.mobile.android.geoquiz; import org.juanitodread.mobile.android.geoquiz.model.Question; import android.support.v7.app.ActionBarActivity; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.TextView; import android.widget.Toast; /** * This is the main activity * * @author juanitodread * @version 1.0 * * Feb 18, 2015 */ public class QuizActivity extends ActionBarActivity { private Button mTrueButton; private Button mFalseButton; private Button mNextButton; private TextView mQuestionTextView; private int mCurrentIndex = 0; private Question[ ] mQuestionBank = new Question[ ] { new Question( R.string.question_oceans, true ), new Question( R.string.question_mideast, false ), new Question( R.string.question_africa, false ), new Question( R.string.question_americas, true ), new Question( R.string.question_asia, true ) }; private void updateQuestion( ) { int question = mQuestionBank[ mCurrentIndex ].getQuestion( ); mQuestionTextView.setText( question ); } private void checkAnswer(boolean userPressedTrue) { boolean answerIsTrue = mQuestionBank[mCurrentIndex].isTrueQuestion( ); int msgResId = 0; if(userPressedTrue == answerIsTrue) { msgResId = R.string.correct_toast; } else { msgResId = R.string.incorrect_toast; } Toast.makeText( this, msgResId, Toast.LENGTH_SHORT ).show( ); } @Override protected void onCreate( Bundle savedInstanceState ) { super.onCreate( savedInstanceState ); setContentView( R.layout.activity_quiz ); mQuestionTextView = ( TextView ) findViewById( R.id.question_text_view ); mTrueButton = ( Button ) findViewById( R.id.true_button ); mTrueButton.setOnClickListener( new View.OnClickListener( ) { /* * (non-Javadoc) * * @see android.view.View.OnClickListener#onClick(android.view.View) */ @Override public void onClick( View v ) { checkAnswer( true ); } } ); mFalseButton = ( Button ) findViewById( R.id.false_button ); mFalseButton.setOnClickListener( new View.OnClickListener( ) { /* * (non-Javadoc) * * @see android.view.View.OnClickListener#onClick(android.view.View) */ @Override public void onClick( View v ) { checkAnswer( false ); } } ); mNextButton = ( Button ) findViewById( R.id.next_button ); mNextButton.setOnClickListener( new View.OnClickListener( ) { @Override public void onClick( View v ) { mCurrentIndex = ( mCurrentIndex + 1 ) % mQuestionBank.length; updateQuestion( ); } } ); updateQuestion( ); } @Override public boolean onCreateOptionsMenu( Menu menu ) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater( ).inflate( R.menu.quiz, menu ); return true; } @Override public boolean onOptionsItemSelected( MenuItem item ) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. 
int id = item.getItemId( ); if ( id == R.id.action_settings ) { return true; } return super.onOptionsItemSelected( item ); } }
android/GeoQuiz/src/org/juanitodread/mobile/android/geoquiz/QuizActivity.java
/** * GeoQuiz * * Copyright 2015 juanitodread * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.juanitodread.mobile.android.geoquiz; import org.juanitodread.mobile.android.geoquiz.model.Question; import android.support.v7.app.ActionBarActivity; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.TextView; import android.widget.Toast; /** * This is the main activity * * @author juanitodread * @version 1.0 * * Feb 18, 2015 */ public class QuizActivity extends ActionBarActivity { private Button mTrueButton; private Button mFalseButton; private Button mNextButton; private TextView mQuestionTextView; private int mCurrentIndex = 0; private Question[ ] mQuestionBank = new Question[ ] { new Question( R.string.question_oceans, true ), new Question( R.string.question_mideast, false ), new Question( R.string.question_africa, false ), new Question( R.string.question_americas, true ), new Question( R.string.question_asia, true ) }; @Override protected void onCreate( Bundle savedInstanceState ) { super.onCreate( savedInstanceState ); setContentView( R.layout.activity_quiz ); mQuestionTextView = (TextView) findViewById( R.id.question_text_view ); int question = mQuestionBank[mCurrentIndex].getQuestion( ); mQuestionTextView.setText( question ); mTrueButton = ( Button ) findViewById( R.id.true_button ); mTrueButton.setOnClickListener( new View.OnClickListener( ) { @Override public void onClick( View v ) { Toast.makeText( QuizActivity.this, R.string.correct_toast, Toast.LENGTH_SHORT ).show( ); } } ); mFalseButton = ( Button ) findViewById( R.id.false_button ); mFalseButton.setOnClickListener( new View.OnClickListener( ) { /* * (non-Javadoc) * * @see android.view.View.OnClickListener#onClick(android.view.View) */ @Override public void onClick( View v ) { Toast.makeText( QuizActivity.this, R.string.incorrect_toast, Toast.LENGTH_SHORT ).show( ); } } ); } @Override public boolean onCreateOptionsMenu( Menu menu ) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater( ).inflate( R.menu.quiz, menu ); return true; } @Override public boolean onOptionsItemSelected( MenuItem item ) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId( ); if ( id == R.id.action_settings ) { return true; } return super.onOptionsItemSelected( item ); } }
Add multiple questions
android/GeoQuiz/src/org/juanitodread/mobile/android/geoquiz/QuizActivity.java
Add multiple questions
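The core of the "Add multiple questions" commit is the question bank plus the wrap-around index in the next-button handler, mCurrentIndex = (mCurrentIndex + 1) % mQuestionBank.length, and the checkAnswer() comparison against the current entry. The sketch below reproduces just that logic without any Android widgets; the Question class here is a trivial stand-in for the app's model class and the sample questions are invented.

// Self-contained sketch of the question-cycling and answer-checking logic.
public class QuestionBankDemo {
    static class Question {
        final String text;
        final boolean answerIsTrue;
        Question(String text, boolean answerIsTrue) {
            this.text = text;
            this.answerIsTrue = answerIsTrue;
        }
    }

    public static void main(String[] args) {
        Question[] bank = {
            new Question("The Pacific is the largest ocean.", true),
            new Question("The Suez Canal is in the Americas.", false),
        };
        int current = 0;
        for (int i = 0; i < 4; i++) {
            Question q = bank[current];
            boolean userPressedTrue = true;                      // pretend button press
            boolean correct = (userPressedTrue == q.answerIsTrue);
            System.out.println(q.text + " -> " + (correct ? "correct" : "incorrect"));
            current = (current + 1) % bank.length;               // wrap around, as in mNextButton
        }
    }
}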
Java
apache-2.0
15a746f5396f55db70f318404e68e93a9a093cad
0
ruz76/piii
package cz.vsb.ruz76.piii; import java.util.Random; /** * Created by ruz76 on 1.3.2017. */ public class Point { private double x; private double y; public Point() { this.x = 0; this.y = 0; } public Point(double x, double y) { this.x = x; this.y = y; } public Point(Extent ex) { this(ex.getMinx(), ex.getMiny(), ex.getMaxx(), ex.getMaxy()); } public Point(double minx, double miny, double maxx, double maxy) { Random r = new Random(); //TODO random in defined extent double r1 = r.nextDouble(); x = minx + (r1 * (maxx - minx)); y = miny + (r1 * (maxy - miny)); } public double getX() { return x; } public void setX(double x) { //TODO test x /*if (x < 12 || x > 19) { return false; } else { this.x = x; return true; }*/ this.x = x; } public double getY() { return y; } public void setY(double y) { this.y = y; } @Override public String toString() { return "POINT(" + x + " " + y + ")"; //return super.toString(); } public String toString(String type) { String output = this.toString(); switch (type.toLowerCase()) { case "wkt": output = this.toString(); break; case "simple": output = x + " " + y; break; case "super": output = super.toString(); break; default: output = this.toString(); break; } return output; } public String toString(String type, String type2) { switch (type.toLowerCase()) { case "wkt": return this.toString(); case "simple": return x + " " + y; case "super": return super.toString(); default: return this.toString(); } } //TODO //Metoda prijme Point //Metoda vrati vzdalenost od this //c^2 = a^2+ b^2 //Math public double getDistance(Point p) { return 0; } }
src/main/java/cz/vsb/ruz76/piii/Point.java
package cz.vsb.ruz76.piii; import java.util.Random; /** * Created by ruz76 on 1.3.2017. */ public class Point { private double x; private double y; public Point() { this.x = 0; this.y = 0; } public Point(double x, double y) { this.x = x; this.y = y; } public Point(Extent ex) { this(ex.getMinx(), ex.getMiny(), ex.getMaxx(), ex.getMaxy()); } public Point(double minx, double miny, double maxx, double maxy) { Random r = new Random(); //TODO random in defined extent double r1 = r.nextDouble(); x = minx + (r1 * (maxx - minx)); y = miny + (r1 * (maxy - miny)); } public double getX() { return x; } public void setX(double x) { //TODO test x /*if (x < 12 || x > 19) { return false; } else { this.x = x; return true; }*/ this.x = x; } public double getY() { return y; } public void setY(double y) { this.y = y; } @Override public String toString() { return "POINT(" + x + " " + y + ")"; //return super.toString(); } public String toString(String type) { String output = this.toString(); switch (type.toLowerCase()) { case "wkt": output = this.toString(); break; case "simple": output = x + " " + y; break; case "super": output = super.toString(); break; default: output = this.toString(); break; } return output; } public String toString(String type, String type2) { switch (type.toLowerCase()) { case "wkt": return this.toString(); case "simple": return x + " " + y; case "super": return super.toString(); default: return this.toString(); } } }
Switch
src/main/java/cz/vsb/ruz76/piii/Point.java
Switch
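The new version of Point.java above also leaves a getDistance(Point p) stub returning 0, with TODO comments hinting at the Pythagorean formula ("c^2 = a^2 + b^2", "Math"). One possible completion, shown here as a stand-alone sketch and not as part of the repository:

// Hypothetical completion of the getDistance TODO, following the hint in the
// comments: distance = sqrt(dx^2 + dy^2), via java.lang.Math.
public class DistanceSketch {
    static double distance(double x1, double y1, double x2, double y2) {
        double dx = x2 - x1;
        double dy = y2 - y1;
        return Math.sqrt(dx * dx + dy * dy);   // equivalently Math.hypot(dx, dy)
    }

    public static void main(String[] args) {
        System.out.println(distance(0, 0, 3, 4));   // 5.0
    }
}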
Java
apache-2.0
03e9b9419204b118babf1f580b2309ca8688052d
0
sakai-mirror/k2,sakai-mirror/k2,sakai-mirror/k2
/* * Licensed to the Sakai Foundation (SF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The SF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.sakaiproject.kernel.authz.simple; import com.google.inject.Inject; import net.sf.ehcache.Cache; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.kernel.api.authz.AccessControlStatement; import org.sakaiproject.kernel.api.jcr.EventRegistration; import org.sakaiproject.kernel.api.jcr.JCRConstants; import org.sakaiproject.kernel.api.jcr.support.JCRNodeFactoryService; import org.sakaiproject.kernel.api.jcr.support.JCRNodeFactoryServiceException; import org.sakaiproject.kernel.api.memory.CacheManagerService; import org.sakaiproject.kernel.api.memory.CacheScope; import org.sakaiproject.kernel.model.AclIndexBean; import java.util.ArrayList; import java.util.List; import javax.jcr.Node; import javax.jcr.PathNotFoundException; import javax.jcr.Property; import javax.jcr.RepositoryException; import javax.jcr.Value; import javax.jcr.observation.Event; import javax.jcr.observation.EventIterator; import javax.jcr.observation.EventListener; import javax.jcr.observation.ObservationManager; import javax.persistence.EntityManager; import javax.persistence.EntityTransaction; import javax.persistence.Query; /** * */ public class AclListener implements EventListener, EventRegistration { private static final Log LOG = LogFactory.getLog(AclListener.class); private final JCRNodeFactoryService jcrNodeFactoryService; private final EntityManager entityManager; private CacheManagerService cacheManagerService; @Inject public AclListener(JCRNodeFactoryService jcrNodeFactoryService, EntityManager entityManager, CacheManagerService cacheManagerService ) { this.jcrNodeFactoryService = jcrNodeFactoryService; this.entityManager = entityManager; this.cacheManagerService = cacheManagerService; } /** * {@inheritDoc} * @see org.sakaiproject.kernel.api.jcr.EventRegistration#register(javax.jcr.observation.ObservationManager) */ public void register(ObservationManager observationManager) throws RepositoryException { observationManager.addEventListener(this, Event.PROPERTY_ADDED | Event.PROPERTY_CHANGED | Event.PROPERTY_REMOVED, "/", false, null, new String[] { JCRConstants.NT_FILE, JCRConstants.NT_FOLDER }, false); } /** * {@inheritDoc} * * @see org.sakaiproject.kernel.jcr.api.JcrContentListener#onEvent(int, * java.lang.String, java.lang.String, java.lang.String) */ public void handleEvent(int type, String userID, String filePath) { try { if ((type == Event.PROPERTY_ADDED || type == Event.PROPERTY_CHANGED || type == Event.PROPERTY_REMOVED)) { ArrayList<AclIndexBean> toCreate = new ArrayList<AclIndexBean>(); ArrayList<AclIndexBean> toUpdate = new ArrayList<AclIndexBean>(); ArrayList<AclIndexBean> toDelete = new ArrayList<AclIndexBean>(); Query query = entityManager 
.createNamedQuery(AclIndexBean.Queries.FINDBY_PATH); query.setParameter(AclIndexBean.QueryParams.FINDBY_PATH_PATH, filePath); List<?> currentIndex = query.getResultList(); try { Node node = jcrNodeFactoryService.getNode(filePath); Property acl = node.getProperty(JCRConstants.MIX_ACL); for (Value val : acl.getValues()) { AccessControlStatement acs = new JcrAccessControlStatementImpl(val .getString()); switch (type) { case Event.PROPERTY_ADDED: if (inList(acs, currentIndex) == null) { toCreate.add(convert(acs)); } break; case Event.PROPERTY_CHANGED: AclIndexBean indexBean = inList(acs, currentIndex); if (indexBean != null) { toUpdate.add(indexBean); } break; case Event.PROPERTY_REMOVED: if (inList(acs, currentIndex) == null) { toDelete.add(convert(acs)); } break; } } EntityTransaction trans = entityManager.getTransaction(); trans.begin(); try { if (!toCreate.isEmpty()) { for (AclIndexBean bean : toCreate) { entityManager.persist(bean); } } else if (!toUpdate.isEmpty()) { for (AclIndexBean bean : toUpdate) { entityManager.persist(bean); } } else if (!toDelete.isEmpty()) { for (AclIndexBean bean : toDelete) { entityManager.remove(bean); } } trans.commit(); } catch (Exception e) { LOG.error( "Transaction rolled back due to a problem when updating the ACL index: " + e.getMessage(), e); trans.rollback(); } } catch (PathNotFoundException e) { // nothing to care about. this happens when there is no ACL // on the node } catch (RepositoryException e) { // nothing we can do LOG.error(e.getMessage(), e); } catch (JCRNodeFactoryServiceException e) { // nothing we can do LOG.error(e.getMessage(), e); } } } finally { try { cacheManagerService.unbind(CacheScope.REQUEST); } catch ( Exception ex) { // not interested } try { cacheManagerService.unbind(CacheScope.THREAD); } catch ( Exception ex ) { // not interested } } } private AclIndexBean convert(AccessControlStatement acs) { AclIndexBean bean = new AclIndexBean(); bean.setKey(acs.getStatementKey()); bean.setSubject(acs.getSubject()); bean.setGranted(acs.isGranted()); return bean; } private AclIndexBean inList(AccessControlStatement stmt, List<?> list) { AclIndexBean found = null; boolean stmtNotNull = stmt != null; boolean listNotEmpty = list != null && list.size() > 0; if (stmtNotNull && listNotEmpty) { for (Object listBeanO : list) { AclIndexBean listBean = (AclIndexBean) listBeanO; boolean same = true; same &= stmt.getStatementKey().equals(listBean.getKey()); same &= stmt.getSubject().getSubjectType().toString().equals( listBean.getSubjectType()); same &= stmt.getSubject().getSubjectToken().equals( listBean.getSubjectToken()); same &= stmt.getSubject().getPermissionToken().equals( listBean.getPermissionToken()); if (same) { found = listBean; break; } } } return found; } public void onEvent(EventIterator events) { for (; events.hasNext();) { Event e = events.nextEvent(); try { String path = e.getPath(); if (path.endsWith(JCRConstants.MIX_ACL)) { handleEvent(e.getType(), e.getUserID(), path); } } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } }
kernel/src/main/java/org/sakaiproject/kernel/authz/simple/AclListener.java
/* * Licensed to the Sakai Foundation (SF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The SF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.sakaiproject.kernel.authz.simple; import com.google.inject.Inject; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.kernel.api.authz.AccessControlStatement; import org.sakaiproject.kernel.api.jcr.EventRegistration; import org.sakaiproject.kernel.api.jcr.JCRConstants; import org.sakaiproject.kernel.api.jcr.support.JCRNodeFactoryService; import org.sakaiproject.kernel.api.jcr.support.JCRNodeFactoryServiceException; import org.sakaiproject.kernel.model.AclIndexBean; import java.util.ArrayList; import java.util.List; import javax.jcr.Node; import javax.jcr.PathNotFoundException; import javax.jcr.Property; import javax.jcr.RepositoryException; import javax.jcr.Value; import javax.jcr.observation.Event; import javax.jcr.observation.EventIterator; import javax.jcr.observation.EventListener; import javax.jcr.observation.ObservationManager; import javax.persistence.EntityManager; import javax.persistence.EntityTransaction; import javax.persistence.Query; /** * */ public class AclListener implements EventListener, EventRegistration { private static final Log LOG = LogFactory.getLog(AclListener.class); private final JCRNodeFactoryService jcrNodeFactoryService; private final EntityManager entityManager; public void register(ObservationManager observationManager) throws RepositoryException { observationManager.addEventListener(this, Event.PROPERTY_ADDED | Event.PROPERTY_CHANGED | Event.PROPERTY_REMOVED, "/", false, null, new String[] { JCRConstants.NT_FILE, JCRConstants.NT_FOLDER }, false); } @Inject public AclListener(JCRNodeFactoryService jcrNodeFactoryService, EntityManager entityManager) { this.jcrNodeFactoryService = jcrNodeFactoryService; this.entityManager = entityManager; } /** * {@inheritDoc} * * @see org.sakaiproject.kernel.jcr.api.JcrContentListener#onEvent(int, * java.lang.String, java.lang.String, java.lang.String) */ public void handleEvent(int type, String userID, String filePath) { if ((type == Event.PROPERTY_ADDED || type == Event.PROPERTY_CHANGED || type == Event.PROPERTY_REMOVED)) { ArrayList<AclIndexBean> toCreate = new ArrayList<AclIndexBean>(); ArrayList<AclIndexBean> toUpdate = new ArrayList<AclIndexBean>(); ArrayList<AclIndexBean> toDelete = new ArrayList<AclIndexBean>(); Query query = entityManager .createNamedQuery(AclIndexBean.Queries.FINDBY_PATH); query.setParameter(AclIndexBean.QueryParams.FINDBY_PATH_PATH, filePath); List<?> currentIndex = query.getResultList(); try { Node node = jcrNodeFactoryService.getNode(filePath); Property acl = node.getProperty(JCRConstants.MIX_ACL); for (Value val : acl.getValues()) { AccessControlStatement acs = new JcrAccessControlStatementImpl(val .getString()); switch (type) { case Event.PROPERTY_ADDED: if 
(inList(acs, currentIndex) == null) { toCreate.add(convert(acs)); } break; case Event.PROPERTY_CHANGED: AclIndexBean indexBean = inList(acs, currentIndex); if (indexBean != null) { toUpdate.add(indexBean); } break; case Event.PROPERTY_REMOVED: if (inList(acs, currentIndex) == null) { toDelete.add(convert(acs)); } break; } } EntityTransaction trans = entityManager.getTransaction(); trans.begin(); try { if (!toCreate.isEmpty()) { for (AclIndexBean bean : toCreate) { entityManager.persist(bean); } } else if (!toUpdate.isEmpty()) { for (AclIndexBean bean : toUpdate) { entityManager.persist(bean); } } else if (!toDelete.isEmpty()) { for (AclIndexBean bean : toDelete) { entityManager.remove(bean); } } trans.commit(); } catch (Exception e) { LOG.error( "Transaction rolled back due to a problem when updating the ACL index: " + e.getMessage(), e); trans.rollback(); } } catch (PathNotFoundException e) { // nothing to care about. this happens when there is no ACL // on the node } catch (RepositoryException e) { // nothing we can do LOG.error(e.getMessage(), e); } catch (JCRNodeFactoryServiceException e) { // nothing we can do LOG.error(e.getMessage(), e); } } } private AclIndexBean convert(AccessControlStatement acs) { AclIndexBean bean = new AclIndexBean(); bean.setKey(acs.getStatementKey()); bean.setSubject(acs.getSubject()); bean.setGranted(acs.isGranted()); return bean; } private AclIndexBean inList(AccessControlStatement stmt, List<?> list) { AclIndexBean found = null; boolean stmtNotNull = stmt != null; boolean listNotEmpty = list != null && list.size() > 0; if (stmtNotNull && listNotEmpty) { for (Object listBeanO : list) { AclIndexBean listBean = (AclIndexBean) listBeanO; boolean same = true; same &= stmt.getStatementKey().equals(listBean.getKey()); same &= stmt.getSubject().getSubjectType().toString().equals( listBean.getSubjectType()); same &= stmt.getSubject().getSubjectToken().equals( listBean.getSubjectToken()); same &= stmt.getSubject().getPermissionToken().equals( listBean.getPermissionToken()); if (same) { found = listBean; break; } } } return found; } public void onEvent(EventIterator events) { for (; events.hasNext();) { Event e = events.nextEvent(); try { String path = e.getPath(); if (path.endsWith(JCRConstants.MIX_ACL)) { handleEvent(e.getType(), e.getUserID(), path); } } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } }
KERN-84 Fixed second potential leak. git-svn-id: 81ed41d7d168891742cba5e65a82c2d517ef9008@57660 fdecad78-55fc-0310-b1b2-d7d25cf747c9
kernel/src/main/java/org/sakaiproject/kernel/authz/simple/AclListener.java
KERN-84
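The KERN-84 fix above injects a CacheManagerService and wraps the body of handleEvent() in try/finally so that the REQUEST and THREAD cache scopes are always unbound, with failures during cleanup deliberately swallowed. The sketch below isolates that pattern; CacheScope and CacheManagerService are stand-ins for the Sakai kernel types, not the real interfaces.

// Self-contained illustration of the unbind-in-finally cleanup pattern.
public class UnbindInFinallyDemo {
    enum CacheScope { REQUEST, THREAD }

    interface CacheManagerService {
        void unbind(CacheScope scope);
    }

    static void handleEvent(CacheManagerService cache, Runnable work) {
        try {
            work.run();                       // the actual ACL index update
        } finally {
            try { cache.unbind(CacheScope.REQUEST); } catch (Exception ex) { /* not interested */ }
            try { cache.unbind(CacheScope.THREAD);  } catch (Exception ex) { /* not interested */ }
        }
    }

    public static void main(String[] args) {
        handleEvent(scope -> System.out.println("unbound " + scope),
                    () -> System.out.println("processing event"));
    }
}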
Java
apache-2.0
a535513c6aed2d7fd8a56b56301e509c8b3e9609
0
zmGitHub/cordova-plugin-email-composer,Cube4Dev/cordova-plugin-email-composer,kinfkong/cordova-plugin-email-composer,lukaslisowski/cordova-plugin-email-composer,Telerik-Verified-Plugins/EmailComposer,zmGitHub/cordova-plugin-email-composer,wegewerk-mdt/cordova-plugin-email-composer,katzer/cordova-plugin-email-composer,Telerik-Verified-Plugins/EmailComposer,TylerAldrich/cordova-plugin-email-composer,ebulay/cordova-plugin-email-composer,loic/cordova-plugin-email-composer,sdharnasi/Email-Composer,sportstech/cordova-plugin-email-composer,ebulay/cordova-plugin-email-composer,hypery2k/cordova-email-plugin,hypery2k/cordova-email-plugin,rhaker/cordova-plugin-email-composer,hypery2k/cordova-email-plugin,sdharnasi/Email-Composer,TylerAldrich/cordova-plugin-email-composer,blocktrail/cordova-plugin-email-composer,blocktrail/cordova-plugin-email-composer,lukaslisowski/cordova-plugin-email-composer,loic/cordova-plugin-email-composer,rhaker/cordova-plugin-email-composer,DispatchMe/cordova-plugin-email-composer,Vertafore/cordova-plugin-email-composer,katzer/cordova-plugin-email-composer,Cube4Dev/cordova-plugin-email-composer,DispatchMe/cordova-plugin-email-composer,wegewerk-mdt/cordova-plugin-email-composer,sportstech/cordova-plugin-email-composer,kinfkong/cordova-plugin-email-composer,Vertafore/cordova-plugin-email-composer
/** * EmailComposer.java * Cordova Email Composition Plugin * * Created by Sebastian Katzer (github.com/katzer) on 16/08/2013. * Copyright 2013 Sebastian Katzer. All rights reserved. * GPL v2 licensed */ package de.appplant.cordova.plugin; import java.io.File; import java.util.ArrayList; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.content.Intent; import android.net.Uri; import android.text.Html; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.CallbackContext; import org.apache.cordova.PluginResult; public class EmailComposer extends CordovaPlugin { private CallbackContext ctx; @Override public boolean execute (String action, JSONArray args, CallbackContext callbackContext) throws JSONException { // An e-mail should be sent if ("open".equals(action)) { open(args, callbackContext); return true; } // Check whether a service for sending the e-mail is available if ("isServiceAvailable".equals(action)) { isServiceAvailable(callbackContext); return true; } // Returning false results in a "MethodNotFound" error. return false; } /** * Checks whether e-mails can be sent. */ private void isServiceAvailable (CallbackContext ctx) { Boolean available = this.isEmailAccountConfigured(); PluginResult result = new PluginResult(PluginResult.Status.OK, available); ctx.sendPluginResult(result); } /** * Opens the e-mail controller with pre-filled data. */ private void open (JSONArray args, CallbackContext ctx) throws JSONException { JSONObject properties = args.getJSONObject(0); Intent draft = this.getDraftWithProperties(properties); this.ctx = ctx; this.openDraft(draft); } /** * Creates the view controller for mails and inserts the given properties. * * @param {JSONObject} params (Subject, Body, Recipients, ...) */ private Intent getDraftWithProperties (JSONObject params) throws JSONException { Intent mail = new Intent(android.content.Intent.ACTION_SEND); if (params.has("subject")) this.setSubject(params.getString("subject"), mail); if (params.has("body")) this.setBody(params.getString("body"), params.optBoolean("isHtml"), mail); if (params.has("recipients")) this.setRecipients(params.getJSONArray("recipients"), mail); if (params.has("ccRecipients")) this.setCcRecipients(params.getJSONArray("ccRecipients"), mail); if (params.has("bccRecipients")) this.setBccRecipients(params.getJSONArray("bccRecipients"), mail); if (params.has("attachments")) this.setAttachments(params.getJSONArray("attachments"), mail); mail.setType("application/octet-stream"); return mail; } /** * Shows the view controller for sending/editing the mail. */ private void openDraft (Intent draft) { this.cordova.startActivityForResult(this, Intent.createChooser(draft, "Select Email app"), 0); } /** * Sets the subject of the mail. */ private void setSubject (String subject, Intent draft) { draft.putExtra(android.content.Intent.EXTRA_SUBJECT, subject); } /** * Sets the body of the mail. */ private void setBody (String body, Boolean isHTML, Intent draft) { if (isHTML) { draft.putExtra(android.content.Intent.EXTRA_TEXT, Html.fromHtml(body)); draft.setType("text/html"); } else { draft.putExtra(android.content.Intent.EXTRA_TEXT, body); draft.setType("text/plain"); } } /** * Sets the recipients of the mail. */ private void setRecipients (JSONArray recipients, Intent draft) throws JSONException { String[] receivers = new String[recipients.length()]; for (int i = 0; i < recipients.length(); i++) { receivers[i] = recipients.getString(i); } draft.putExtra(android.content.Intent.EXTRA_EMAIL, receivers); } /** * Sets the CC recipients of the mail. */ private void setCcRecipients (JSONArray ccRecipients, Intent draft) throws JSONException { String[] receivers = new String[ccRecipients.length()]; for (int i = 0; i < ccRecipients.length(); i++) { receivers[i] = ccRecipients.getString(i); } draft.putExtra(android.content.Intent.EXTRA_CC, receivers); } /** * Sets the BCC recipients of the mail. */ private void setBccRecipients (JSONArray bccRecipients, Intent draft) throws JSONException { String[] receivers = new String[bccRecipients.length()]; for (int i = 0; i < bccRecipients.length(); i++) { receivers[i] = bccRecipients.getString(i); } draft.putExtra(android.content.Intent.EXTRA_BCC, receivers); } /** * Adds the attachments to the mail. * Convert from paths to Android friendly Parcelable Uri's */ private void setAttachments (JSONArray attachments, Intent draft) throws JSONException { ArrayList<Uri> uris = new ArrayList<Uri>(); for (int i = 0; i < attachments.length(); i++) { File file = new File(attachments.getString(i)); if (file.exists()) { Uri uri = Uri.fromFile(file); uris.add(uri); } } draft.putParcelableArrayListExtra(Intent.EXTRA_STREAM, uris); } /** * Indicates whether an application that can send e-mails is installed. */ private Boolean isEmailAccountConfigured () { Intent intent = new Intent(Intent.ACTION_SENDTO, Uri.fromParts("mailto","[email protected]", null)); Boolean available = cordova.getActivity().getPackageManager().queryIntentActivities(intent, 0).size() > 1; return available; } @Override public void onActivityResult(int requestCode, int resultCode, Intent intent) { super.onActivityResult(requestCode, resultCode, intent); if (this.isEmailAccountConfigured()) { this.ctx.success(); } else { this.ctx.success(4); } } }
src/android/EmailComposer.java
/** * EmailComposer.java * Cordova Email Composition Plugin * * Created by Sebastian Katzer (github.com/katzer) on 16/08/2013. * Copyright 2013 Sebastian Katzer. All rights reserved. * GPL v2 licensed */ package de.appplant.cordova.plugin; import java.io.File; import java.util.ArrayList; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.content.Intent; import android.net.Uri; import android.text.Html; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.CallbackContext; import org.apache.cordova.PluginResult; public class EmailComposer extends CordovaPlugin { private CallbackContext ctx; @Override public boolean execute (String action, JSONArray args, CallbackContext callbackContext) throws JSONException { // An e-mail should be sent if ("open".equals(action)) { open(args, callbackContext); return true; } // Check whether a service for sending the e-mail is available if ("isServiceAvailable".equals(action)) { isServiceAvailable(callbackContext); return true; } // Returning false results in a "MethodNotFound" error. return false; } /** * Checks whether e-mails can be sent. */ private void isServiceAvailable (CallbackContext ctx) { Boolean available = this.isEmailAccountConfigured(); PluginResult result = new PluginResult(PluginResult.Status.OK, available); ctx.sendPluginResult(result); } /** * Opens the e-mail controller with pre-filled data. */ private void open (JSONArray args, CallbackContext ctx) throws JSONException { JSONObject properties = args.getJSONObject(0); Intent draft = this.getDraftWithProperties(properties); this.ctx = ctx; this.openDraft(draft); } /** * Creates the view controller for mails and inserts the given properties. * * @param {JSONObject} params (Subject, Body, Recipients, ...) */ private Intent getDraftWithProperties (JSONObject params) throws JSONException { Intent mail = new Intent(android.content.Intent.ACTION_SEND); if (params.has("subject")) this.setSubject(params.getString("subject"), mail); if (params.has("body")) this.setBody(params.getString("body"), params.optBoolean("isHtml"), mail); if (params.has("recipients")) this.setRecipients(params.getJSONArray("recipients"), mail); if (params.has("ccRecipients")) this.setCcRecipients(params.getJSONArray("ccRecipients"), mail); if (params.has("bccRecipients")) this.setBccRecipients(params.getJSONArray("bccRecipients"), mail); if (params.has("attachments")) this.setAttachments(params.getJSONArray("attachments"), mail); return mail; } /** * Shows the view controller for sending/editing the mail. */ private void openDraft (Intent draft) { this.cordova.startActivityForResult(this, Intent.createChooser(draft, "Select Email app"), 0); } /** * Sets the subject of the mail. */ private void setSubject (String subject, Intent draft) { draft.putExtra(android.content.Intent.EXTRA_SUBJECT, subject); } /** * Sets the body of the mail. */ private void setBody (String body, Boolean isHTML, Intent draft) { if (isHTML) { draft.putExtra(android.content.Intent.EXTRA_TEXT, Html.fromHtml(body)); draft.setType("text/html"); } else { draft.putExtra(android.content.Intent.EXTRA_TEXT, body); draft.setType("text/plain"); } } /** * Sets the recipients of the mail. */ private void setRecipients (JSONArray recipients, Intent draft) throws JSONException { String[] receivers = new String[recipients.length()]; for (int i = 0; i < recipients.length(); i++) { receivers[i] = recipients.getString(i); } draft.putExtra(android.content.Intent.EXTRA_EMAIL, receivers); } /** * Sets the CC recipients of the mail. */ private void setCcRecipients (JSONArray ccRecipients, Intent draft) throws JSONException { String[] receivers = new String[ccRecipients.length()]; for (int i = 0; i < ccRecipients.length(); i++) { receivers[i] = ccRecipients.getString(i); } draft.putExtra(android.content.Intent.EXTRA_CC, receivers); } /** * Sets the BCC recipients of the mail. */ private void setBccRecipients (JSONArray bccRecipients, Intent draft) throws JSONException { String[] receivers = new String[bccRecipients.length()]; for (int i = 0; i < bccRecipients.length(); i++) { receivers[i] = bccRecipients.getString(i); } draft.putExtra(android.content.Intent.EXTRA_BCC, receivers); } /** * Adds the attachments to the mail. * Convert from paths to Android friendly Parcelable Uri's */ private void setAttachments (JSONArray attachments, Intent draft) throws JSONException { ArrayList<Uri> uris = new ArrayList<Uri>(); for (int i = 0; i < attachments.length(); i++) { File file = new File(attachments.getString(i)); if (file.exists()) { Uri uri = Uri.fromFile(file); uris.add(uri); } } draft.putParcelableArrayListExtra(Intent.EXTRA_STREAM, uris); } /** * Indicates whether an application that can send e-mails is installed. */ private Boolean isEmailAccountConfigured () { Intent intent = new Intent(Intent.ACTION_SENDTO, Uri.fromParts("mailto","[email protected]", null)); Boolean available = cordova.getActivity().getPackageManager().queryIntentActivities(intent, 0).size() > 1; return available; } @Override public void onActivityResult(int requestCode, int resultCode, Intent intent) { super.onActivityResult(requestCode, resultCode, intent); if (this.isEmailAccountConfigured()) { this.ctx.success(); } else { this.ctx.success(4); } } }
Suddenly the send action is not enough. You have to specify the mime type too.
src/android/EmailComposer.java
Suddenly the send action is not enough. You have to specify the mime type too.
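Note on the record above: the only functional difference between old_contents and new_contents is the added mail.setType("application/octet-stream") call in getDraftWithProperties, which is what the commit message refers to. A minimal sketch of the same pattern in isolation, using only the standard Android Intent API; the DraftFactory class and buildDraft method names are illustrative and not part of the plugin:

import android.content.Intent;

// An ACTION_SEND draft should declare a MIME type, otherwise
// Intent.createChooser may fail to resolve any e-mail applications.
public class DraftFactory {
    public Intent buildDraft(String subject, String body) {
        Intent mail = new Intent(Intent.ACTION_SEND);
        mail.putExtra(Intent.EXTRA_SUBJECT, subject);
        mail.putExtra(Intent.EXTRA_TEXT, body);
        // The fix captured in this commit: a generic MIME type so the send action resolves.
        mail.setType("application/octet-stream");
        return mail;
    }
}

With the type set, the plugin's Intent.createChooser(draft, "Select Email app") call has a typed intent to match against installed mail clients.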
Java
apache-2.0
67d1f80901fbf6b54d0de05ec3f24ea8693973a8
0
NitorCreations/willow,NitorCreations/willow,NitorCreations/willow,NitorCreations/willow,NitorCreations/willow
package com.nitorcreations.willow.utils; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.lang.reflect.Field; import java.net.Proxy; import java.net.URI; import java.net.URISyntaxException; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.junit.Test; import com.btr.proxy.selector.pac.PacProxySelector; import com.btr.proxy.selector.pac.TestUtil; public class ProxyUtilsTest { @Test public void testProxyMatch() { assertFalse(ProxyUtils.noProxyMatches("foo.bar.com", "localhost,127.0.0.1,localaddress,.localdomain.com")); assertTrue(ProxyUtils.noProxyMatches("localhost", "localhost,127.0.0.1,localaddress,.localdomain.com")); assertTrue(ProxyUtils.noProxyMatches("localaddress", "localhost,127.0.0.1,localaddress,.localdomain.com")); assertTrue(ProxyUtils.noProxyMatches("localaddress.localdomain.com", "localhost,127.0.0.1,localaddress,.localdomain.com")); assertTrue(ProxyUtils.noProxyMatches(".localdomain.com", "localhost,127.0.0.1,localaddress,.localdomain.com")); } @Test public void testSystemProxyForClasspath() throws URISyntaxException { String old = System.getProperty(ProxyUtils.USE_SYSTEMPROXIES); try { System.setProperty(ProxyUtils.USE_SYSTEMPROXIES, "true"); List<Proxy> ret = ProxyUtils.resolveSystemProxy(new URI("classpath:foo")); assertTrue("Proxy for classpath should be null", ret == null); } finally { if (old != null) { System.setProperty(ProxyUtils.USE_SYSTEMPROXIES, old); } else { System.getProperties().remove(ProxyUtils.USE_SYSTEMPROXIES); } } } @Test public void testProxyAutoconf() throws URISyntaxException { Map<String, String> oldEnv = new LinkedHashMap<String, String>(System.getenv()); Map<String, String> newEnv = new LinkedHashMap<String, String>(oldEnv); newEnv.put("autoconf_proxy", "file:src/test/resources/test1.pac"); try { setEnv(newEnv); List<Proxy> result = ProxyUtils.resolveSystemProxy(TestUtil.HTTP_TEST_URI); assertTrue(PacProxySelector.isEnabled()); assertTrue("Result should be non null and not empty", result != null && !result.isEmpty()); assertEquals("Result should have the test proxy", TestUtil.HTTP_TEST_PROXY, result.get(0)); } finally { setEnv(oldEnv); } } protected static void setEnv(Map<String, String> newenv) { try { Class<?> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment"); Field theEnvironmentField = processEnvironmentClass.getDeclaredField("theEnvironment"); theEnvironmentField.setAccessible(true); Map<String, String> env = (Map<String, String>) theEnvironmentField.get(null); env.putAll(newenv); Field theCaseInsensitiveEnvironmentField = processEnvironmentClass.getDeclaredField("theCaseInsensitiveEnvironment"); theCaseInsensitiveEnvironmentField.setAccessible(true); Map<String, String> cienv = (Map<String, String>) theCaseInsensitiveEnvironmentField.get(null); cienv.putAll(newenv); } catch (NoSuchFieldException e) { try { Class[] classes = Collections.class.getDeclaredClasses(); Map<String, String> env = System.getenv(); for(Class cl : classes) { if("java.util.Collections$UnmodifiableMap".equals(cl.getName())) { Field field = cl.getDeclaredField("m"); field.setAccessible(true); Object obj = field.get(env); Map<String, String> map = (Map<String, String>) obj; map.clear(); map.putAll(newenv); } } } catch (Exception e2) { e2.printStackTrace(); } } catch (Exception e1) { e1.printStackTrace(); } } }
willow-utils/src/test/java/com/nitorcreations/willow/utils/ProxyUtilsTest.java
package com.nitorcreations.willow.utils; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.lang.reflect.Field; import java.net.Proxy; import java.net.URI; import java.net.URISyntaxException; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.junit.Test; import com.btr.proxy.selector.pac.PacProxySelector; import com.btr.proxy.selector.pac.TestUtil; public class ProxyUtilsTest { @Test public void testProxyMatch() { assertFalse(ProxyUtils.noProxyMatches("foo.bar.com", "localhost,127.0.0.1,localaddress,.localdomain.com")); assertTrue(ProxyUtils.noProxyMatches("localhost", "localhost,127.0.0.1,localaddress,.localdomain.com")); assertTrue(ProxyUtils.noProxyMatches("localaddress", "localhost,127.0.0.1,localaddress,.localdomain.com")); assertTrue(ProxyUtils.noProxyMatches("localaddress.localdomain.com", "localhost,127.0.0.1,localaddress,.localdomain.com")); assertTrue(ProxyUtils.noProxyMatches(".localdomain.com", "localhost,127.0.0.1,localaddress,.localdomain.com")); } @Test public void testSystemProxyForClasspath() throws URISyntaxException { String old = System.getProperty(ProxyUtils.USE_SYSTEMPROXIES); try { System.setProperty(ProxyUtils.USE_SYSTEMPROXIES, "true"); List<Proxy> ret = ProxyUtils.resolveSystemProxy(new URI("classpath:foo")); assertTrue("Proxy for classpath should be null", ret == null); } finally { if (old != null) { System.setProperty(ProxyUtils.USE_SYSTEMPROXIES, old); } else { System.getProperties().remove(ProxyUtils.USE_SYSTEMPROXIES); } } } @Test public void testProxyAutoconf() throws URISyntaxException { Map<String, String> oldEnv = new LinkedHashMap<String, String>(System.getenv()); Map<String, String> newEnv = new LinkedHashMap<String, String>(oldEnv); newEnv.put("autoconf_proxy", "file:src/test/resources/test1.pac"); try { setEnv(newEnv); List<Proxy> result = ProxyUtils.resolveSystemProxy(TestUtil.HTTP_TEST_URI); assertTrue(PacProxySelector.isEnabled()); assertEquals(TestUtil.HTTP_TEST_PROXY, result.get(0)); } finally { setEnv(oldEnv); } } protected static void setEnv(Map<String, String> newenv) { try { Class<?> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment"); Field theEnvironmentField = processEnvironmentClass.getDeclaredField("theEnvironment"); theEnvironmentField.setAccessible(true); Map<String, String> env = (Map<String, String>) theEnvironmentField.get(null); env.putAll(newenv); Field theCaseInsensitiveEnvironmentField = processEnvironmentClass.getDeclaredField("theCaseInsensitiveEnvironment"); theCaseInsensitiveEnvironmentField.setAccessible(true); Map<String, String> cienv = (Map<String, String>) theCaseInsensitiveEnvironmentField.get(null); cienv.putAll(newenv); } catch (NoSuchFieldException e) { try { Class[] classes = Collections.class.getDeclaredClasses(); Map<String, String> env = System.getenv(); for(Class cl : classes) { if("java.util.Collections$UnmodifiableMap".equals(cl.getName())) { Field field = cl.getDeclaredField("m"); field.setAccessible(true); Object obj = field.get(env); Map<String, String> map = (Map<String, String>) obj; map.clear(); map.putAll(newenv); } } } catch (Exception e2) { e2.printStackTrace(); } } catch (Exception e1) { e1.printStackTrace(); } } }
Improve asserts
willow-utils/src/test/java/com/nitorcreations/willow/utils/ProxyUtilsTest.java
Improve asserts
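For the "Improve asserts" record above, the diff guards the previously unguarded result.get(0) call with a message-bearing emptiness check and adds a message to the equality assertion. A minimal sketch of that pattern, assuming the ProxyUtils and TestUtil classes from the test itself; the ProxyAssertSketchTest class and method names are illustrative:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.net.Proxy;
import java.util.List;

import org.junit.Test;

public class ProxyAssertSketchTest {
    @Test
    public void resolvesTestProxy() throws Exception {
        List<Proxy> result = ProxyUtils.resolveSystemProxy(TestUtil.HTTP_TEST_URI);
        // Guard with a descriptive message first, so a null or empty result fails
        // with the intended explanation instead of a NullPointerException or
        // IndexOutOfBoundsException from result.get(0) on the next line.
        assertTrue("Result should be non null and not empty", result != null && !result.isEmpty());
        assertEquals("Result should have the test proxy", TestUtil.HTTP_TEST_PROXY, result.get(0));
    }
}

Putting the precondition in its own labelled assert is what makes the failure report actionable; the bare assertEquals in old_contents would instead surface as an unrelated runtime exception when the proxy list came back empty.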
Java
apache-2.0
5369f48e90ea0bbf1ce4c2b935f80ff5289c2c37
0
nicoben/pentaho-kettle,gretchiemoran/pentaho-kettle,lgrill-pentaho/pentaho-kettle,SergeyTravin/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,drndos/pentaho-kettle,YuryBY/pentaho-kettle,drndos/pentaho-kettle,zlcnju/kettle,HiromuHota/pentaho-kettle,CapeSepias/pentaho-kettle,pymjer/pentaho-kettle,pedrofvteixeira/pentaho-kettle,mattyb149/pentaho-kettle,jbrant/pentaho-kettle,tkafalas/pentaho-kettle,marcoslarsen/pentaho-kettle,yshakhau/pentaho-kettle,dkincade/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,SergeyTravin/pentaho-kettle,brosander/pentaho-kettle,denisprotopopov/pentaho-kettle,cjsonger/pentaho-kettle,gretchiemoran/pentaho-kettle,pymjer/pentaho-kettle,airy-ict/pentaho-kettle,MikhailHubanau/pentaho-kettle,GauravAshara/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,EcoleKeine/pentaho-kettle,codek/pentaho-kettle,mkambol/pentaho-kettle,kurtwalker/pentaho-kettle,airy-ict/pentaho-kettle,stepanovdg/pentaho-kettle,marcoslarsen/pentaho-kettle,birdtsai/pentaho-kettle,pentaho/pentaho-kettle,akhayrutdinov/pentaho-kettle,alina-ipatina/pentaho-kettle,cjsonger/pentaho-kettle,Advent51/pentaho-kettle,DFieldFL/pentaho-kettle,wseyler/pentaho-kettle,denisprotopopov/pentaho-kettle,EcoleKeine/pentaho-kettle,ViswesvarSekar/pentaho-kettle,mdamour1976/pentaho-kettle,pedrofvteixeira/pentaho-kettle,aminmkhan/pentaho-kettle,marcoslarsen/pentaho-kettle,codek/pentaho-kettle,lgrill-pentaho/pentaho-kettle,ddiroma/pentaho-kettle,stepanovdg/pentaho-kettle,ccaspanello/pentaho-kettle,andrei-viaryshka/pentaho-kettle,tkafalas/pentaho-kettle,pymjer/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,wseyler/pentaho-kettle,EcoleKeine/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,nicoben/pentaho-kettle,ivanpogodin/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,GauravAshara/pentaho-kettle,DFieldFL/pentaho-kettle,rfellows/pentaho-kettle,mbatchelor/pentaho-kettle,graimundo/pentaho-kettle,matrix-stone/pentaho-kettle,cjsonger/pentaho-kettle,matrix-stone/pentaho-kettle,rmansoor/pentaho-kettle,emartin-pentaho/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,kurtwalker/pentaho-kettle,skofra0/pentaho-kettle,e-cuellar/pentaho-kettle,SergeyTravin/pentaho-kettle,pavel-sakun/pentaho-kettle,rmansoor/pentaho-kettle,stevewillcock/pentaho-kettle,jbrant/pentaho-kettle,sajeetharan/pentaho-kettle,yshakhau/pentaho-kettle,birdtsai/pentaho-kettle,birdtsai/pentaho-kettle,brosander/pentaho-kettle,andrei-viaryshka/pentaho-kettle,pminutillo/pentaho-kettle,nantunes/pentaho-kettle,nanata1115/pentaho-kettle,aminmkhan/pentaho-kettle,roboguy/pentaho-kettle,alina-ipatina/pentaho-kettle,jbrant/pentaho-kettle,rfellows/pentaho-kettle,sajeetharan/pentaho-kettle,dkincade/pentaho-kettle,nanata1115/pentaho-kettle,mattyb149/pentaho-kettle,mkambol/pentaho-kettle,kurtwalker/pentaho-kettle,graimundo/pentaho-kettle,nicoben/pentaho-kettle,cjsonger/pentaho-kettle,HiromuHota/pentaho-kettle,mbatchelor/pentaho-kettle,EcoleKeine/pentaho-kettle,ddiroma/pentaho-kettle,lgrill-pentaho/pentaho-kettle,mbatchelor/pentaho-kettle,airy-ict/pentaho-kettle,e-cuellar/pentaho-kettle,matrix-stone/pentaho-kettle,matrix-stone/pentaho-kettle,ViswesvarSekar/pentaho-kettle,ma459006574/pentaho-kettle,mkambol/pentaho-kettle,codek/pentaho-kettle,YuryBY/pentaho-kettle,mbatchelor/pentaho-kettle,ccaspanello/pentaho-kettle,gretchiemoran/pentaho-kettle,GauravAshara/pentaho-kettle,tmcsantos/pentaho-kettle,matthewtckr/pentaho-kettle,flbrino/pentaho-kettle,rfellows/pentaho-kettle,tmcsantos/pentaho-kettle,mattyb149/pentaho-kettle,denisprotopopov/pentaho-kettle,DFieldFL/pentaho-kettle,zlcnju/kettle,akhayru
tdinov/pentaho-kettle,gretchiemoran/pentaho-kettle,zlcnju/kettle,rmansoor/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,Advent51/pentaho-kettle,Advent51/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,pminutillo/pentaho-kettle,sajeetharan/pentaho-kettle,HiromuHota/pentaho-kettle,stepanovdg/pentaho-kettle,kurtwalker/pentaho-kettle,jbrant/pentaho-kettle,ddiroma/pentaho-kettle,ivanpogodin/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,ViswesvarSekar/pentaho-kettle,mdamour1976/pentaho-kettle,skofra0/pentaho-kettle,eayoungs/pentaho-kettle,ma459006574/pentaho-kettle,akhayrutdinov/pentaho-kettle,nicoben/pentaho-kettle,MikhailHubanau/pentaho-kettle,HiromuHota/pentaho-kettle,bmorrise/pentaho-kettle,airy-ict/pentaho-kettle,flbrino/pentaho-kettle,ivanpogodin/pentaho-kettle,MikhailHubanau/pentaho-kettle,e-cuellar/pentaho-kettle,dkincade/pentaho-kettle,SergeyTravin/pentaho-kettle,matthewtckr/pentaho-kettle,andrei-viaryshka/pentaho-kettle,hudak/pentaho-kettle,graimundo/pentaho-kettle,flbrino/pentaho-kettle,hudak/pentaho-kettle,tkafalas/pentaho-kettle,codek/pentaho-kettle,drndos/pentaho-kettle,ccaspanello/pentaho-kettle,Advent51/pentaho-kettle,stepanovdg/pentaho-kettle,nantunes/pentaho-kettle,stevewillcock/pentaho-kettle,skofra0/pentaho-kettle,mattyb149/pentaho-kettle,skofra0/pentaho-kettle,dkincade/pentaho-kettle,CapeSepias/pentaho-kettle,mdamour1976/pentaho-kettle,graimundo/pentaho-kettle,eayoungs/pentaho-kettle,nanata1115/pentaho-kettle,flbrino/pentaho-kettle,nantunes/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,zlcnju/kettle,GauravAshara/pentaho-kettle,yshakhau/pentaho-kettle,denisprotopopov/pentaho-kettle,eayoungs/pentaho-kettle,pedrofvteixeira/pentaho-kettle,pentaho/pentaho-kettle,pavel-sakun/pentaho-kettle,roboguy/pentaho-kettle,drndos/pentaho-kettle,YuryBY/pentaho-kettle,e-cuellar/pentaho-kettle,brosander/pentaho-kettle,pavel-sakun/pentaho-kettle,pedrofvteixeira/pentaho-kettle,pavel-sakun/pentaho-kettle,marcoslarsen/pentaho-kettle,brosander/pentaho-kettle,yshakhau/pentaho-kettle,sajeetharan/pentaho-kettle,roboguy/pentaho-kettle,birdtsai/pentaho-kettle,YuryBY/pentaho-kettle,ccaspanello/pentaho-kettle,tmcsantos/pentaho-kettle,emartin-pentaho/pentaho-kettle,pymjer/pentaho-kettle,ma459006574/pentaho-kettle,alina-ipatina/pentaho-kettle,rmansoor/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,lgrill-pentaho/pentaho-kettle,matthewtckr/pentaho-kettle,mkambol/pentaho-kettle,tmcsantos/pentaho-kettle,eayoungs/pentaho-kettle,pminutillo/pentaho-kettle,pentaho/pentaho-kettle,wseyler/pentaho-kettle,pminutillo/pentaho-kettle,tkafalas/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,stevewillcock/pentaho-kettle,roboguy/pentaho-kettle,emartin-pentaho/pentaho-kettle,matthewtckr/pentaho-kettle,akhayrutdinov/pentaho-kettle,DFieldFL/pentaho-kettle,bmorrise/pentaho-kettle,hudak/pentaho-kettle,aminmkhan/pentaho-kettle,CapeSepias/pentaho-kettle,stevewillcock/pentaho-kettle,ma459006574/pentaho-kettle,ddiroma/pentaho-kettle,pentaho/pentaho-kettle,CapeSepias/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,ivanpogodin/pentaho-kettle,hudak/pentaho-kettle,nantunes/pentaho-kettle,alina-ipatina/pentaho-kettle,mdamour1976/pentaho-kettle,bmorrise/pentaho-kettle,bmorrise/pentaho-kettle,aminmkhan/pentaho-kettle,emartin-pentaho/pentaho-kettle,wseyler/pentaho-kettle,ViswesvarSekar/pentaho-kettle,nanata1115/pentaho-kettle
/********************************************************************** ** ** ** This code belongs to the KETTLE project. ** ** ** ** Kettle, from version 2.2 on, is released into the public domain ** ** under the Lesser GNU Public License (LGPL). ** ** ** ** For more details, please read the document LICENSE.txt, included ** ** in this project ** ** ** ** http://www.kettle.be ** ** [email protected] ** ** ** **********************************************************************/ package be.ibridge.kettle.spoon; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Properties; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.dialogs.MessageDialogWithToggle; import org.eclipse.jface.wizard.Wizard; import org.eclipse.jface.wizard.WizardDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.dnd.Clipboard; import org.eclipse.swt.dnd.DND; import org.eclipse.swt.dnd.DragSource; import org.eclipse.swt.dnd.DragSourceEvent; import org.eclipse.swt.dnd.DragSourceListener; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Cursor; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.graphics.ImageData; import org.eclipse.swt.graphics.RGB; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.printing.Printer; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Menu; import org.eclipse.swt.widgets.MenuItem; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.ToolBar; import org.eclipse.swt.widgets.ToolItem; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeItem; import org.w3c.dom.Document; import org.w3c.dom.Node; import be.ibridge.kettle.core.Const; import be.ibridge.kettle.core.DragAndDropContainer; import be.ibridge.kettle.core.GUIResource; import be.ibridge.kettle.core.KettleVariables; import be.ibridge.kettle.core.LogWriter; import be.ibridge.kettle.core.NotePadMeta; import be.ibridge.kettle.core.Point; import be.ibridge.kettle.core.PrintSpool; import be.ibridge.kettle.core.Props; import be.ibridge.kettle.core.Row; import be.ibridge.kettle.core.SourceToTargetMapping; import be.ibridge.kettle.core.TransAction; import be.ibridge.kettle.core.WindowProperty; import be.ibridge.kettle.core.XMLHandler; import be.ibridge.kettle.core.XMLHandlerCache; import be.ibridge.kettle.core.XMLTransfer; import be.ibridge.kettle.core.clipboard.ImageDataTransfer; import be.ibridge.kettle.core.database.Database; import be.ibridge.kettle.core.database.DatabaseMeta; import 
be.ibridge.kettle.core.dialog.CheckResultDialog; import be.ibridge.kettle.core.dialog.DatabaseDialog; import be.ibridge.kettle.core.dialog.DatabaseExplorerDialog; import be.ibridge.kettle.core.dialog.EnterMappingDialog; import be.ibridge.kettle.core.dialog.EnterOptionsDialog; import be.ibridge.kettle.core.dialog.EnterSearchDialog; import be.ibridge.kettle.core.dialog.EnterStringsDialog; import be.ibridge.kettle.core.dialog.ErrorDialog; import be.ibridge.kettle.core.dialog.PreviewRowsDialog; import be.ibridge.kettle.core.dialog.SQLEditor; import be.ibridge.kettle.core.dialog.SQLStatementsDialog; import be.ibridge.kettle.core.dialog.ShowBrowserDialog; import be.ibridge.kettle.core.dialog.Splash; import be.ibridge.kettle.core.exception.KettleDatabaseException; import be.ibridge.kettle.core.exception.KettleException; import be.ibridge.kettle.core.reflection.StringSearchResult; import be.ibridge.kettle.core.util.EnvUtil; import be.ibridge.kettle.core.value.Value; import be.ibridge.kettle.core.wizards.createdatabase.CreateDatabaseWizard; import be.ibridge.kettle.job.JobEntryLoader; import be.ibridge.kettle.pan.CommandLineOption; import be.ibridge.kettle.repository.PermissionMeta; import be.ibridge.kettle.repository.RepositoriesMeta; import be.ibridge.kettle.repository.Repository; import be.ibridge.kettle.repository.RepositoryDirectory; import be.ibridge.kettle.repository.RepositoryMeta; import be.ibridge.kettle.repository.UserInfo; import be.ibridge.kettle.repository.dialog.RepositoriesDialog; import be.ibridge.kettle.repository.dialog.RepositoryExplorerDialog; import be.ibridge.kettle.repository.dialog.SelectObjectDialog; import be.ibridge.kettle.repository.dialog.UserDialog; import be.ibridge.kettle.spoon.dialog.AnalyseImpactProgressDialog; import be.ibridge.kettle.spoon.dialog.CheckTransProgressDialog; import be.ibridge.kettle.spoon.dialog.GetSQLProgressDialog; import be.ibridge.kettle.spoon.dialog.ShowCreditsDialog; import be.ibridge.kettle.spoon.dialog.TipsDialog; import be.ibridge.kettle.spoon.wizards.CopyTableWizardPage1; import be.ibridge.kettle.spoon.wizards.CopyTableWizardPage2; import be.ibridge.kettle.spoon.wizards.CopyTableWizardPage3; import be.ibridge.kettle.trans.DatabaseImpact; import be.ibridge.kettle.trans.StepLoader; import be.ibridge.kettle.trans.StepPlugin; import be.ibridge.kettle.trans.TransHopMeta; import be.ibridge.kettle.trans.TransMeta; import be.ibridge.kettle.trans.dialog.TransDialog; import be.ibridge.kettle.trans.dialog.TransHopDialog; import be.ibridge.kettle.trans.dialog.TransLoadProgressDialog; import be.ibridge.kettle.trans.dialog.TransSaveProgressDialog; import be.ibridge.kettle.trans.step.BaseStep; import be.ibridge.kettle.trans.step.StepDialogInterface; import be.ibridge.kettle.trans.step.StepMeta; import be.ibridge.kettle.trans.step.StepMetaInterface; import be.ibridge.kettle.trans.step.selectvalues.SelectValuesMeta; import be.ibridge.kettle.trans.step.tableinput.TableInputMeta; import be.ibridge.kettle.trans.step.tableoutput.TableOutputMeta; /** * This class handles the main window of the Spoon graphical transformation editor. 
* * @author Matt * @since 16-may-2003 * * Add i18n support * import the package:be.ibridge.kettle.i18n.Messages * @modified by vitoelv since 07-Feb-2006 */ public class Spoon { public static final String APP_NAME = Messages.getString("Spoon.Application.Name"); //"Spoon"; private LogWriter log; private Display disp; private Shell shell; private boolean destroy; private SpoonGraph spoongraph; private SpoonLog spoonlog; private SashForm sashform; public CTabFolder tabfolder; public Row variables; /** * These are the arguments that were given at Spoon launch time... */ private String[] arguments; /** * A list of remarks on the current Transformation... */ private ArrayList remarks; /** * A list of impacts of the current transformation on the used databases. */ private ArrayList impact; /** * Indicates whether or not an impact analyses has already run. */ private boolean impactHasRun; private boolean stopped; private Cursor cursor_hourglass, cursor_hand; public Props props; public Repository rep; public TransMeta transMeta; private ToolBar tBar; private Menu msFile; private MenuItem miFileSep3; private MenuItem miEditUndo, miEditRedo; private Tree selectionTree; private TreeItem tiConn, tiHops, tiStep, tiBase, tiPlug; private Tree pluginHistoryTree; private Listener lsNew, lsEdit, lsDupe, lsCopy, lsDel, lsSQL, lsCache, lsExpl; private SelectionAdapter lsEditDef, lsEditSel; public static final String STRING_CONNECTIONS = Messages.getString("Spoon.STRING_CONNECTIONS"); //"Connections"; public static final String STRING_STEPS = Messages.getString("Spoon.STRING_STEPS"); //"Steps"; public static final String STRING_HOPS = Messages.getString("Spoon.STRING_HOPS"); //"Hops"; public static final String STRING_BASE = Messages.getString("Spoon.STRING_BASE"); //"Base step types"; public static final String STRING_PLUGIN = Messages.getString("Spoon.STRING_PLUGIN"); //"Plugin step types"; public static final String STRING_HISTORY = Messages.getString("Spoon.STRING_HISTORY"); //"Step creation history"; private static final String APPL_TITLE = APP_NAME; public KeyAdapter defKeys; public KeyAdapter modKeys; private SpoonHistory spoonhist; private Menu mBar; private Composite tabComp; private SashForm leftSash; public Spoon(LogWriter l, Repository rep) { this(l, null, null, rep); } public Spoon(LogWriter l, Display d, Repository rep) { this(l, d, null, rep); } public Spoon(LogWriter log, Display d, TransMeta ti, Repository rep) { this.log = log; this.rep = rep; if (d!=null) { disp=d; destroy=false; } else { disp=new Display(); destroy=true; } shell=new Shell(disp); shell.setText(APPL_TITLE); FormLayout layout = new FormLayout(); layout.marginWidth = 0; layout.marginHeight = 0; shell.setLayout (layout); // INIT Data structure if (ti==null) { this.transMeta = new TransMeta(); } else { this.transMeta = ti; } if (!Props.isInitialized()) { //log.logDetailed(toString(), "Load properties for Spoon..."); log.logDetailed(toString(),Messages.getString("Spoon.Log.LoadProperties")); Props.init(disp, Props.TYPE_PROPERTIES_SPOON); // things to remember... 
} props=Props.getInstance(); // Load settings in the props loadSettings(); remarks = new ArrayList(); impact = new ArrayList(); impactHasRun = false; // Clean out every time we start, auto-loading etc, is not a good idea // If they are needed that often, set them in the kettle.properties file // variables = new Row(); // props.setLook(shell); shell.setImage(GUIResource.getInstance().getImageSpoon()); cursor_hourglass = new Cursor(disp, SWT.CURSOR_WAIT); cursor_hand = new Cursor(disp, SWT.CURSOR_HAND); // widgets = new WidgetContainer(); defKeys = new KeyAdapter() { public void keyPressed(KeyEvent e) { // ESC --> Unselect All steps if (e.keyCode == SWT.ESC) { spoongraph.clearSettings(); transMeta.unselectAll(); refreshGraph(); }; // F3 --> createDatabaseWizard if (e.keyCode == SWT.F3) { createDatabaseWizard(); } // F4 --> copyTableWizard if (e.keyCode == SWT.F4) { copyTableWizard(); } // F5 --> refresh if (e.keyCode == SWT.F5) { refreshGraph(); refreshTree(true); } // F6 --> show last impact analyses if (e.keyCode == SWT.F6) { showLastImpactAnalyses(); } // F7 --> show last verify results if (e.keyCode == SWT.F7) { showLastTransCheck(); } // F8 --> show last preview if (e.keyCode == SWT.F8) { spoonlog.showPreview(); } // F9 --> run if (e.keyCode == SWT.F9) { tabfolder.setSelection(1); spoonlog.startstop(); } // F10 --> preview if (e.keyCode == SWT.F10) { spoonlog.preview(); } // F11 --> Verify if (e.keyCode == SWT.F11) { checkTrans(); spoongraph.clearSettings(); } // CTRL-A --> Select All steps if ((int)e.character == 1) { transMeta.selectAll(); }; // CTRL-D --> Disconnect from repository if ((int)e.character == 4) { closeRepository(); spoongraph.clearSettings(); }; // CTRL-E --> Explore the repository if ((int)e.character == 5) { exploreRepository(); spoongraph.clearSettings(); }; // CTRL-F --> Java examination if ((int)e.character == 6 && (( e.stateMask&SWT.CONTROL)!=0) && (( e.stateMask&SWT.ALT)==0) ) { searchMetaData(); spoongraph.clearSettings(); }; // CTRL-I --> Import from XML file && (e.keyCode&SWT.CONTROL)!=0 if ((int)e.character == 9 && (( e.stateMask&SWT.CONTROL)!=0) && (( e.stateMask&SWT.ALT)==0) ) { openFile(true); spoongraph.clearSettings(); }; // CTRL-J --> Get variables if ((int)e.character == 10 && (( e.stateMask&SWT.CONTROL)!=0) && (( e.stateMask&SWT.ALT)==0) ) { getVariables(); spoongraph.clearSettings(); }; // CTRL-N --> new if ((int)e.character == 14) { newFile(); spoongraph.clearSettings(); } // CTRL-O --> open if ((int)e.character == 15) { openFile(false); spoongraph.clearSettings(); } // CTRL-P --> print if ((int)e.character == 16) { printFile(); spoongraph.clearSettings(); } // CTRL-Q --> Impact analyses if ((int)e.character == 17) { analyseImpact(); spoongraph.clearSettings(); } // CTRL-R --> Connect to repository if ((int)e.character == 18) { openRepository(); spoongraph.clearSettings(); }; // CTRL-S --> save if ((int)e.character == 19) { saveFile(); spoongraph.clearSettings(); } // CTRL-T --> transformation if ((int)e.character == 20) { setTrans(); spoongraph.clearSettings(); } // CTRL-Y --> redo action if ((int)e.character == 25) { redoAction(); spoongraph.clearSettings(); } // CTRL-Z --> undo action if ((int)e.character == 26) { spoongraph.clearSettings(); undoAction(); } // CTRL-SHIFT-I --> Copy Transformation Image to clipboard if ((int)e.character == 9 && (( e.stateMask&SWT.CONTROL)!=0) && (( e.stateMask&SWT.ALT)!=0)) { copyTransformationImage(); } // System.out.println("(int)e.character = "+(int)e.character+", keycode = "+e.keyCode+", 
stateMask="+e.stateMask); } }; modKeys = new KeyAdapter() { public void keyPressed(KeyEvent e) { spoongraph.shift = (e.keyCode == SWT.SHIFT ); spoongraph.control = (e.keyCode == SWT.CONTROL); } public void keyReleased(KeyEvent e) { spoongraph.shift = (e.keyCode == SWT.SHIFT ); spoongraph.control = (e.keyCode == SWT.CONTROL); } }; addBar(); FormData fdBar = new FormData(); fdBar.left = new FormAttachment(0, 0); fdBar.top = new FormAttachment(0, 0); tBar.setLayoutData(fdBar); sashform = new SashForm(shell, SWT.HORIZONTAL); // props.setLook(sashform); FormData fdSash = new FormData(); fdSash.left = new FormAttachment(0, 0); fdSash.top = new FormAttachment(tBar, 0); fdSash.bottom = new FormAttachment(100, 0); fdSash.right = new FormAttachment(100, 0); sashform.setLayoutData(fdSash); addMenu(); addTree(); addTabs(); setTreeImages(); // In case someone dares to press the [X] in the corner ;-) shell.addShellListener( new ShellAdapter() { public void shellClosed(ShellEvent e) { e.doit=quitFile(); } } ); shell.layout(); // Set the shell size, based upon previous time... WindowProperty winprop = props.getScreen(APPL_TITLE); if (winprop!=null) winprop.setShell(shell); else { shell.pack(); shell.setMaximized(true); // Default = maximized! } } /** * Search the transformation meta-data. * */ public void searchMetaData() { EnterSearchDialog esd = new EnterSearchDialog(shell); if (esd.open()) { String filterString = esd.getFilterString(); String filter = filterString; if (filter!=null) filter = filter.toUpperCase(); List stringList = transMeta.getStringList(esd.isSearchingSteps(), esd.isSearchingDatabases(), esd.isSearchingNotes()); ArrayList rows = new ArrayList(); for (int i=0;i<stringList.size();i++) { StringSearchResult result = (StringSearchResult) stringList.get(i); boolean add = Const.isEmpty(filter); if (filter!=null && result.getString().toUpperCase().indexOf(filter)>=0) add=true; if (filter!=null && result.getFieldName().toUpperCase().indexOf(filter)>=0) add=true; if (filter!=null && result.getParentObject().toString().toUpperCase().indexOf(filter)>=0) add=true; if (add) rows.add(result.toRow()); } if (rows.size()!=0) { PreviewRowsDialog prd = new PreviewRowsDialog(shell, SWT.NONE, "String searcher", rows); prd.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.NothingFound.Message")); // Nothing found that matches your criteria mb.setText(Messages.getString("Spoon.Dialog.NothingFound.Title")); // Sorry! mb.open(); } } } public void getVariables() { Properties sp = new Properties(); KettleVariables kettleVariables = KettleVariables.getInstance(); sp.putAll(kettleVariables.getProperties()); sp.putAll(System.getProperties()); List list = transMeta.getUsedVariables(); for (int i=0;i<list.size();i++) { String varName = (String)list.get(i); String varValue = sp.getProperty(varName, ""); System.out.println("variable ["+varName+"] is defined as : "+varValue); if (variables.searchValueIndex(varName)<0) { variables.addValue(new Value(varName, varValue)); } } // Now ask the use for more info on these! 
EnterStringsDialog esd = new EnterStringsDialog(shell, SWT.NONE, variables); esd.setReadOnly(false); if (esd.open()!=null) { for (int i=0;i<variables.size();i++) { Value varval = variables.getValue(i); if (!Const.isEmpty(varval.getString())) { kettleVariables.setVariable(varval.getName(), varval.getString()); System.out.println("Variable ${"+varval.getName()+"} set to ["+varval.getString()+"] for thread ["+Thread.currentThread()+"]"); } } } } public void clear() { remarks = new ArrayList(); impact = new ArrayList(); impactHasRun = false; transMeta.clear(); XMLHandlerCache.getInstance().clear(); setUndoMenu(); } public void open() { shell.open(); // Shared database entries to load from repository? loadRepositoryObjects(); // What plugins did we use previously? refreshPluginHistory(); // Perhaps the transformation contains elements at startup? if (transMeta.nrSteps()>0 || transMeta.nrDatabases()>0 || transMeta.nrTransHops()>0) { refreshTree(true); // Do a complete refresh then... } transMeta.clearChanged(); // Clear changed: they were artificial (databases loaded, etc.) setShellText(); if (props.showTips()) { TipsDialog tip = new TipsDialog(shell, props); tip.open(); } } public boolean readAndDispatch () { return disp.readAndDispatch(); } /** * @return check whether or not the application was stopped. */ public boolean isStopped() { return stopped; } /** * @param stopped True to stop this application. */ public void setStopped(boolean stopped) { this.stopped = stopped; } /** * @param destroy Whether or not to distroy the display. */ public void setDestroy(boolean destroy) { this.destroy = destroy; } /** * @return Returns whether or not we should distroy the display. */ public boolean doDestroy() { return destroy; } /** * @param arguments The arguments to set. */ public void setArguments(String[] arguments) { this.arguments = arguments; } /** * @return Returns the arguments. */ public String[] getArguments() { return arguments; } public synchronized void dispose() { setStopped(true); cursor_hand.dispose(); cursor_hourglass.dispose(); if (destroy && !disp.isDisposed()) disp.dispose(); } public boolean isDisposed() { return disp.isDisposed(); } public void sleep() { disp.sleep(); } public void addMenu() { if (mBar!=null) { mBar.dispose(); } mBar = new Menu(shell, SWT.BAR); shell.setMenuBar(mBar); // main File menu... MenuItem mFile = new MenuItem(mBar, SWT.CASCADE); //mFile.setText("&File"); mFile.setText(Messages.getString("Spoon.Menu.File") ); msFile = new Menu(shell, SWT.DROP_DOWN); mFile.setMenu(msFile); MenuItem miFileNew = new MenuItem(msFile, SWT.CASCADE); miFileNew.setText(Messages.getString("Spoon.Menu.File.New")); //miFileNew.setText("&New \tCTRL-N"); MenuItem miFileOpen = new MenuItem(msFile, SWT.CASCADE); miFileOpen.setText(Messages.getString("Spoon.Menu.File.Open")); //&Open \tCTRL-O MenuItem miFileImport = new MenuItem(msFile, SWT.CASCADE); miFileImport.setText(Messages.getString("Spoon.Menu.File.Import")); //"&Import from an XML file\tCTRL-I" MenuItem miFileExport = new MenuItem(msFile, SWT.CASCADE); miFileExport.setText(Messages.getString("Spoon.Menu.File.Export")); //&Export to an XML file MenuItem miFileSave = new MenuItem(msFile, SWT.CASCADE); miFileSave.setText(Messages.getString("Spoon.Menu.File.Save")); //"&Save \tCTRL-S" MenuItem miFileSaveAs = new MenuItem(msFile, SWT.CASCADE); miFileSaveAs.setText(Messages.getString("Spoon.Menu.File.SaveAs")); //"Save &as..." 
new MenuItem(msFile, SWT.SEPARATOR); MenuItem miFilePrint = new MenuItem(msFile, SWT.CASCADE); miFilePrint.setText(Messages.getString("Spoon.Menu.File.Print")); //"&Print \tCTRL-P" new MenuItem(msFile, SWT.SEPARATOR); MenuItem miFileQuit = new MenuItem(msFile, SWT.CASCADE); miFileQuit.setText(Messages.getString("Spoon.Menu.File.Quit")); //miFileQuit.setText("&Quit"); miFileSep3 = new MenuItem(msFile, SWT.SEPARATOR); addMenuLast(); Listener lsFileOpen = new Listener() { public void handleEvent(Event e) { openFile(false); } }; Listener lsFileImport = new Listener() { public void handleEvent(Event e) { openFile(true); } }; Listener lsFileExport = new Listener() { public void handleEvent(Event e) { saveXMLFile(); } }; Listener lsFileNew = new Listener() { public void handleEvent(Event e) { newFile(); } }; Listener lsFileSave = new Listener() { public void handleEvent(Event e) { saveFile(); } }; Listener lsFileSaveAs = new Listener() { public void handleEvent(Event e) { saveFileAs(); } }; Listener lsFilePrint = new Listener() { public void handleEvent(Event e) { printFile(); } }; Listener lsFileQuit = new Listener() { public void handleEvent(Event e) { quitFile(); } }; miFileOpen .addListener (SWT.Selection, lsFileOpen ); miFileImport .addListener (SWT.Selection, lsFileImport ); miFileExport .addListener (SWT.Selection, lsFileExport ); miFileNew .addListener (SWT.Selection, lsFileNew ); miFileSave .addListener (SWT.Selection, lsFileSave ); miFileSaveAs .addListener (SWT.Selection, lsFileSaveAs ); miFilePrint .addListener (SWT.Selection, lsFilePrint ); miFileQuit .addListener (SWT.Selection, lsFileQuit ); // main Edit menu... MenuItem mEdit = new MenuItem(mBar, SWT.CASCADE); mEdit.setText(Messages.getString("Spoon.Menu.Edit")); //&Edit Menu msEdit = new Menu(shell, SWT.DROP_DOWN); mEdit.setMenu(msEdit); miEditUndo = new MenuItem(msEdit, SWT.CASCADE); miEditRedo = new MenuItem(msEdit, SWT.CASCADE); setUndoMenu(); new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditSearch = new MenuItem(msEdit, SWT.CASCADE); miEditSearch.setText(Messages.getString("Spoon.Menu.Edit.Search")); //Search Metadata \tCTRL-F MenuItem miEditVars = new MenuItem(msEdit, SWT.CASCADE); miEditVars.setText(Messages.getString("Spoon.Menu.Edit.Variables")); //Edit/Enter variables \tCTRL-F new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditUnselectAll = new MenuItem(msEdit, SWT.CASCADE); miEditUnselectAll.setText(Messages.getString("Spoon.Menu.Edit.ClearSelection")); //&Clear selection \tESC MenuItem miEditSelectAll = new MenuItem(msEdit, SWT.CASCADE); miEditSelectAll.setText(Messages.getString("Spoon.Menu.Edit.SelectAllSteps")); //"&Select all steps \tCTRL-A" new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditCopy = new MenuItem(msEdit, SWT.CASCADE); miEditCopy.setText(Messages.getString("Spoon.Menu.Edit.CopyToClipboard")); //Copy selected steps to clipboard\tCTRL-C MenuItem miEditPaste = new MenuItem(msEdit, SWT.CASCADE); miEditPaste.setText(Messages.getString("Spoon.Menu.Edit.PasteFromClipboard")); //Paste steps from clipboard\tCTRL-V new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditRefresh = new MenuItem(msEdit, SWT.CASCADE); miEditRefresh.setText(Messages.getString("Spoon.Menu.Edit.Refresh")); //&Refresh \tF5 new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditOptions = new MenuItem(msEdit, SWT.CASCADE); miEditOptions.setText(Messages.getString("Spoon.Menu.Edit.Options")); //&Options... 
Listener lsEditUndo = new Listener() { public void handleEvent(Event e) { undoAction(); } }; Listener lsEditRedo = new Listener() { public void handleEvent(Event e) { redoAction(); } }; Listener lsEditSearch = new Listener() { public void handleEvent(Event e) { searchMetaData(); } }; Listener lsEditVars = new Listener() { public void handleEvent(Event e) { getVariables(); } }; Listener lsEditUnselectAll = new Listener() { public void handleEvent(Event e) { editUnselectAll(); } }; Listener lsEditSelectAll = new Listener() { public void handleEvent(Event e) { editSelectAll(); } }; Listener lsEditOptions = new Listener() { public void handleEvent(Event e) { editOptions(); } }; miEditUndo .addListener(SWT.Selection, lsEditUndo); miEditRedo .addListener(SWT.Selection, lsEditRedo); miEditSearch .addListener(SWT.Selection, lsEditSearch); miEditVars .addListener(SWT.Selection, lsEditVars); miEditUnselectAll.addListener(SWT.Selection, lsEditUnselectAll); miEditSelectAll .addListener(SWT.Selection, lsEditSelectAll); miEditOptions .addListener(SWT.Selection, lsEditOptions); // main Repository menu... MenuItem mRep = new MenuItem(mBar, SWT.CASCADE); mRep.setText(Messages.getString("Spoon.Menu.Repository")); //&Repository Menu msRep = new Menu(shell, SWT.DROP_DOWN); mRep.setMenu(msRep); MenuItem miRepConnect = new MenuItem(msRep, SWT.CASCADE); miRepConnect.setText(Messages.getString("Spoon.Menu.Repository.ConnectToRepository")); //&Connect to repository \tCTRL-R MenuItem miRepDisconnect = new MenuItem(msRep, SWT.CASCADE); miRepDisconnect.setText(Messages.getString("Spoon.Menu.Repository.DisconnectRepository")); //&Disconnect repository \tCTRL-D MenuItem miRepExplore = new MenuItem(msRep, SWT.CASCADE); miRepExplore.setText(Messages.getString("Spoon.Menu.Repository.ExploreRepository")); //&Explore repository \tCTRL-E new MenuItem(msRep, SWT.SEPARATOR); MenuItem miRepUser = new MenuItem(msRep, SWT.CASCADE); miRepUser.setText(Messages.getString("Spoon.Menu.Repository.EditCurrentUser")); //&Edit current user\tCTRL-U Listener lsRepConnect = new Listener() { public void handleEvent(Event e) { openRepository(); } }; Listener lsRepDisconnect = new Listener() { public void handleEvent(Event e) { closeRepository(); } }; Listener lsRepExplore = new Listener() { public void handleEvent(Event e) { exploreRepository(); } }; Listener lsRepUser = new Listener() { public void handleEvent(Event e) { editRepositoryUser();} }; miRepConnect .addListener (SWT.Selection, lsRepConnect ); miRepDisconnect .addListener (SWT.Selection, lsRepDisconnect); miRepExplore .addListener (SWT.Selection, lsRepExplore ); miRepUser .addListener (SWT.Selection, lsRepUser ); // main Transformation menu... 
MenuItem mTrans = new MenuItem(mBar, SWT.CASCADE); mTrans.setText(Messages.getString("Spoon.Menu.Transformation")); //&Transformation Menu msTrans = new Menu(shell, SWT.DROP_DOWN ); mTrans.setMenu(msTrans); MenuItem miTransRun = new MenuItem(msTrans, SWT.CASCADE); miTransRun .setText(Messages.getString("Spoon.Menu.Transformation.Run"));//&Run \tF9 MenuItem miTransPreview = new MenuItem(msTrans, SWT.CASCADE); miTransPreview.setText(Messages.getString("Spoon.Menu.Transformation.Preview"));//&Preview \tF10 MenuItem miTransCheck = new MenuItem(msTrans, SWT.CASCADE); miTransCheck .setText(Messages.getString("Spoon.Menu.Transformation.Verify"));//&Verify \tF11 MenuItem miTransImpact = new MenuItem(msTrans, SWT.CASCADE); miTransImpact .setText(Messages.getString("Spoon.Menu.Transformation.Impact"));//&Impact MenuItem miTransSQL = new MenuItem(msTrans, SWT.CASCADE); miTransSQL .setText(Messages.getString("Spoon.Menu.Transformation.GetSQL"));//&Get SQL new MenuItem(msTrans, SWT.SEPARATOR); MenuItem miLastImpact = new MenuItem(msTrans, SWT.CASCADE); miLastImpact .setText(Messages.getString("Spoon.Menu.Transformation.ShowLastImpactAnalyses"));//Show last impact analyses \tF6 MenuItem miLastCheck = new MenuItem(msTrans, SWT.CASCADE); miLastCheck .setText(Messages.getString("Spoon.Menu.Transformation.ShowLastVerifyResults"));//Show last verify results \tF7 MenuItem miLastPreview = new MenuItem(msTrans, SWT.CASCADE); miLastPreview .setText(Messages.getString("Spoon.Menu.Transformation.ShowLastPreviewResults"));//Show last preview results \tF8 new MenuItem(msTrans, SWT.SEPARATOR); MenuItem miTransCopy = new MenuItem(msTrans, SWT.CASCADE); miTransCopy .setText(Messages.getString("Spoon.Menu.Transformation.CopyTransformationToClipboard"));//&Copy transformation to clipboard MenuItem miTransPaste = new MenuItem(msTrans, SWT.CASCADE); miTransPaste .setText(Messages.getString("Spoon.Menu.Transformation.PasteTransformationFromClipboard"));//P&aste transformation from clipboard MenuItem miTransImage = new MenuItem(msTrans, SWT.CASCADE); miTransImage .setText(Messages.getString("Spoon.Menu.Transformation.CopyTransformationImageClipboard"));//Copy the transformation image clipboard \tCTRL-ALT-I new MenuItem(msTrans, SWT.SEPARATOR); MenuItem miTransDetails = new MenuItem(msTrans, SWT.CASCADE); miTransDetails.setText(Messages.getString("Spoon.Menu.Transformation.Settings"));//&Settings... 
\tCTRL-T Listener lsTransDetails = new Listener() { public void handleEvent(Event e) { setTrans(); } }; Listener lsTransRun = new Listener() { public void handleEvent(Event e) { tabfolder.setSelection(1); spoonlog.startstop(); } }; Listener lsTransPreview = new Listener() { public void handleEvent(Event e) { spoonlog.preview(); } }; Listener lsTransCheck = new Listener() { public void handleEvent(Event e) { checkTrans(); } }; Listener lsTransImpact = new Listener() { public void handleEvent(Event e) { analyseImpact(); } }; Listener lsTransSQL = new Listener() { public void handleEvent(Event e) { getSQL(); } }; Listener lsLastPreview = new Listener() { public void handleEvent(Event e) { spoonlog.showPreview(); } }; Listener lsLastCheck = new Listener() { public void handleEvent(Event e) { showLastTransCheck(); } }; Listener lsLastImpact = new Listener() { public void handleEvent(Event e) { showLastImpactAnalyses(); } }; Listener lsTransCopy = new Listener() { public void handleEvent(Event e) { copyTransformation(); } }; Listener lsTransImage = new Listener() { public void handleEvent(Event e) { copyTransformationImage(); } }; Listener lsTransPaste = new Listener() { public void handleEvent(Event e) { pasteTransformation(); } }; miTransDetails.addListener(SWT.Selection, lsTransDetails); miTransRun .addListener(SWT.Selection, lsTransRun); miTransPreview.addListener(SWT.Selection, lsTransPreview); miTransCheck .addListener(SWT.Selection, lsTransCheck); miTransImpact .addListener(SWT.Selection, lsTransImpact); miTransSQL .addListener(SWT.Selection, lsTransSQL); miLastPreview .addListener(SWT.Selection, lsLastPreview); miLastCheck .addListener(SWT.Selection, lsLastCheck); miLastImpact .addListener(SWT.Selection, lsLastImpact); miTransCopy .addListener(SWT.Selection, lsTransCopy); miTransPaste .addListener(SWT.Selection, lsTransPaste); miTransImage .addListener(SWT.Selection, lsTransImage); // Wizard menu MenuItem mWizard = new MenuItem(mBar, SWT.CASCADE); mWizard.setText(Messages.getString("Spoon.Menu.Wizard")); //"&Wizard" Menu msWizard = new Menu(shell, SWT.DROP_DOWN ); mWizard.setMenu(msWizard); MenuItem miWizardNewConnection = new MenuItem(msWizard, SWT.CASCADE); miWizardNewConnection.setText(Messages.getString("Spoon.Menu.Wizard.CreateDatabaseConnectionWizard"));//&Create database connection wizard...\tF3 Listener lsWizardNewConnection= new Listener() { public void handleEvent(Event e) { createDatabaseWizard(); } }; miWizardNewConnection.addListener(SWT.Selection, lsWizardNewConnection); MenuItem miWizardCopyTable = new MenuItem(msWizard, SWT.CASCADE); miWizardCopyTable.setText(Messages.getString("Spoon.Menu.Wizard.CopyTableWizard"));//&Copy table wizard...\tF4 Listener lsWizardCopyTable= new Listener() { public void handleEvent(Event e) { copyTableWizard(); } }; miWizardCopyTable.addListener(SWT.Selection, lsWizardCopyTable); // main Help menu... 
MenuItem mHelp = new MenuItem(mBar, SWT.CASCADE); mHelp.setText(Messages.getString("Spoon.Menu.Help")); //"&Help" Menu msHelp = new Menu(shell, SWT.DROP_DOWN ); mHelp.setMenu(msHelp); MenuItem miHelpCredit = new MenuItem(msHelp, SWT.CASCADE); miHelpCredit.setText(Messages.getString("Spoon.Menu.Help.Credits"));//&Credits Listener lsHelpCredit = new Listener() { public void handleEvent(Event e) { ShowCreditsDialog scd = new ShowCreditsDialog(shell, props, GUIResource.getInstance().getImageCredits()); scd.open(); } }; miHelpCredit.addListener (SWT.Selection, lsHelpCredit ); MenuItem miHelpTOTD = new MenuItem(msHelp, SWT.CASCADE); miHelpTOTD.setText(Messages.getString("Spoon.Menu.Help.Tip"));//&Tip of the day Listener lsHelpTOTD = new Listener() { public void handleEvent(Event e) { TipsDialog td = new TipsDialog(shell, props); td.open(); } }; miHelpTOTD.addListener (SWT.Selection, lsHelpTOTD ); new MenuItem(msHelp, SWT.SEPARATOR); MenuItem miHelpAbout = new MenuItem(msHelp, SWT.CASCADE); miHelpAbout.setText(Messages.getString("Spoon.Menu.About"));//"&About" Listener lsHelpAbout = new Listener() { public void handleEvent(Event e) { helpAbout(); } }; miHelpAbout.addListener (SWT.Selection, lsHelpAbout ); } private void addMenuLast() { int idx = msFile.indexOf(miFileSep3); int max = msFile.getItemCount(); // Remove everything until end... for (int i=max-1;i>idx;i--) { MenuItem mi = msFile.getItem(i); mi.dispose(); } // Previously loaded files... String lf[] = props.getLastFiles(); String ld[] = props.getLastDirs(); boolean lt[] = props.getLastTypes(); String lr[] = props.getLastRepositories(); for (int i=0;i<lf.length;i++) { MenuItem miFileLast = new MenuItem(msFile, SWT.CASCADE); char chr = (char)('1'+i ); int accel = SWT.CTRL | chr; String repository = ( lr[i]!=null && lr[i].length()>0 ) ? ( "["+lr[i]+"] " ) : ""; String filename = RepositoryDirectory.DIRECTORY_SEPARATOR + lf[i]; if (!lt[i]) filename = lf[i]; if (!ld[i].equals(RepositoryDirectory.DIRECTORY_SEPARATOR)) { filename=ld[i]+filename; } if (i<9) { miFileLast.setAccelerator(accel); miFileLast.setText("&"+chr+" "+repository+filename+ "\tCTRL-"+chr); } else { miFileLast.setText(" "+repository+filename); } final String fn = lf[i]; // filename final String fd = ld[i]; // Repository directory ... final boolean ft = lt[i]; // type: true=repository, false=file final String fr = lr[i]; // repository name Listener lsFileLast = new Listener() { public void handleEvent(Event e) { if (showChangedWarning()) { // If the file comes from a repository and it's not the same as // the one we're connected to, ask for a username/password! // boolean noRepository=false; if (ft && (rep==null || !rep.getRepositoryInfo().getName().equalsIgnoreCase(fr) )) { int perms[] = new int[] { PermissionMeta.TYPE_PERMISSION_TRANSFORMATION }; RepositoriesDialog rd = new RepositoriesDialog(disp, SWT.NONE, perms, Messages.getString("Spoon.Application.Name")); //RepositoriesDialog.ToolName="Spoon" rd.setRepositoryName(fr); if (rd.open()) { // Close the previous connection... 
if (rep!=null) rep.disconnect(); rep = new Repository(log, rd.getRepository(), rd.getUser()); try { rep.connect(APP_NAME); } catch(KettleException ke) { rep=null; new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnableConnectRepository.Title"), Messages.getString("Spoon.Dialog.UnableConnectRepository.Message"), ke); //$NON-NLS-1$ //$NON-NLS-2$ } } else { noRepository=true; } } if (ft) { if (!noRepository && rep!=null && rep.getRepositoryInfo().getName().equalsIgnoreCase(fr)) { // OK, we're connected to the new repository... // Load the transformation... RepositoryDirectory fdRepdir = rep.getDirectoryTree().findDirectory(fd); TransLoadProgressDialog tlpd = new TransLoadProgressDialog(shell, rep, fn, fdRepdir); TransMeta transInfo = tlpd.open(); if (transInfo!=null) { transMeta = transInfo; transMeta.clearChanged(); props.addLastFile(Props.TYPE_PROPERTIES_SPOON, fn, fdRepdir.getPath(), true, rep.getName()); } } else { clear(); MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.UnableLoadTransformation.Message"));//Can't load this transformation. Please connect to the correct repository first. mb.setText(Messages.getString("Spoon.Dialog.UnableLoadTransformation.Title"));//Error! mb.open(); } } else // Load from XML! { try { transMeta = new TransMeta(fn); transMeta.clearChanged(); transMeta.setFilename(fn); props.addLastFile(Props.TYPE_PROPERTIES_SPOON, fn, null, false, null); } catch(KettleException ke) { clear(); //"Error loading transformation", "I was unable to load this transformation from the XML file because of an error" new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.LoadTransformationError.Title"), Messages.getString("Spoon.Dialog.LoadTransformationError.Message"), ke); } } setShellText(); addMenuLast(); refreshTree(); refreshGraph(); refreshHistory(); } } }; miFileLast.addListener(SWT.Selection, lsFileLast); } } private void addBar() { tBar = new ToolBar(shell, SWT.HORIZONTAL | SWT.FLAT ); // props.setLook(tBar); final ToolItem tiFileNew = new ToolItem(tBar, SWT.PUSH); final Image imFileNew = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"new.png")); tiFileNew.setImage(imFileNew); tiFileNew.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { newFile(); }}); tiFileNew.setToolTipText(Messages.getString("Spoon.Tooltip.NewTranformation"));//New transformation, clear all settings final ToolItem tiFileOpen = new ToolItem(tBar, SWT.PUSH); final Image imFileOpen = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"open.png")); tiFileOpen.setImage(imFileOpen); tiFileOpen.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { openFile(false); }}); tiFileOpen.setToolTipText(Messages.getString("Spoon.Tooltip.OpenTranformation"));//Open tranformation final ToolItem tiFileSave = new ToolItem(tBar, SWT.PUSH); final Image imFileSave = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"save.png")); tiFileSave.setImage(imFileSave); tiFileSave.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { saveFile(); }}); tiFileSave.setToolTipText(Messages.getString("Spoon.Tooltip.SaveCurrentTranformation"));//Save current transformation final ToolItem tiFileSaveAs = new ToolItem(tBar, SWT.PUSH); final Image imFileSaveAs = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"saveas.png")); tiFileSaveAs.setImage(imFileSaveAs); 
tiFileSaveAs.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { saveFileAs(); }}); tiFileSaveAs.setToolTipText(Messages.getString("Spoon.Tooltip.SaveDifferentNameTranformation"));//Save transformation with different name new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiFilePrint = new ToolItem(tBar, SWT.PUSH); final Image imFilePrint = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"print.png")); tiFilePrint.setImage(imFilePrint); tiFilePrint.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { printFile(); }}); tiFilePrint.setToolTipText(Messages.getString("Spoon.Tooltip.Print"));//Print new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiFileRun = new ToolItem(tBar, SWT.PUSH); final Image imFileRun = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"run.png")); tiFileRun.setImage(imFileRun); tiFileRun.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { tabfolder.setSelection(1); spoonlog.startstop(); }}); tiFileRun.setToolTipText(Messages.getString("Spoon.Tooltip.RunTranformation"));//Run this transformation final ToolItem tiFilePreview = new ToolItem(tBar, SWT.PUSH); final Image imFilePreview = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"preview.png")); tiFilePreview.setImage(imFilePreview); tiFilePreview.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { spoonlog.preview(); }}); tiFilePreview.setToolTipText(Messages.getString("Spoon.Tooltip.PreviewTranformation"));//Preview this transformation final ToolItem tiFileReplay = new ToolItem(tBar, SWT.PUSH); final Image imFileReplay = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"replay.png")); tiFileReplay.setImage(imFileReplay); tiFileReplay.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { tabfolder.setSelection(1); spoonlog.startstopReplay(); }}); tiFileReplay.setToolTipText("Replay this transformation"); new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiFileCheck = new ToolItem(tBar, SWT.PUSH); final Image imFileCheck = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"check.png")); tiFileCheck.setImage(imFileCheck); tiFileCheck.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { checkTrans(); }}); tiFileCheck.setToolTipText(Messages.getString("Spoon.Tooltip.VerifyTranformation"));//Verify this transformation new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiImpact = new ToolItem(tBar, SWT.PUSH); final Image imImpact = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"impact.png")); // Can't seem to get the transparency correct for this image! 
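/*
 * Transparency workaround for the impact and SQL toolbar icons: as the comment
 * above notes, the PNG apparently loads without usable transparency, so the
 * lines below patch the ImageData by marking the palette index of pure white
 * (255, 255, 255) as the transparent pixel and rebuilding the Image from that
 * data. A hypothetical helper (not part of this class) doing the same thing
 * would look roughly like:
 *
 *   private static Image makeWhiteTransparent(Display display, Image source)
 *   {
 *       ImageData data = source.getImageData();
 *       data.transparentPixel = data.palette.getPixel(new RGB(255, 255, 255));
 *       return new Image(display, data);
 *   }
 *
 * Note that only the four file toolbar images are disposed in the
 * DisposeListener further down; the impact/SQL images and their patched
 * copies are not explicitly disposed here.
 */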
ImageData idImpact = imImpact.getImageData(); int impactPixel = idImpact.palette.getPixel(new RGB(255, 255, 255)); idImpact.transparentPixel = impactPixel; Image imImpact2 = new Image(disp, idImpact); tiImpact.setImage(imImpact2); tiImpact.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { analyseImpact(); }}); tiImpact.setToolTipText(Messages.getString("Spoon.Tooltip.AnalyzeTranformation"));//Analyze the impact of this transformation on the database(s) new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiSQL = new ToolItem(tBar, SWT.PUSH); final Image imSQL = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"SQLbutton.png")); // Can't seem to get the transparency correct for this image! ImageData idSQL = imSQL.getImageData(); int sqlPixel= idSQL.palette.getPixel(new RGB(255, 255, 255)); idSQL.transparentPixel = sqlPixel; Image imSQL2= new Image(disp, idSQL); tiSQL.setImage(imSQL2); tiSQL.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { getSQL(); }}); tiSQL.setToolTipText(Messages.getString("Spoon.Tooltip.GenerateSQLForTranformation"));//Generate the SQL needed to run this transformation tBar.addDisposeListener(new DisposeListener() { public void widgetDisposed(DisposeEvent e) { imFileNew.dispose(); imFileOpen.dispose(); imFileSave.dispose(); imFileSaveAs.dispose(); } } ); tBar.addKeyListener(defKeys); tBar.addKeyListener(modKeys); tBar.pack(); } private void addTree() { if (leftSash!=null) { leftSash.dispose(); } // Split the left side of the screen in half leftSash = new SashForm(sashform, SWT.VERTICAL); // Now set up the main CSH tree selectionTree = new Tree(leftSash, SWT.SINGLE | SWT.BORDER); props.setLook(selectionTree); selectionTree.setLayout(new FillLayout()); tiConn = new TreeItem(selectionTree, SWT.NONE); tiConn.setText(STRING_CONNECTIONS); tiStep = new TreeItem(selectionTree, SWT.NONE); tiStep.setText(STRING_STEPS); tiHops = new TreeItem(selectionTree, SWT.NONE); tiHops.setText(STRING_HOPS); tiBase = new TreeItem(selectionTree, SWT.NONE); tiBase.setText(STRING_BASE); tiPlug = new TreeItem(selectionTree, SWT.NONE); tiPlug.setText(STRING_PLUGIN); // Fill the base components... StepLoader steploader = StepLoader.getInstance(); StepPlugin basesteps[] = steploader.getStepsWithType(StepPlugin.TYPE_NATIVE); String basecat[] = steploader.getCategories(StepPlugin.TYPE_NATIVE); TreeItem tiBaseCat[] = new TreeItem[basecat.length]; for (int i=0;i<basecat.length;i++) { tiBaseCat[i] = new TreeItem(tiBase, SWT.NONE); tiBaseCat[i].setText(basecat[i]); for (int j=0;j<basesteps.length;j++) { if (basesteps[j].getCategory().equalsIgnoreCase(basecat[i])) { TreeItem ti = new TreeItem(tiBaseCat[i], 0); ti.setText(basesteps[j].getDescription()); } } } // Show the plugins... 
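// The plugin branch below is populated the same way as the base-step branch
// above: one TreeItem per category, with the StepPlugin description as the
// leaf label. That description doubles as the lookup key later on: the
// tooltip listener and newStep() both resolve the selected item back to its
// plugin via StepLoader.findStepPluginWithDescription().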
StepPlugin plugins[] = steploader.getStepsWithType(StepPlugin.TYPE_PLUGIN); String plugcat[] = steploader.getCategories(StepPlugin.TYPE_PLUGIN); TreeItem tiPlugCat[] = new TreeItem[plugcat.length]; for (int i=0;i<plugcat.length;i++) { tiPlugCat[i] = new TreeItem(tiPlug, SWT.NONE); tiPlugCat[i].setText(plugcat[i]); for (int j=0;j<plugins.length;j++) { if (plugins[j].getCategory().equalsIgnoreCase(plugcat[i])) { TreeItem ti = new TreeItem(tiPlugCat[i], 0); ti.setText(plugins[j].getDescription()); } } } tiConn.setExpanded(true); tiStep.setExpanded(false); tiBase.setExpanded(true); tiPlug.setExpanded(true); addToolTipsToTree(selectionTree); // Popup-menu selection lsNew = new Listener() { public void handleEvent(Event e) { newSelected(); } }; lsEdit = new Listener() { public void handleEvent(Event e) { editSelected(); } }; lsDupe = new Listener() { public void handleEvent(Event e) { dupeSelected(); } }; lsCopy = new Listener() { public void handleEvent(Event e) { clipSelected(); } }; lsDel = new Listener() { public void handleEvent(Event e) { delSelected(); } }; lsSQL = new Listener() { public void handleEvent(Event e) { sqlSelected(); } }; lsCache = new Listener() { public void handleEvent(Event e) { clearDBCache(); } }; lsExpl = new Listener() { public void handleEvent(Event e) { exploreDB(); } }; // Default selection (double-click, enter) lsEditDef = new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e){ editSelected(); } }; //lsNewDef = new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e){ newSelected(); } }; lsEditSel = new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { setMenu(e); } }; // Add all the listeners... selectionTree.addSelectionListener(lsEditDef); // double click somewhere in the tree... //tCSH.addSelectionListener(lsNewDef); // double click somewhere in the tree... selectionTree.addSelectionListener(lsEditSel); // Keyboard shortcuts! selectionTree.addKeyListener(defKeys); selectionTree.addKeyListener(modKeys); // Set a listener on the tree addDragSourceToTree(selectionTree); // OK, now add a list of often-used icons to the bottom of the tree... 
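// The lower half of the left-hand sash holds a second, smaller tree with the
// most recently used step types; it is (re)filled from props.getPluginHistory()
// in refreshPluginHistory() below. The sash weights give the selection tree
// 70% of the vertical space and the history tree the remaining 30%.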
pluginHistoryTree = new Tree(leftSash, SWT.SINGLE ); // Add tooltips for history tree too addToolTipsToTree(pluginHistoryTree); // Set the same listener on this tree addDragSourceToTree(pluginHistoryTree); leftSash.setWeights(new int[] { 70, 30 } ); } private void addToolTipsToTree(Tree tree) { tree.addListener(SWT.MouseHover, new Listener() { public void handleEvent(Event e) { String tooltip=null; Tree tree = (Tree)e.widget; TreeItem item = tree.getItem(new org.eclipse.swt.graphics.Point(e.x, e.y)); if (item!=null) { StepLoader steploader = StepLoader.getInstance(); StepPlugin sp = steploader.findStepPluginWithDescription(item.getText()); if (sp!=null) { tooltip = sp.getTooltip(); } else if (item.getText().equalsIgnoreCase(STRING_BASE) || item.getText().equalsIgnoreCase(STRING_PLUGIN) ) { tooltip=Messages.getString("Spoon.Tooltip.SelectStepType",Const.CR); //"Select one of the step types listed below and"+Const.CR+"drag it onto the graphical view tab to the right."; } } tree.setToolTipText(tooltip); } } ); } private void addDragSourceToTree(Tree tree) { final Tree fTree = tree; // Drag & Drop for steps Transfer[] ttypes = new Transfer[] { XMLTransfer.getInstance() }; DragSource ddSource = new DragSource(fTree, DND.DROP_MOVE); ddSource.setTransfer(ttypes); ddSource.addDragListener(new DragSourceListener() { public void dragStart(DragSourceEvent event){ } public void dragSetData(DragSourceEvent event) { TreeItem ti[] = fTree.getSelection(); if (ti.length>0) { String data = null; int type = 0; String ts[] = Const.getTreeStrings(ti[0]); if (ts!=null && ts.length > 0) { // Drop of existing hidden step onto canvas? if (ts[0].equalsIgnoreCase(STRING_STEPS)) { type = DragAndDropContainer.TYPE_STEP; data=ti[0].getText(); // name of the step. } else if ( ts[0].equalsIgnoreCase(STRING_BASE) || ts[0].equalsIgnoreCase(STRING_PLUGIN) || ts[0].equalsIgnoreCase(STRING_HISTORY) ) { type = DragAndDropContainer.TYPE_BASE_STEP_TYPE; data=ti[0].getText(); // Step type } else if (ts[0].equalsIgnoreCase(STRING_CONNECTIONS)) { type = DragAndDropContainer.TYPE_DATABASE_CONNECTION; data=ti[0].getText(); // Database connection name to use } else if (ts[0].equalsIgnoreCase(STRING_HOPS)) { type = DragAndDropContainer.TYPE_TRANS_HOP; data=ti[0].getText(); // nothing for really ;-) } else { event.doit=false; return; // ignore anything else you drag. 
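/*
 * Every recognised branch above produces a (type, data) pair; below it is
 * wrapped in a DragAndDropContainer and handed to SWT as the drag payload.
 * The only transfer type registered for this DragSource is XMLTransfer, so
 * the container presumably travels to the drop target (the graphical view)
 * as a small XML snippet from which the target reconstructs the step type,
 * connection or hop name that was dragged.
 */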
} event.data = new DragAndDropContainer(type, data); } } else // Nothing got dragged, only can happen on OSX :-) { event.doit=false; } } public void dragFinished(DragSourceEvent event) {} } ); } public void refreshPluginHistory() { pluginHistoryTree.removeAll(); TreeItem tiMain = new TreeItem(pluginHistoryTree, SWT.NONE); tiMain.setText(STRING_HISTORY); List pluginHistory = props.getPluginHistory(); for (int i=0;i<pluginHistory.size();i++) { String pluginID = (String)pluginHistory.get(i); StepPlugin stepPlugin = StepLoader.getInstance().findStepPluginWithID(pluginID); if (stepPlugin!=null) { Image image = (Image) GUIResource.getInstance().getImagesSteps().get(pluginID); TreeItem ti = new TreeItem(tiMain, SWT.NONE); ti.setText(stepPlugin.getDescription()); ti.setImage(image); } } tiMain.setExpanded(true); } private void setMenu(SelectionEvent e) { TreeItem ti = (TreeItem)e.item; String strti = ti.getText(); Tree root = ti.getParent(); log.logDebug(toString(), Messages.getString("Spoon.Log.ClickedOn") +ti.getText());//Clicked on TreeItem sel[] = root.getSelection(); Menu mCSH = new Menu(shell, SWT.POP_UP); // Find the level we clicked on: Top level (only NEW in the menu) or below (edit, insert, ...) TreeItem parent = ti.getParentItem(); if (parent==null) // Top level { if (!strti.equalsIgnoreCase(STRING_BASE) && !strti.equalsIgnoreCase(STRING_PLUGIN)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.BASE.New"));//"New" miNew.addListener( SWT.Selection, lsNew ); } if (strti.equalsIgnoreCase(STRING_STEPS)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.STEPS.SortSteps"));//Sort steps miNew.addSelectionListener( new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { transMeta.sortSteps(); refreshTree(true); } }); } if (strti.equalsIgnoreCase(STRING_HOPS)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.HOPS.SortHops"));//Sort hops miNew.addSelectionListener( new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { transMeta.sortHops(); refreshTree(true); } }); } if (strti.equalsIgnoreCase(STRING_CONNECTIONS)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.NewConnectionWizard"));//New Connection Wizard miNew.addSelectionListener( new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { createDatabaseWizard(); } } ); MenuItem miCache = new MenuItem(mCSH, SWT.PUSH); miCache.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.ClearDBCacheComplete"));//Clear complete DB Cache miCache.addListener( SWT.Selection, lsCache ); } } else { String strparent = parent.getText(); if (strparent.equalsIgnoreCase(STRING_CONNECTIONS)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.New"));//New MenuItem miEdit = new MenuItem(mCSH, SWT.PUSH); miEdit.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.Edit"));//Edit MenuItem miDupe = new MenuItem(mCSH, SWT.PUSH); miDupe.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.Duplicate"));//Duplicate MenuItem miCopy = new MenuItem(mCSH, SWT.PUSH); miCopy.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.CopyToClipboard"));//Copy to clipboard MenuItem miDel = new MenuItem(mCSH, SWT.PUSH); miDel.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.Delete"));//Delete new MenuItem(mCSH, SWT.SEPARATOR); MenuItem 
miSQL = new MenuItem(mCSH, SWT.PUSH); miSQL.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.SQLEditor"));//SQL Editor MenuItem miCache= new MenuItem(mCSH, SWT.PUSH); miCache.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.ClearDBCache")+ti.getText());//Clear DB Cache of new MenuItem(mCSH, SWT.SEPARATOR); MenuItem miExpl = new MenuItem(mCSH, SWT.PUSH); miExpl.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.Explore"));//Explore // disable for now if the connection is an SAP R/3 type of database... DatabaseMeta dbMeta = transMeta.findDatabase(strti); if (dbMeta==null || dbMeta.getDatabaseType()==DatabaseMeta.TYPE_DATABASE_SAPR3) miExpl.setEnabled(false); miNew.addListener( SWT.Selection, lsNew ); miEdit.addListener(SWT.Selection, lsEdit ); miDupe.addListener(SWT.Selection, lsDupe ); miCopy.addListener(SWT.Selection, lsCopy ); miDel.addListener(SWT.Selection, lsDel ); miSQL.addListener(SWT.Selection, lsSQL ); miCache.addListener(SWT.Selection, lsCache); miExpl.addListener(SWT.Selection, lsExpl); } if (strparent.equalsIgnoreCase(STRING_STEPS)) { if (sel.length==2) { MenuItem miNewHop = new MenuItem(mCSH, SWT.PUSH); miNewHop.setText(Messages.getString("Spoon.Menu.Popup.STEPS.NewHop"));//New Hop miNewHop.addListener(SWT.Selection, lsNew); } MenuItem miEdit = new MenuItem(mCSH, SWT.PUSH); miEdit.setText(Messages.getString("Spoon.Menu.Popup.STEPS.Edit"));//Edit MenuItem miDupe = new MenuItem(mCSH, SWT.PUSH); miDupe.setText(Messages.getString("Spoon.Menu.Popup.STEPS.Duplicate"));//Duplicate MenuItem miDel = new MenuItem(mCSH, SWT.PUSH); miDel.setText(Messages.getString("Spoon.Menu.Popup.STEPS.Delete"));//Delete miEdit.addListener(SWT.Selection, lsEdit ); miDupe.addListener(SWT.Selection, lsDupe ); miDel.addListener(SWT.Selection, lsDel ); } if (strparent.equalsIgnoreCase(STRING_HOPS)) { MenuItem miEdit = new MenuItem(mCSH, SWT.PUSH); miEdit.setText(Messages.getString("Spoon.Menu.Popup.HOPS.Edit"));//Edit MenuItem miDel = new MenuItem(mCSH, SWT.PUSH); miDel.setText(Messages.getString("Spoon.Menu.Popup.HOPS.Delete"));//Delete miEdit.addListener( SWT.Selection, lsEdit ); miDel.addListener ( SWT.Selection, lsDel ); } TreeItem grandparent = parent.getParentItem(); if (grandparent!=null) { String strgrandparent = grandparent.getText(); if (strgrandparent.equalsIgnoreCase(STRING_BASE) || strgrandparent.equalsIgnoreCase(STRING_PLUGIN)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.BASE_PLUGIN.New"));//New miNew.addListener( SWT.Selection, lsNew ); } } } selectionTree.setMenu(mCSH); } private void addTabs() { if (tabComp!=null) { tabComp.dispose(); } tabComp = new Composite(sashform, SWT.BORDER ); props.setLook(tabComp); FormLayout childLayout = new FormLayout(); childLayout.marginWidth = 0; childLayout.marginHeight = 0; tabComp.setLayout(childLayout); tabfolder= new CTabFolder(tabComp, SWT.BORDER); props.setLook(tabfolder, Props.WIDGET_STYLE_TAB); FormData fdTabfolder = new FormData(); fdTabfolder.left = new FormAttachment(0, 0); fdTabfolder.right = new FormAttachment(100, 0); fdTabfolder.top = new FormAttachment(0, 0); fdTabfolder.bottom = new FormAttachment(100, 0); tabfolder.setLayoutData(fdTabfolder); CTabItem tiTabsGraph = new CTabItem(tabfolder, SWT.NONE); tiTabsGraph.setText(Messages.getString("Spoon.Title.GraphicalView"));//"Graphical view" tiTabsGraph.setToolTipText(Messages.getString("Spoon.Tooltip.DisplaysTransformationGraphical"));//Displays the transformation graphically. 
CTabItem tiTabsList = new CTabItem(tabfolder, SWT.NULL); tiTabsList.setText(Messages.getString("Spoon.Title.LogView"));//Log view tiTabsList.setToolTipText(Messages.getString("Spoon.Tooltip.DisplaysTransformationLog"));//Displays the log of the running transformation. CTabItem tiTabsHist = new CTabItem(tabfolder, SWT.NULL); tiTabsHist.setText(Messages.getString("Spoon.Title.LogHistory"));//Log view tiTabsHist.setToolTipText(Messages.getString("Spoon.Tooltip.DisplaysHistoryLogging"));//Displays the history of previous transformation runs. spoongraph = new SpoonGraph(tabfolder, SWT.V_SCROLL | SWT.H_SCROLL | SWT.NO_BACKGROUND, log, this); spoonlog = new SpoonLog(tabfolder, SWT.NONE, this, log, null); spoonhist = new SpoonHistory(tabfolder, SWT.NONE, this, log, null, spoonlog, shell); tabfolder.addKeyListener(defKeys); tabfolder.addKeyListener(modKeys); SpoonHistoryRefresher spoonHistoryRefresher = new SpoonHistoryRefresher(tiTabsHist, spoonhist); tabfolder.addSelectionListener(spoonHistoryRefresher); spoonlog.setSpoonHistoryRefresher(spoonHistoryRefresher); tiTabsGraph.setControl(spoongraph); tiTabsList.setControl(spoonlog); tiTabsHist.setControl(spoonhist); tabfolder.setSelection(0); sashform.addKeyListener(defKeys); sashform.addKeyListener(modKeys); int weights[] = props.getSashWeights(); sashform.setWeights(weights); sashform.setVisible(true); } public String getRepositoryName() { if (rep==null) return null; return rep.getRepositoryInfo().getName(); } public void newSelected() { log.logDebug(toString(), Messages.getString("Spoon.Log.NewSelected"));//"New Selected" // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call newConnection or newTrans if (ti.length>=1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent == null) { log.logDebug(toString(), Messages.getString("Spoon.Log.ElementHasNoParent"));//Element has no parent if (name.equalsIgnoreCase(STRING_CONNECTIONS)) newConnection(); if (name.equalsIgnoreCase(STRING_HOPS )) newHop(); if (name.equalsIgnoreCase(STRING_STEPS )) { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.WarningCreateNewSteps.Message"));//Please use the 'Base step types' below to create new steps. mb.setText(Messages.getString("Spoon.Dialog.WarningCreateNewSteps.Title"));//Tip! mb.open(); } //refreshTree(); } else { String strparent = parent.getText(); log.logDebug(toString(), Messages.getString("Spoon.Log.ElementHasParent")+strparent);//Element has parent: if (strparent.equalsIgnoreCase(STRING_CONNECTIONS)) newConnection(); if (strparent.equalsIgnoreCase(STRING_STEPS )) { log.logDebug(toString(), Messages.getString("Spoon.Log.NewHop"));//New hop! StepMeta from = transMeta.findStep( ti[0].getText() ); StepMeta to = transMeta.findStep( ti[1].getText() ); if (from!=null && to!=null) newHop(from, to); } TreeItem grandparent = parent.getParentItem(); if (grandparent!=null) { String strgrandparent = grandparent.getText(); if (strgrandparent.equalsIgnoreCase(STRING_BASE) || strgrandparent.equalsIgnoreCase(STRING_PLUGIN)) { newStep(); } } } } } public void editSelected() { // Determine what menu we selected from... 
TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { log.logDebug(toString(), Messages.getString("Spoon.Log.EDIT.ElementHasParent"));//(EDIT) Element has parent. String strparent = parent.getText(); if (strparent.equalsIgnoreCase(STRING_CONNECTIONS)) editConnection(name); if (strparent.equalsIgnoreCase(STRING_STEPS )) editStep(name); if (strparent.equalsIgnoreCase(STRING_HOPS )) editHop(name); TreeItem grandparent = parent.getParentItem(); if (grandparent!=null) { String strgrandparent = grandparent.getText(); if (strgrandparent.equalsIgnoreCase(STRING_BASE ) || strgrandparent.equalsIgnoreCase(STRING_PLUGIN ) ) { newStep(); } } } else { log.logDebug(toString(), Messages.getString("Spoon.Log.ElementHasNoParent"));//Element has no parent if (name.equalsIgnoreCase(STRING_CONNECTIONS)) newConnection(); if (name.equalsIgnoreCase(STRING_HOPS )) newHop(); } } } public void dupeSelected() { // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { log.logDebug(toString(), Messages.getString("Spoon.Log.DUPE.ElementHasParent"));//"(DUPE) Element has parent." String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) dupeConnection(name); if (type.equalsIgnoreCase(STRING_STEPS )) dupeStep(name); } } } /** * Copy selected tree item to the clipboard in XML format * */ public void clipSelected() { // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { log.logDebug(toString(), Messages.getString("Spoon.Log.DUPE.ElementHasParent"));//"(DUPE) Element has parent." String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) clipConnection(name); if (type.equalsIgnoreCase(STRING_STEPS )) clipStep(name); } } } public void delSelected() { // Determine what menu we selected from... int i; TreeItem ti[] = selectionTree.getSelection(); String name[] = new String[ti.length]; TreeItem parent[] = new TreeItem[ti.length]; for (i=0;i<ti.length;i++) { name[i] = ti[i].getText(); parent[i] = ti[i].getParentItem(); } // Then call editConnection or editStep or editTrans for (i=name.length-1;i>=0;i--) { log.logDebug(toString(), Messages.getString("Spoon.Log.DELETE.TryToDelete")+"#"+i+"/"+(ti.length-1)+" : "+name[i]);//(DELETE) Trying to delete if (parent[i] != null) { String type = parent[i].getText(); log.logDebug(toString(), Messages.getString("Spoon.Log.DELETE.ElementHasParent")+type);//(DELETE) Element has parent: if (type.equalsIgnoreCase(STRING_CONNECTIONS)) delConnection(name[i]); if (type.equalsIgnoreCase(STRING_STEPS )) delStep(name[i]); if (type.equalsIgnoreCase(STRING_HOPS )) delHop(name[i]); } } } public void sqlSelected() { // Determine what menu we selected from... 
int i; TreeItem ti[] = selectionTree.getSelection(); for (i=0;i<ti.length;i++) { String name = ti[i].getText(); TreeItem parent = ti[i].getParentItem(); String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) { DatabaseMeta ci = transMeta.findDatabase(name); SQLEditor sql = new SQLEditor(shell, SWT.NONE, ci, transMeta.getDbCache(), ""); sql.open(); } } } public void editConnection(String name) { DatabaseMeta db = transMeta.findDatabase(name); if (db!=null) { DatabaseMeta before = (DatabaseMeta)db.clone(); DatabaseDialog con = new DatabaseDialog(shell, SWT.NONE, log, db, props); con.setDatabases(transMeta.getDatabases()); String newname = con.open(); if (newname != null && newname.length()>0) // null: CANCEL { // Store undo/redo information DatabaseMeta after = (DatabaseMeta)db.clone(); addUndoChange(new DatabaseMeta[] { before }, new DatabaseMeta[] { after }, new int[] { transMeta.indexOfDatabase(db) } ); saveConnection(db); // The connection is saved, clear the changed flag. db.setChanged(false); if (!name.equalsIgnoreCase(newname)) refreshTree(true); } } setShellText(); } public void dupeConnection(String name) { DatabaseMeta db = transMeta.findDatabase(name); int pos = transMeta.indexOfDatabase(db); if (db!=null) { DatabaseMeta newdb = (DatabaseMeta)db.clone(); String dupename = Messages.getString("Spoon.Various.DupeName") +name; //"(copy of) " newdb.setName(dupename); transMeta.addDatabase(pos+1, newdb); refreshTree(); DatabaseDialog con = new DatabaseDialog(shell, SWT.NONE, log, newdb, props); String newname = con.open(); if (newname != null) // null: CANCEL { transMeta.removeDatabase(pos+1); transMeta.addDatabase(pos+1, newdb); if (!newname.equalsIgnoreCase(dupename)) refreshTree(); } else { addUndoNew(new DatabaseMeta[] { (DatabaseMeta)db.clone() }, new int[] { pos }); saveConnection(db); } } } public void clipConnection(String name) { DatabaseMeta db = transMeta.findDatabase(name); if (db!=null) { String xml = XMLHandler.getXMLHeader() + db.getXML(); toClipboard(xml); } } /** * Delete a database connection * @param name The name of the database connection. */ public void delConnection(String name) { DatabaseMeta db = transMeta.findDatabase(name); int pos = transMeta.indexOfDatabase(db); if (db!=null) { boolean worked=false; // delete from repository? if (rep!=null) { if (!rep.getUserInfo().isReadonly()) { try { long id_database = rep.getDatabaseID(db.getName()); rep.delDatabase(id_database); worked=true; } catch(KettleDatabaseException dbe) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorDeletingConnection.Title"), Messages.getString("Spoon.Dialog.ErrorDeletingConnection.Message",name), dbe);//"Error deleting connection ["+db+"] from repository!" } } else { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorDeletingConnection.Title"),Messages.getString("Spoon.Dialog.ErrorDeletingConnection.Message",name) , new KettleException(Messages.getString("Spoon.Dialog.Exception.ReadOnlyUser")));//"Error deleting connection ["+db+"] from repository!" //This user is read-only! 
} } if (rep==null || worked) { addUndoDelete(new DatabaseMeta[] { (DatabaseMeta)db.clone() }, new int[] { pos }); transMeta.removeDatabase(pos); } refreshTree(); } setShellText(); } public void editStep(String name) { log.logDebug(toString(), Messages.getString("Spoon.Log.EditStep") +name);//"Edit step: " editStepInfo(transMeta.findStep(name)); } public String editStepInfo(StepMeta stepMeta) { String stepname = null; if (stepMeta != null) { try { String name = stepMeta.getName(); // Before we do anything, let's store the situation the way it was... StepMeta before = (StepMeta) stepMeta.clone(); StepMetaInterface stepint = stepMeta.getStepMetaInterface(); StepDialogInterface dialog = stepint.getDialog(shell, stepMeta.getStepMetaInterface(), transMeta, name); dialog.setRepository(rep); stepname = dialog.open(); if (stepname != null) { // OK, so the step has changed... // // First, backup the situation for undo/redo StepMeta after = (StepMeta) stepMeta.clone(); addUndoChange(new StepMeta[] { before }, new StepMeta[] { after }, new int[] { transMeta.indexOfStep(stepMeta) }); // Then, store the size of the // See if the new name the user enter, doesn't collide with another step. // If so, change the stepname and warn the user! // String newname = stepname; StepMeta smeta = transMeta.findStep(newname, stepMeta); int nr = 2; while (smeta != null) { newname = stepname + " " + nr; smeta = transMeta.findStep(newname); nr++; } if (nr > 2) { stepname = newname; MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.StepnameExists.Message", stepname)); // $NON-NLS-1$ mb.setText(Messages.getString("Spoon.Dialog.StepnameExists.Title")); // $NON-NLS-1$ mb.open(); } stepMeta.setName(stepname); refreshTree(true); // Perhaps new connections were created in the step dialog. } else { // Scenario: change connections and click cancel... // Perhaps new connections were created in the step dialog? if (transMeta.haveConnectionsChanged()) { refreshTree(true); } } refreshGraph(); // name is displayed on the graph too. setShellText(); } catch (Throwable e) { if (shell.isDisposed()) return null; new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnableOpenDialog.Title"), Messages .getString("Spoon.Dialog.UnableOpenDialog.Message"), new Exception(e));//"Unable to open dialog for this step" } } return stepname; } public void dupeStep(String name) { log.logDebug(toString(), Messages.getString("Spoon.Log.DuplicateStep")+name);//Duplicate step: StepMeta stMeta = null, stepMeta = null, look=null; for (int i=0;i<transMeta.nrSteps() && stepMeta==null;i++) { look = transMeta.getStep(i); if (look.getName().equalsIgnoreCase(name)) { stepMeta=look; } } if (stepMeta!=null) { stMeta = (StepMeta)stepMeta.clone(); if (stMeta!=null) { String newname = transMeta.getAlternativeStepname(stepMeta.getName()); int nr=2; while (transMeta.findStep(newname)!=null) { newname = stepMeta.getName()+" (copy "+nr+")"; nr++; } stMeta.setName(newname); // Don't select this new step! 
stMeta.setSelected(false); Point loc = stMeta.getLocation(); stMeta.setLocation(loc.x+20, loc.y+20); transMeta.addStep(stMeta); addUndoNew(new StepMeta[] { (StepMeta)stMeta.clone() }, new int[] { transMeta.indexOfStep(stMeta) }); refreshTree(); refreshGraph(); } } } public void clipStep(String name) { log.logDebug(toString(), Messages.getString("Spoon.Log.CopyStepToClipboard")+name);//copy step to clipboard: StepMeta stepMeta = transMeta.findStep(name); if (stepMeta!=null) { String xml = stepMeta.getXML(); toClipboard(xml); } } public void pasteXML(String clipcontent, Point loc) { try { //System.out.println(clipcontent); Document doc = XMLHandler.loadXMLString(clipcontent); Node transnode = XMLHandler.getSubNode(doc, "transformation"); // De-select all, re-select pasted steps... transMeta.unselectAll(); Node stepsnode = XMLHandler.getSubNode(transnode, "steps"); int nr = XMLHandler.countNodes(stepsnode, "step"); log.logDebug(toString(), Messages.getString("Spoon.Log.FoundSteps",""+nr)+loc);//"I found "+nr+" steps to paste on location: " StepMeta steps[] = new StepMeta[nr]; //Point min = new Point(loc.x, loc.y); Point min = new Point(99999999,99999999); // Load the steps... for (int i=0;i<nr;i++) { Node stepnode = XMLHandler.getSubNodeByNr(stepsnode, "step", i); steps[i] = new StepMeta(log, stepnode, transMeta.getDatabases(), transMeta.getCounters()); if (loc!=null) { Point p = steps[i].getLocation(); if (min.x > p.x) min.x = p.x; if (min.y > p.y) min.y = p.y; } } // Load the hops... Node hopsnode = XMLHandler.getSubNode(transnode, "order"); nr = XMLHandler.countNodes(hopsnode, "hop"); log.logDebug(toString(), Messages.getString("Spoon.Log.FoundHops",""+nr));//"I found "+nr+" hops to paste." TransHopMeta hops[] = new TransHopMeta[nr]; ArrayList alSteps = new ArrayList(); for (int i=0;i<steps.length;i++) alSteps.add(steps[i]); for (int i=0;i<nr;i++) { Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i); hops[i] = new TransHopMeta(hopnode, alSteps); } // What's the difference between loc and min? // This is the offset: Point offset = new Point(loc.x-min.x, loc.y-min.y); // Undo/redo object positions... int position[] = new int[steps.length]; for (int i=0;i<steps.length;i++) { Point p = steps[i].getLocation(); String name = steps[i].getName(); steps[i].setLocation(p.x+offset.x, p.y+offset.y); steps[i].setDraw(true); // Check the name, find alternative... steps[i].setName( transMeta.getAlternativeStepname(name) ); transMeta.addStep(steps[i]); position[i] = transMeta.indexOfStep(steps[i]); } // Add the hops too... for (int i=0;i<hops.length;i++) { transMeta.addTransHop(hops[i]); } // Load the notes... Node notesnode = XMLHandler.getSubNode(transnode, "notepads"); nr = XMLHandler.countNodes(notesnode, "notepad"); log.logDebug(toString(), Messages.getString("Spoon.Log.FoundNotepads",""+nr));//"I found "+nr+" notepads to paste." NotePadMeta notes[] = new NotePadMeta[nr]; for (int i=0;i<notes.length;i++) { Node notenode = XMLHandler.getSubNodeByNr(notesnode, "notepad", i); notes[i] = new NotePadMeta(notenode); Point p = notes[i].getLocation(); notes[i].setLocation(p.x+offset.x, p.y+offset.y); transMeta.addNote(notes[i]); } // Set the source and target steps ... for (int i=0;i<steps.length;i++) { StepMetaInterface smi = steps[i].getStepMetaInterface(); smi.searchInfoAndTargetSteps(transMeta.getSteps()); } // Save undo information too... 
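/*
 * The pasted steps, hops and notes are registered as three separate undo
 * entries below. The boolean passed to the second and third addUndoNew()
 * calls (true) appears to chain those entries to the previous one so that a
 * single undo removes the whole pasted block at once; this is an assumption
 * based on the call pattern, the exact semantics live in the undo/redo
 * support of TransMeta.
 */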
addUndoNew(steps, position, false); int hoppos[] = new int[hops.length]; for (int i=0;i<hops.length;i++) hoppos[i] = transMeta.indexOfTransHop(hops[i]); addUndoNew(hops, hoppos, true); int notepos[] = new int[notes.length]; for (int i=0;i<notes.length;i++) notepos[i] = transMeta.indexOfNote(notes[i]); addUndoNew(notes, notepos, true); if (transMeta.haveStepsChanged()) { refreshTree(); refreshGraph(); } } catch(KettleException e) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnablePasteSteps.Title"),Messages.getString("Spoon.Dialog.UnablePasteSteps.Message") , e);//"Error pasting steps...", "I was unable to paste steps to this transformation" } } public void copySelected(StepMeta stepMeta[], NotePadMeta notePadMeta[]) { if (stepMeta==null || stepMeta.length==0) return; String xml = XMLHandler.getXMLHeader(); xml+="<transformation>"+Const.CR; xml+=" <steps>"+Const.CR; for (int i=0;i<stepMeta.length;i++) { xml+=stepMeta[i].getXML(); } xml+=" </steps>"+Const.CR; // // Also check for the hops in between the selected steps... // xml+="<order>"+Const.CR; if (stepMeta!=null) for (int i=0;i<stepMeta.length;i++) { for (int j=0;j<stepMeta.length;j++) { if (i!=j) { TransHopMeta hop = transMeta.findTransHop(stepMeta[i], stepMeta[j]); if (hop!=null) // Ok, we found one... { xml+=hop.getXML()+Const.CR; } } } } xml+=" </order>"+Const.CR; xml+=" <notepads>"+Const.CR; if (notePadMeta!=null) for (int i=0;i<notePadMeta.length;i++) { xml+= notePadMeta[i].getXML(); } xml+=" </notepads>"+Const.CR; xml+=" </transformation>"+Const.CR; toClipboard(xml); } public void delStep(String name) { log.logDebug(toString(), Messages.getString("Spoon.Log.DeleteStep")+name);//"Delete step: " int i, pos=0; StepMeta stepMeta = null, look=null; for (i=0;i<transMeta.nrSteps() && stepMeta==null;i++) { look = transMeta.getStep(i); if (look.getName().equalsIgnoreCase(name)) { stepMeta=look; pos=i; } } if (stepMeta!=null) { for (i=transMeta.nrTransHops()-1;i>=0;i--) { TransHopMeta hi = transMeta.getTransHop(i); if ( hi.getFromStep().equals(stepMeta) || hi.getToStep().equals(stepMeta) ) { addUndoDelete(new TransHopMeta[] { hi }, new int[] { transMeta.indexOfTransHop(hi) }, true); transMeta.removeTransHop(i); refreshTree(); } } transMeta.removeStep(pos); addUndoDelete(new StepMeta[] { stepMeta }, new int[] { pos }); refreshTree(); refreshGraph(); } else { log.logDebug(toString(),Messages.getString("Spoon.Log.UnableFindStepToDelete",name) );//"Couldn't find step ["+name+"] to delete..." } } public void editHop(String name) { TransHopMeta hi = transMeta.findTransHop(name); if (hi!=null) { // Backup situation BEFORE edit: TransHopMeta before = (TransHopMeta)hi.clone(); TransHopDialog hd = new TransHopDialog(shell, SWT.NONE, hi, transMeta); if (hd.open()!=null) { // Backup situation for redo/undo: TransHopMeta after = (TransHopMeta)hi.clone(); addUndoChange(new TransHopMeta[] { before }, new TransHopMeta[] { after }, new int[] { transMeta.indexOfTransHop(hi) } ); String newname = hi.toString(); if (!name.equalsIgnoreCase(newname)) { refreshTree(); refreshGraph(); // color, nr of copies... 
} } } setShellText(); } public void delHop(String name) { int i,n; n=transMeta.nrTransHops(); for (i=0;i<n;i++) { TransHopMeta hi = transMeta.getTransHop(i); if (hi.toString().equalsIgnoreCase(name)) { addUndoDelete(new Object[] { (TransHopMeta)hi.clone() }, new int[] { transMeta.indexOfTransHop(hi) }); transMeta.removeTransHop(i); refreshTree(); refreshGraph(); return; } } setShellText(); } public void newHop(StepMeta fr, StepMeta to) { TransHopMeta hi = new TransHopMeta(fr, to); TransHopDialog hd = new TransHopDialog(shell, SWT.NONE, hi, transMeta); if (hd.open()!=null) { boolean error=false; if (transMeta.findTransHop(hi.getFromStep(), hi.getToStep())!=null) { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.HopExists.Message"));//"This hop already exists!" mb.setText(Messages.getString("Spoon.Dialog.HopExists.Title"));//Error! mb.open(); error=true; } if (transMeta.hasLoop(fr) || transMeta.hasLoop(to)) { refreshTree(); refreshGraph(); MessageBox mb = new MessageBox(shell, SWT.YES | SWT.ICON_WARNING ); mb.setMessage(Messages.getString("Spoon.Dialog.AddingHopCausesLoop.Message"));//Adding this hop causes a loop in the transformation. Loops are not allowed! mb.setText(Messages.getString("Spoon.Dialog.AddingHopCausesLoop.Title"));//Warning! mb.open(); error=true; } if (!error) { transMeta.addTransHop(hi); addUndoNew(new TransHopMeta[] { (TransHopMeta)hi.clone() }, new int[] { transMeta.indexOfTransHop(hi) }); hi.getFromStep().drawStep(); hi.getToStep().drawStep(); refreshTree(); refreshGraph(); } } } public void newHop() { newHop(null, null); } public void newConnection() { DatabaseMeta db = new DatabaseMeta(); DatabaseDialog con = new DatabaseDialog(shell, SWT.APPLICATION_MODAL, log, db, props); String con_name = con.open(); if (con_name!=null && con_name.length()>0) { transMeta.addDatabase(db); addUndoNew(new DatabaseMeta[] { (DatabaseMeta)db.clone() }, new int[] { transMeta.indexOfDatabase(db) }); saveConnection(db); refreshTree(); } } public void saveConnection(DatabaseMeta db) { // Also add to repository? if (rep!=null) { if (!rep.userinfo.isReadonly()) { try { db.saveRep(rep); log.logDetailed(toString(), Messages.getString("Spoon.Log.SavedDatabaseConnection",db.getDatabaseName()));//"Saved database connection ["+db+"] to the repository." // Put a commit behind it! rep.commit(); } catch(KettleException ke) { rep.rollback(); // In case of failure: undo changes! new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorSavingConnection.Title"),Messages.getString("Spoon.Dialog.ErrorSavingConnection.Message",db.getDatabaseName()), ke);//"Can't save...","Error saving connection ["+db+"] to repository!" } } else { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnableSave.Title"),Messages.getString("Spoon.Dialog.ErrorSavingConnection.Message",db.getDatabaseName()), new KettleException(Messages.getString("Spoon.Dialog.Exception.ReadOnlyRepositoryUser")));//This repository user is read-only! } } } /** * Shows a 'model has changed' warning if required * @return true if nothing has changed or the changes are rejected by the user. */ public boolean showChangedWarning() { boolean answer = true; if (transMeta.hasChanged()) { MessageBox mb = new MessageBox(shell, SWT.YES | SWT.NO | SWT.CANCEL | SWT.ICON_WARNING ); mb.setMessage(Messages.getString("Spoon.Dialog.PromptSave.Message"));//"This model has changed. Do you want to save it?" 
mb.setText(Messages.getString("Spoon.Dialog.PromptSave.Title")); int reply = mb.open(); if (reply==SWT.YES) { answer=saveFile(); } else { if (reply==SWT.CANCEL) { answer = false; } else { answer = true; } } } return answer; } public void openRepository() { int perms[] = new int[] { PermissionMeta.TYPE_PERMISSION_TRANSFORMATION }; RepositoriesDialog rd = new RepositoriesDialog(disp, SWT.NONE, perms, APP_NAME); rd.getShell().setImage(GUIResource.getInstance().getImageSpoon()); if (rd.open()) { // Close previous repository... if (rep!=null) { rep.disconnect(); } rep = new Repository(log, rd.getRepository(), rd.getUser()); try { rep.connect(APP_NAME); } catch(KettleException ke) { rep=null; new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorConnectingRepository.Title"), Messages.getString("Spoon.Dialog.ErrorConnectingRepository.Message",Const.CR), ke); //$NON-NLS-1$ //$NON-NLS-2$ } // Set for the existing databases, the ID's at -1! for (int i=0;i<transMeta.nrDatabases();i++) { transMeta.getDatabase(i).setID(-1L); } // Set for the existing transformation the ID at -1! transMeta.setID(-1L); // Keep track of the old databases for now. ArrayList oldDatabases = transMeta.getDatabases(); // In order to re-match the databases on name (not content), we need to load the databases from the new repository. // NOTE: for purposes such as DEVELOP - TEST - PRODUCTION sycles. // first clear the list of databases. transMeta.setDatabases(new ArrayList()); // Read them from the new repository. readDatabases(); /* for (int i=0;i<transMeta.nrDatabases();i++) { System.out.println("NEW REP: ["+transMeta.getDatabase(i).getName()+"]"); } */ // Then we need to re-match the databases at save time... for (int i=0;i<oldDatabases.size();i++) { DatabaseMeta oldDatabase = (DatabaseMeta) oldDatabases.get(i); DatabaseMeta newDatabase = Const.findDatabase(transMeta.getDatabases(), oldDatabase.getName()); // If it exists, change the settings... if (newDatabase!=null) { // System.out.println("Found the new database in the repository ["+oldDatabase.getName()+"]"); // A database connection with the same name exists in the new repository. // Change the old connections to reflect the settings in the new repository oldDatabase.setDatabaseInterface(newDatabase.getDatabaseInterface()); } else { // System.out.println("Couldn't find the new database in the repository ["+oldDatabase.getName()+"]"); // The old database is not present in the new repository: simply add it to the list. // When the transformation gets saved, it will be added to the repository. transMeta.addDatabase(oldDatabase); } } // For the existing transformation, change the directory too: // Try to find the same directory in the new repository... RepositoryDirectory redi = rep.getDirectoryTree().findDirectory(transMeta.getDirectory().getPath()); if (redi!=null) { transMeta.setDirectory(redi); } else { transMeta.setDirectory(rep.getDirectoryTree()); // the root is the default! } refreshTree(true); setShellText(); } else { // Not cancelled? --> Clear repository... if (!rd.isCancelled()) { closeRepository(); } } } public void exploreRepository() { if (rep!=null) { RepositoryExplorerDialog erd = new RepositoryExplorerDialog(shell, SWT.NONE, rep, rep.getUserInfo()); String objname = erd.open(); if (objname!=null) { String object_type = erd.getObjectType(); RepositoryDirectory repdir = erd.getObjectDirectory(); // System.out.println("Load ["+object_type+"] --> ["+objname+"] from dir ["+(repdir==null)+"]"); // Try to open it as a transformation. 
if (object_type.equals(RepositoryExplorerDialog.STRING_TRANSFORMATIONS)) { if (showChangedWarning()) { try { transMeta = new TransMeta(rep, objname, repdir); transMeta.clearChanged(); setFilename(objname); refreshTree(); refreshGraph(); } catch(KettleException e) { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.ErrorOpening.Message")+objname+Const.CR+e.getMessage());//"Error opening : " mb.setText(Messages.getString("Spoon.Dialog.ErrorOpening.Title")); mb.open(); } } } } } } public void editRepositoryUser() { if (rep!=null) { UserInfo userinfo = rep.getUserInfo(); UserDialog ud = new UserDialog(shell, SWT.NONE, log, props, rep, userinfo); UserInfo ui = ud.open(); if (!userinfo.isReadonly()) { if (ui!=null) { try { ui.saveRep(rep); } catch(KettleException e) { MessageBox mb = new MessageBox(shell, SWT.ICON_WARNING | SWT.OK); mb.setMessage(Messages.getString("Spoon.Dialog.UnableChangeUser.Message")+Const.CR+e.getMessage());//Sorry, I was unable to change this user in the repository: mb.setText(Messages.getString("Spoon.Dialog.UnableChangeUser.Title"));//"Edit user" mb.open(); } } } else { MessageBox mb = new MessageBox(shell, SWT.ICON_WARNING | SWT.OK); mb.setMessage(Messages.getString("Spoon.Dialog.NotAllowedChangeUser.Message"));//"Sorry, you are not allowed to change this user." mb.setText(Messages.getString("Spoon.Dialog.NotAllowedChangeUser.Title")); mb.open(); } } } public void readDatabases() { transMeta.readDatabases(rep); } public void closeRepository() { if (rep!=null) rep.disconnect(); rep = null; setShellText(); } public void openFile(boolean importfile) { if (showChangedWarning()) { if (rep==null || importfile) // Load from XML { FileDialog dialog = new FileDialog(shell, SWT.OPEN); // dialog.setFilterPath("C:\\Projects\\kettle\\source\\"); dialog.setFilterExtensions(Const.STRING_TRANS_FILTER_EXT); dialog.setFilterNames(Const.STRING_TRANS_FILTER_NAMES); String fname = dialog.open(); if (fname!=null) { try { transMeta = new TransMeta(fname); props.addLastFile(Props.TYPE_PROPERTIES_SPOON, fname, Const.FILE_SEPARATOR, false, ""); addMenuLast(); if (!importfile) transMeta.clearChanged(); setFilename(fname); } catch(KettleException e) { clear(); MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.ErrorOpening.Message")+fname+Const.CR+e.getMessage());//"Error opening : " mb.setText(Messages.getString("Spoon.Dialog.ErrorOpening.Title"));//"Error!" mb.open(); } refreshGraph(); refreshTree(true); refreshHistory(); } } else // Read a transformation from the repository! { SelectObjectDialog sod = new SelectObjectDialog(shell, props, rep, true, false, false); String transname = sod.open(); RepositoryDirectory repdir = sod.getDirectory(); if (transname!=null && repdir!=null) { TransLoadProgressDialog tlpd = new TransLoadProgressDialog(shell, rep, transname, repdir); TransMeta transInfo = tlpd.open(); if (transInfo!=null) { transMeta = transInfo; // transMeta = new TransInfo(log, rep, transname, repdir); log.logDetailed(toString(),Messages.getString("Spoon.Log.LoadToTransformation",transname,repdir.getDirectoryName()) );//"Transformation ["+transname+"] in directory ["+repdir+"] loaded from the repository." 
//System.out.println("name="+transMeta.getName()); props.addLastFile(Props.TYPE_PROPERTIES_SPOON, transname, repdir.getPath(), true, rep.getName()); addMenuLast(); transMeta.clearChanged(); setFilename(transname); } refreshGraph(); refreshTree(true); refreshHistory(); } } } } public void newFile() { if (showChangedWarning()) { clear(); loadRepositoryObjects(); // Add databases if connected to repository setFilename(null); refreshTree(true); refreshGraph(); refreshHistory(); } } public void loadRepositoryObjects() { // Load common database info from active repository... if (rep!=null) { transMeta.readDatabases(rep); } } public boolean quitFile() { boolean exit = true; boolean showWarning = true; log.logDetailed(toString(), Messages.getString("Spoon.Log.QuitApplication"));//"Quit application." saveSettings(); if (transMeta.hasChanged()) { MessageBox mb = new MessageBox(shell, SWT.YES | SWT.NO | SWT.CANCEL | SWT.ICON_WARNING ); mb.setMessage(Messages.getString("Spoon.Dialog.SaveChangedFile.Message"));//"File has changed! Do you want to save first?" mb.setText(Messages.getString("Spoon.Dialog.SaveChangedFile.Title"));//"Warning!" int answer = mb.open(); switch(answer) { case SWT.YES: exit=saveFile(); showWarning=false; break; case SWT.NO: exit=true; showWarning=false; break; case SWT.CANCEL: exit=false; showWarning=false; break; } } // System.out.println("exit="+exit+", showWarning="+showWarning+", running="+spoonlog.isRunning()+", showExitWarning="+props.showExitWarning()); // Show warning on exit when spoon is still running // Show warning on exit when a warning needs to be displayed, but only if we didn't ask to save before. (could have pressed cancel then!) // if ( (exit && spoonlog.isRunning() ) || (exit && showWarning && props.showExitWarning() ) ) { String message = Messages.getString("Spoon.Message.Warning.PromptExit"); //"Are you sure you want to exit?" if (spoonlog.isRunning()) message = Messages.getString("Spoon.Message.Warning.PromptExitWhenRunTransformation");//There is a running transformation. Are you sure you want to exit? MessageDialogWithToggle md = new MessageDialogWithToggle(shell, Messages.getString("System.Warning"),//"Warning!" null, message, MessageDialog.WARNING, new String[] { Messages.getString("Spoon.Message.Warning.Yes"), Messages.getString("Spoon.Message.Warning.No") },//"Yes", "No" 1, Messages.getString("Spoon.Message.Warning.NotShowWarning"),//"Please, don't show this warning anymore." !props.showExitWarning() ); int idx = md.open(); props.setExitWarningShown(!md.getToggleState()); props.saveProps(); if (idx==1) exit=false; // No selected: don't exit! else exit=true; } if (exit) dispose(); return exit; } public boolean saveFile() { boolean saved=false; log.logDetailed(toString(), Messages.getString("Spoon.Log.SaveToFileOrRepository"));//"Save to file or repository..." 
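// Save dispatch: a live repository connection always wins; otherwise the
// transformation is written back to its existing XML file, or saveFileAs()
// is used to ask for a name first. Afterwards the database cache is flushed
// to disk when the "use DB cache" option is enabled.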
if (rep!=null) { saved=saveRepository(); } else { if (transMeta.getFilename()!=null) { saved=save(transMeta.getFilename()); } else { saved=saveFileAs(); } } try { if (props.useDBCache()) transMeta.getDbCache().saveCache(log); } catch(KettleException e) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorSavingDatabaseCache.Title"), Messages.getString("Spoon.Dialog.ErrorSavingDatabaseCache.Message"), e);//"An error occured saving the database cache to disk" } return saved; } public boolean saveRepository() { return saveRepository(false); } public boolean saveRepository(boolean ask_name) { log.logDetailed(toString(), Messages.getString("Spoon.Log.SaveToRepository"));//"Save to repository..." if (rep!=null) { boolean answer = true; boolean ask = ask_name; while (answer && ( ask || transMeta.getName()==null || transMeta.getName().length()==0 ) ) { if (!ask) { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_WARNING); mb.setMessage(Messages.getString("Spoon.Dialog.PromptTransformationName.Message"));//"Please give this transformation a name before saving it in the database." mb.setText(Messages.getString("Spoon.Dialog.PromptTransformationName.Title"));//"Transformation has no name." mb.open(); } ask=false; answer = setTrans(); // System.out.println("answer="+answer+", ask="+ask+", transMeta.getName()="+transMeta.getName()); } if (answer && transMeta.getName()!=null && transMeta.getName().length()>0) { if (!rep.getUserInfo().isReadonly()) { int response = SWT.YES; if (transMeta.showReplaceWarning(rep)) { MessageBox mb = new MessageBox(shell, SWT.YES | SWT.NO | SWT.ICON_QUESTION); mb.setMessage(Messages.getString("Spoon.Dialog.PromptOverwriteTransformation.Message",transMeta.getName(),Const.CR));//"There already is a transformation called ["+transMeta.getName()+"] in the repository."+Const.CR+"Do you want to overwrite the transformation?" mb.setText(Messages.getString("Spoon.Dialog.PromptOverwriteTransformation.Title"));//"Overwrite?" response = mb.open(); } boolean saved=false; if (response == SWT.YES) { shell.setCursor(cursor_hourglass); // Keep info on who & when this transformation was changed... transMeta.setModifiedDate( new Value("MODIFIED_DATE", Value.VALUE_TYPE_DATE) ); transMeta.getModifiedDate().sysdate(); transMeta.setModifiedUser( rep.getUserInfo().getLogin() ); TransSaveProgressDialog tspd = new TransSaveProgressDialog(log, props, shell, rep, transMeta); if (tspd.open()) { saved=true; if (!props.getSaveConfirmation()) { MessageDialogWithToggle md = new MessageDialogWithToggle(shell, Messages.getString("Spoon.Message.Warning.SaveOK"), //"Save OK!" null, Messages.getString("Spoon.Message.Warning.TransformationWasStored"),//"This transformation was stored in repository" MessageDialog.QUESTION, new String[] { Messages.getString("Spoon.Message.Warning.OK") },//"OK!" 0, Messages.getString("Spoon.Message.Warning.NotShowThisMessage"),//"Don't show this message again." props.getSaveConfirmation() ); md.open(); props.setSaveConfirmation(md.getToggleState()); } // Handle last opened files... 
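// The saved transformation is pushed onto the most-recently-used list
// (name, repository directory, "from repository" flag and repository name);
// addMenuLast() then rebuilds the bottom of the File menu so the entry shows
// up with a CTRL+1..9 accelerator, exactly like file-based entries.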
props.addLastFile(Props.TYPE_PROPERTIES_SPOON, transMeta.getName(), transMeta.getDirectory().getPath(), true, getRepositoryName()); saveSettings(); addMenuLast(); setShellText(); } shell.setCursor(null); } return saved; } else { MessageBox mb = new MessageBox(shell, SWT.CLOSE | SWT.ICON_ERROR); mb.setMessage(Messages.getString("Spoon.Dialog.OnlyreadRepository.Message"));//"Sorry, the user you're logged on with, can only read from the repository" mb.setText(Messages.getString("Spoon.Dialog.OnlyreadRepository.Title"));//"Transformation not saved!" mb.open(); } } } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); mb.setMessage(Messages.getString("Spoon.Dialog.NoRepositoryConnection.Message"));//"There is no repository connection available." mb.setText(Messages.getString("Spoon.Dialog.NoRepositoryConnection.Title"));//"No repository available." mb.open(); } return false; } public boolean saveFileAs() { boolean saved=false; log.logBasic(toString(), Messages.getString("Spoon.Log.SaveAs"));//"Save as..." if (rep!=null) { transMeta.setID(-1L); saved=saveRepository(true); } else { saved=saveXMLFile(); } return saved; } private boolean saveXMLFile() { boolean saved=false; FileDialog dialog = new FileDialog(shell, SWT.SAVE); dialog.setFilterPath("C:\\Projects\\kettle\\source\\"); dialog.setFilterExtensions(Const.STRING_TRANS_FILTER_EXT); dialog.setFilterNames(Const.STRING_TRANS_FILTER_NAMES); String fname = dialog.open(); if (fname!=null) { // Is the filename ending on .ktr, .xml? boolean ending=false; for (int i=0;i<Const.STRING_TRANS_FILTER_EXT.length-1;i++) { if (fname.endsWith(Const.STRING_TRANS_FILTER_EXT[i].substring(1))) { ending=true; } } if (fname.endsWith(Const.STRING_TRANS_DEFAULT_EXT)) ending=true; if (!ending) { fname+=Const.STRING_TRANS_DEFAULT_EXT; } // See if the file already exists... File f = new File(fname); int id = SWT.YES; if (f.exists()) { MessageBox mb = new MessageBox(shell, SWT.NO | SWT.YES | SWT.ICON_WARNING); mb.setMessage(Messages.getString("Spoon.Dialog.PromptOverwriteFile.Message"));//"This file already exists. Do you want to overwrite it?" mb.setText(Messages.getString("Spoon.Dialog.PromptOverwriteFile.Title"));//"This file already exists!" id = mb.open(); } if (id==SWT.YES) { saved=save(fname); setFilename(fname); } } return saved; } private boolean save(String fname) { boolean saved = false; String xml = XMLHandler.getXMLHeader() + transMeta.getXML(); try { DataOutputStream dos = new DataOutputStream(new FileOutputStream(new File(fname))); dos.write(xml.getBytes(Const.XML_ENCODING)); dos.close(); saved=true; // Handle last opened files... props.addLastFile(Props.TYPE_PROPERTIES_SPOON, fname, Const.FILE_SEPARATOR, false, ""); saveSettings(); addMenuLast(); transMeta.clearChanged(); setShellText(); log.logDebug(toString(), Messages.getString("Spoon.Log.FileWritten")+" ["+fname+"]"); //"File written to } catch(Exception e) { log.logDebug(toString(), Messages.getString("Spoon.Log.ErrorOpeningFileForWriting")+e.toString());//"Error opening file for writing! 
--> " MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); mb.setMessage(Messages.getString("Spoon.Dialog.ErrorSavingFile.Message")+Const.CR+e.toString());//"Error saving file:" mb.setText(Messages.getString("Spoon.Dialog.ErrorSavingFile.Title"));//"ERROR" mb.open(); } return saved; } public void helpAbout() { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION | SWT.CENTER); String mess = Messages.getString("System.ProductInfo")+Const.VERSION+Const.CR+Const.CR+Const.CR;//Kettle - Spoon version mess+=Messages.getString("System.CompanyInfo")+Const.CR+" "+Messages.getString("System.ProductWebsiteUrl")+Const.CR; //(c) 2001-2004 i-Bridge bvba www.kettle.be mb.setMessage(mess); mb.setText(APP_NAME); mb.open(); } public void editUnselectAll() { transMeta.unselectAll(); spoongraph.redraw(); } public void editSelectAll() { transMeta.selectAll(); spoongraph.redraw(); } public void editOptions() { EnterOptionsDialog eod = new EnterOptionsDialog(shell, props); if (eod.open()!=null) { props.saveProps(); loadSettings(); changeLooks(); MessageBox mb = new MessageBox(shell, SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.PleaseRestartApplication.Message")); mb.setText(Messages.getString("Spoon.Dialog.PleaseRestartApplication.Title")); mb.open(); } } public int getTreePosition(TreeItem ti, String item) { if (ti!=null) { TreeItem items[] = ti.getItems(); for (int x=0;x<items.length;x++) { if (items[x].getText().equalsIgnoreCase(item)) { return x; } } } return -1; } public void refreshTree() { refreshTree(false); refreshPluginHistory(); } /** * Refresh the object selection tree (on the left of the screen) * @param complete true refreshes the complete tree, false tries to do a differential update to avoid flickering. */ public void refreshTree(boolean complete) { if (shell.isDisposed()) return; if (!transMeta.hasChanged() && !complete) return; // Nothing changed: nothing to do! int idx; TreeItem ti[]; // Refresh the connections... // if (transMeta.haveConnectionsChanged() || complete) { tiConn.setText(STRING_CONNECTIONS); // TreeItem tiConn= this.tiConn (TreeItem)widgets.getWidget(STRING_CONNECTIONS); ti = tiConn.getItems(); // In complete refresh: delete all items first if (complete) { for (int i=0;i<ti.length;i++) ti[i].dispose(); ti = tiConn.getItems(); } // First delete no longer used items... for (int i=0;i<ti.length;i++) { String str = ti[i].getText(); DatabaseMeta inf = transMeta.findDatabase(str); if (inf!=null) idx = transMeta.indexOfDatabase(inf); else idx=-1; if (idx<0 || idx>i) ti[i].dispose(); } ti = tiConn.getItems(); // Insert missing items in tree... int j=0; for (int i=0;i<transMeta.nrDatabases();i++) { DatabaseMeta inf = transMeta.getDatabase(i); String con_name = inf.getName(); String ti_name = ""; if (j<ti.length) ti_name = ti[j].getText(); if (!con_name.equalsIgnoreCase(ti_name)) { // insert at position j in tree TreeItem newitem = new TreeItem(tiConn, j); newitem.setText(inf.getName()); newitem.setForeground(GUIResource.getInstance().getColorBlack()); newitem.setImage(GUIResource.getInstance().getImageConnection()); j++; ti = tiConn.getItems(); } else { j++; } } // tiConn.setExpanded(true); } //ni.setImage(gv.hop_image); //ni.setImage(gv.step_images_small[steptype]); // Refresh the Steps... 
// if (transMeta.haveStepsChanged() || complete) { tiStep.setText(STRING_STEPS); ti = tiStep.getItems(); // In complete refresh: delete all items first if (complete) { for (int i=0;i<ti.length;i++) ti[i].dispose(); ti = tiStep.getItems(); } // First delete no longer used items... log.logDebug(toString(), Messages.getString("Spoon.Log.CheckSteps"));//"check steps" for (int i=0;i<ti.length;i++) { String str = ti[i].getText(); log.logDebug(toString(), " "+Messages.getString("Spoon.Log.CheckStepTreeItem")+i+" : ["+str+"]"); StepMeta inf = transMeta.findStep(str); if (inf!=null) idx = transMeta.indexOfStep(inf); else idx=-1; if (idx<0 || idx>i) { log.logDebug(toString(), " "+ Messages.getString("Spoon.Log.RemoveTreeItem")+ "["+str+"]");//remove tree item ti[i].dispose(); } } ti = tiStep.getItems(); // Insert missing items in tree... int j=0; for (int i=0;i<transMeta.nrSteps();i++) { StepMeta inf = transMeta.getStep(i); String step_name = inf.getName(); String step_id = inf.getStepID(); String ti_name = ""; if (j<ti.length) ti_name = ti[j].getText(); if (!step_name.equalsIgnoreCase(ti_name)) { // insert at position j in tree TreeItem newitem = new TreeItem(tiStep, j); newitem.setText(inf.getName()); // Set the small image... Image img = (Image)GUIResource.getInstance().getImagesStepsSmall().get(step_id); newitem.setImage(img); j++; ti = tiStep.getItems(); } else { j++; } } // See if the colors are still OK! for (int i=0;i<ti.length;i++) { StepMeta inf = transMeta.findStep(ti[i].getText()); Color col = ti[i].getForeground(); Color newcol; if (transMeta.isStepUsedInTransHops(inf)) newcol=GUIResource.getInstance().getColorBlack(); else newcol=GUIResource.getInstance().getColorGray(); if (!newcol.equals(col)) ti[i].setForeground(newcol); } //tiStep.setExpanded(true); } // Refresh the Hops... // if (transMeta.haveHopsChanged() || complete) { tiHops.setText(STRING_HOPS); ti = tiHops.getItems(); // In complete refresh: delete all items first if (complete) { for (int i=0;i<ti.length;i++) ti[i].dispose(); ti = tiHops.getItems(); } // First delete no longer used items... for (int i=0;i<ti.length;i++) { String str = ti[i].getText(); TransHopMeta inf = transMeta.findTransHop(str); if (inf!=null) idx = transMeta.indexOfTransHop(inf); else idx=-1; if (idx<0 || idx>i) ti[i].dispose(); } ti = tiHops.getItems(); // Insert missing items in tree... 
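// Hops are matched by their toString() name; whenever the tree item at position j differs, a new item carrying the hop image is inserted at that position.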
int j=0; for (int i=0;i<transMeta.nrTransHops();i++) { TransHopMeta inf = transMeta.getTransHop(i); String trans_name = inf.toString(); String ti_name = ""; if (j<ti.length) ti_name = ti[j].getText(); if (!trans_name.equalsIgnoreCase(ti_name)) { // insert at position j in tree TreeItem newitem = new TreeItem(tiHops, j); newitem.setText(inf.toString()); newitem.setForeground(GUIResource.getInstance().getColorBlack()); newitem.setImage(GUIResource.getInstance().getImageHop()); j++; ti = tiHops.getItems(); } else { j++; } } // tiTrns.setExpanded(false); } selectionTree.setFocus(); setShellText(); } public void refreshGraph() { if (shell.isDisposed()) return; spoongraph.redraw(); setShellText(); } public void refreshHistory() { spoonhist.refreshHistory(); } public StepMeta newStep() { return newStep(true, true); } public StepMeta newStep(boolean openit, boolean rename) { TreeItem ti[] = selectionTree.getSelection(); StepMeta inf = null; if (ti.length==1) { String steptype = ti[0].getText(); log.logDebug(toString(), Messages.getString("Spoon.Log.NewStep")+steptype);//"New step: " inf = newStep(steptype, steptype, openit, rename); } return inf; } /** * Allocate new step, optionally open and rename it. * * @param name Name of the new step * @param description Description of the type of step * @param openit Open the dialog for this step? * @param rename Rename this step? * * @return The newly created StepMeta object. * */ public StepMeta newStep(String name, String description, boolean openit, boolean rename) { StepMeta inf = null; // See if we need to rename the step to avoid doubles! if (rename && transMeta.findStep(name)!=null) { int i=2; String newname = name+" "+i; while (transMeta.findStep(newname)!=null) { i++; newname = name+" "+i; } name=newname; } StepLoader steploader = StepLoader.getInstance(); StepPlugin stepPlugin = null; try { stepPlugin = steploader.findStepPluginWithDescription(description); if (stepPlugin!=null) { StepMetaInterface info = BaseStep.getStepInfo(stepPlugin, steploader); info.setDefault(); if (openit) { StepDialogInterface dialog = info.getDialog(shell, info, transMeta, name); name = dialog.open(); } inf=new StepMeta(log, stepPlugin.getID()[0], name, info); if (name!=null) // OK pressed in the dialog: we have a step-name { String newname=name; StepMeta stepMeta = transMeta.findStep(newname); int nr=2; while (stepMeta!=null) { newname = name+" "+nr; stepMeta = transMeta.findStep(newname); nr++; } if (nr>2) { inf.setName(newname); MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.ChangeStepname.Message",newname));//"This stepname already exists. Spoon changed the stepname to ["+newname+"]" mb.setText(Messages.getString("Spoon.Dialog.ChangeStepname.Title"));//"Info!" mb.open(); } inf.setLocation(20, 20); // default location at (20,20) transMeta.addStep(inf); // Save for later: // if openit is false: we drag&drop it onto the canvas! if (openit) { addUndoNew(new StepMeta[] { inf }, new int[] { transMeta.indexOfStep(inf) }); } // Also store it in the pluginHistory list... props.addPluginHistory(stepPlugin.getID()[0]); refreshTree(); } else { return null; // Cancel pressed in dialog. } setShellText(); } } catch(KettleException e) { String filename = stepPlugin.getErrorHelpFile(); if (stepPlugin!=null && filename!=null) { // OK, in stead of a normal error message, we give back the content of the error help file... 
(HTML) try { StringBuffer content=new StringBuffer(); System.out.println("Filename = "+filename); FileInputStream fis = new FileInputStream(new File(filename)); int ch = fis.read(); while (ch>=0) { content.append( (char)ch); ch = fis.read(); } System.out.println("Content = "+content); ShowBrowserDialog sbd = new ShowBrowserDialog(shell, Messages.getString("Spoon.Dialog.ErrorHelpText.Title"), content.toString());//"Error help text" sbd.open(); } catch(Exception ex) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorShowingHelpText.Title"), Messages.getString("Spoon.Dialog.ErrorShowingHelpText.Message"), ex);//"Error showing help text" } } else { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnableCreateNewStep.Title"),Messages.getString("Spoon.Dialog.UnableCreateNewStep.Message") , e);//"Error creating step" "I was unable to create a new step" } return null; } catch(Throwable e) { if (!shell.isDisposed()) new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorCreatingStep.Title"), Messages.getString("Spoon.Dialog.UnableCreateNewStep.Message"), new Exception(e));//"Error creating step" return null; } return inf; } private void setTreeImages() { tiConn.setImage(GUIResource.getInstance().getImageConnection()); tiHops.setImage(GUIResource.getInstance().getImageHop()); tiStep.setImage(GUIResource.getInstance().getImageBol()); tiBase.setImage(GUIResource.getInstance().getImageBol()); tiPlug.setImage(GUIResource.getInstance().getImageBol()); TreeItem tiBaseCat[]=tiBase.getItems(); for (int x=0;x<tiBaseCat.length;x++) { tiBaseCat[x].setImage(GUIResource.getInstance().getImageBol()); TreeItem ti[] = tiBaseCat[x].getItems(); for (int i=0;i<ti.length;i++) { TreeItem stepitem = ti[i]; String description = stepitem.getText(); StepLoader steploader = StepLoader.getInstance(); StepPlugin sp = steploader.findStepPluginWithDescription(description); if (sp!=null) { Image stepimg = (Image)GUIResource.getInstance().getImagesStepsSmall().get(sp.getID()[0]); if (stepimg!=null) { stepitem.setImage(stepimg); } } } } TreeItem tiPlugCat[]=tiPlug.getItems(); for (int x=0;x<tiPlugCat.length;x++) { tiPlugCat[x].setImage(GUIResource.getInstance().getImageBol()); TreeItem ti[] = tiPlugCat[x].getItems(); for (int i=0;i<ti.length;i++) { TreeItem stepitem = ti[i]; String description = stepitem.getText(); StepLoader steploader = StepLoader.getInstance(); StepPlugin sp = steploader.findStepPluginWithDescription(description); if (sp!=null) { Image stepimg = (Image)GUIResource.getInstance().getImagesStepsSmall().get(sp.getID()); if (stepimg!=null) { stepitem.setImage(stepimg); } } } } } public DatabaseMeta getConnection(String name) { int i; for (i=0;i<transMeta.nrDatabases();i++) { DatabaseMeta ci = transMeta.getDatabase(i); if (ci.getName().equalsIgnoreCase(name)) { return ci; } } return null; } public void setShellText() { String fname = transMeta.getFilename(); if (shell.isDisposed()) return; if (rep!=null) { String repository = "["+getRepositoryName()+"]"; String transname = transMeta.getName(); if (transname==null) transname=Messages.getString("Spoon.Various.NoName");//"[no name]" shell.setText(APPL_TITLE+" - "+repository+" "+transname+(transMeta.hasChanged()?(" "+Messages.getString("Spoon.Various.Changed")):""));//(changed) } else { String repository = Messages.getString("Spoon.Various.NoRepository");//"[no repository]" if (fname!=null) { shell.setText(APPL_TITLE+" - "+repository+" File: "+fname+(transMeta.hasChanged()?(" 
"+Messages.getString("Spoon.Various.Changed")):"")); } else { shell.setText(APPL_TITLE+" - "+repository+" "+(transMeta.hasChanged()?(" "+Messages.getString("Spoon.Various.Changed")):"")); } } } public void setFilename(String fname) { if (fname!=null) transMeta.setFilename(fname); setShellText(); } private void printFile() { PrintSpool ps = new PrintSpool(); Printer printer = ps.getPrinter(shell); // Create an image of the screen Point max = transMeta.getMaximum(); Image img = spoongraph.getTransformationImage(printer, max.x, max.y); ps.printImage(shell, props, img); img.dispose(); ps.dispose(); } private boolean setTrans() { TransDialog tid = new TransDialog(shell, SWT.NONE, transMeta, rep); TransMeta ti = tid.open(); setShellText(); return ti!=null; } public void saveSettings() { WindowProperty winprop = new WindowProperty(shell); winprop.setName(APPL_TITLE); props.setScreen(winprop); props.setLogLevel(log.getLogLevelDesc()); props.setLogFilter(log.getFilter()); props.setSashWeights(sashform.getWeights()); props.saveProps(); } public void loadSettings() { log.setLogLevel(props.getLogLevel()); log.setFilter(props.getLogFilter()); transMeta.setMaxUndo(props.getMaxUndo()); transMeta.getDbCache().setActive(props.useDBCache()); } public void changeLooks() { props.setLook(selectionTree); props.setLook(tabfolder, Props.WIDGET_STYLE_TAB); spoongraph.newProps(); refreshTree(); refreshGraph(); } public void undoAction() { spoongraph.forceFocus(); TransAction ta = transMeta.previousUndo(); if (ta==null) return; setUndoMenu(); // something changed: change the menu switch(ta.getType()) { // // NEW // // We created a new step : undo this... case TransAction.TYPE_ACTION_NEW_STEP: // Delete the step at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeStep(idx); } refreshTree(); refreshGraph(); break; // We created a new connection : undo this... case TransAction.TYPE_ACTION_NEW_CONNECTION: // Delete the connection at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeDatabase(idx); } refreshTree(); refreshGraph(); break; // We created a new note : undo this... case TransAction.TYPE_ACTION_NEW_NOTE: // Delete the note at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeNote(idx); } refreshTree(); refreshGraph(); break; // We created a new hop : undo this... case TransAction.TYPE_ACTION_NEW_HOP: // Delete the hop at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeTransHop(idx); } refreshTree(); refreshGraph(); break; // // DELETE // // We delete a step : undo this... case TransAction.TYPE_ACTION_DELETE_STEP: // un-Delete the step at correct location: re-insert for (int i=0;i<ta.getCurrent().length;i++) { StepMeta stepMeta = (StepMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addStep(idx, stepMeta); } refreshTree(); refreshGraph(); break; // We deleted a connection : undo this... case TransAction.TYPE_ACTION_DELETE_CONNECTION: // re-insert the connection at correct location: for (int i=0;i<ta.getCurrent().length;i++) { DatabaseMeta ci = (DatabaseMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addDatabase(idx, ci); } refreshTree(); refreshGraph(); break; // We delete new note : undo this... 
case TransAction.TYPE_ACTION_DELETE_NOTE: // re-insert the note at correct location: for (int i=0;i<ta.getCurrent().length;i++) { NotePadMeta ni = (NotePadMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addNote(idx, ni); } refreshTree(); refreshGraph(); break; // We deleted a hop : undo this... case TransAction.TYPE_ACTION_DELETE_HOP: // re-insert the hop at correct location: for (int i=0;i<ta.getCurrent().length;i++) { TransHopMeta hi = (TransHopMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; // Build a new hop: StepMeta from = transMeta.findStep(hi.getFromStep().getName()); StepMeta to = transMeta.findStep(hi.getToStep().getName()); TransHopMeta hinew = new TransHopMeta(from, to); transMeta.addTransHop(idx, hinew); } refreshTree(); refreshGraph(); break; // // CHANGE // // We changed a step : undo this... case TransAction.TYPE_ACTION_CHANGE_STEP: // Delete the current step, insert previous version. for (int i=0;i<ta.getCurrent().length;i++) { StepMeta prev = (StepMeta)ta.getPrevious()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeStep(idx); transMeta.addStep(idx, prev); } refreshTree(); refreshGraph(); break; // We changed a connection : undo this... case TransAction.TYPE_ACTION_CHANGE_CONNECTION: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { DatabaseMeta prev = (DatabaseMeta)ta.getPrevious()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeDatabase(idx); transMeta.addDatabase(idx, prev); } refreshTree(); refreshGraph(); break; // We changed a note : undo this... case TransAction.TYPE_ACTION_CHANGE_NOTE: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { int idx = ta.getCurrentIndex()[i]; transMeta.removeNote(idx); NotePadMeta prev = (NotePadMeta)ta.getPrevious()[i]; transMeta.addNote(idx, prev); } refreshTree(); refreshGraph(); break; // We changed a hop : undo this... case TransAction.TYPE_ACTION_CHANGE_HOP: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { TransHopMeta prev = (TransHopMeta)ta.getPrevious()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeTransHop(idx); transMeta.addTransHop(idx, prev); } refreshTree(); refreshGraph(); break; // // POSITION // // The position of a step has changed: undo this... case TransAction.TYPE_ACTION_POSITION_STEP: // Find the location of the step: for (int i = 0; i < ta.getCurrentIndex().length; i++) { StepMeta stepMeta = transMeta.getStep(ta.getCurrentIndex()[i]); stepMeta.setLocation(ta.getPreviousLocation()[i]); } refreshGraph(); break; // The position of a note has changed: undo this... case TransAction.TYPE_ACTION_POSITION_NOTE: for (int i=0;i<ta.getCurrentIndex().length;i++) { int idx = ta.getCurrentIndex()[i]; NotePadMeta npi = transMeta.getNote(idx); Point prev = ta.getPreviousLocation()[i]; npi.setLocation(prev); } refreshGraph(); break; default: break; } // OK, now check if we need to do this again... 
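// Actions recorded with "nextAlso" set are grouped: keep undoing until the next action no longer asks to be undone together with this one.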
if (transMeta.viewNextUndo()!=null) { if (transMeta.viewNextUndo().getNextAlso()) undoAction(); } } public void redoAction() { spoongraph.forceFocus(); TransAction ta = transMeta.nextUndo(); if (ta==null) return; setUndoMenu(); // something changed: change the menu switch(ta.getType()) { // // NEW // case TransAction.TYPE_ACTION_NEW_STEP: // re-delete the step at correct location: for (int i=0;i<ta.getCurrent().length;i++) { StepMeta stepMeta = (StepMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addStep(idx, stepMeta); refreshTree(); refreshGraph(); } break; case TransAction.TYPE_ACTION_NEW_CONNECTION: // re-insert the connection at correct location: for (int i=0;i<ta.getCurrent().length;i++) { DatabaseMeta ci = (DatabaseMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addDatabase(idx, ci); refreshTree(); refreshGraph(); } break; case TransAction.TYPE_ACTION_NEW_NOTE: // re-insert the note at correct location: for (int i=0;i<ta.getCurrent().length;i++) { NotePadMeta ni = (NotePadMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addNote(idx, ni); refreshTree(); refreshGraph(); } break; case TransAction.TYPE_ACTION_NEW_HOP: // re-insert the hop at correct location: for (int i=0;i<ta.getCurrent().length;i++) { TransHopMeta hi = (TransHopMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addTransHop(idx, hi); refreshTree(); refreshGraph(); } break; // // DELETE // case TransAction.TYPE_ACTION_DELETE_STEP: // re-remove the step at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeStep(idx); } refreshTree(); refreshGraph(); break; case TransAction.TYPE_ACTION_DELETE_CONNECTION: // re-remove the connection at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeDatabase(idx); } refreshTree(); refreshGraph(); break; case TransAction.TYPE_ACTION_DELETE_NOTE: // re-remove the note at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeNote(idx); } refreshTree(); refreshGraph(); break; case TransAction.TYPE_ACTION_DELETE_HOP: // re-remove the hop at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeTransHop(idx); } refreshTree(); refreshGraph(); break; // // CHANGE // // We changed a step : undo this... case TransAction.TYPE_ACTION_CHANGE_STEP: // Delete the current step, insert previous version. for (int i=0;i<ta.getCurrent().length;i++) { StepMeta stepMeta = (StepMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeStep(idx); transMeta.addStep(idx, stepMeta); } refreshTree(); refreshGraph(); break; // We changed a connection : undo this... case TransAction.TYPE_ACTION_CHANGE_CONNECTION: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { DatabaseMeta ci = (DatabaseMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeDatabase(idx); transMeta.addDatabase(idx, ci); } refreshTree(); refreshGraph(); break; // We changed a note : undo this... case TransAction.TYPE_ACTION_CHANGE_NOTE: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { NotePadMeta ni = (NotePadMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeNote(idx); transMeta.addNote(idx, ni); } refreshTree(); refreshGraph(); break; // We changed a hop : undo this... 
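// Redo of a hop change: remove the hop at idx again and put back the version stored as "current" in the TransAction.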
case TransAction.TYPE_ACTION_CHANGE_HOP: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { TransHopMeta hi = (TransHopMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeTransHop(idx); transMeta.addTransHop(idx, hi); } refreshTree(); refreshGraph(); break; // // CHANGE POSITION // case TransAction.TYPE_ACTION_POSITION_STEP: for (int i=0;i<ta.getCurrentIndex().length;i++) { // Find & change the location of the step: StepMeta stepMeta = transMeta.getStep(ta.getCurrentIndex()[i]); stepMeta.setLocation(ta.getCurrentLocation()[i]); } refreshGraph(); break; case TransAction.TYPE_ACTION_POSITION_NOTE: for (int i=0;i<ta.getCurrentIndex().length;i++) { int idx = ta.getCurrentIndex()[i]; NotePadMeta npi = transMeta.getNote(idx); Point curr = ta.getCurrentLocation()[i]; npi.setLocation(curr); } refreshGraph(); break; default: break; } // OK, now check if we need to do this again... if (transMeta.viewNextUndo()!=null) { if (transMeta.viewNextUndo().getNextAlso()) redoAction(); } } public void setUndoMenu() { if (shell.isDisposed()) return; TransAction prev = transMeta.viewThisUndo(); TransAction next = transMeta.viewNextUndo(); if (prev!=null) { miEditUndo.setEnabled(true); miEditUndo.setText(Messages.getString("Spoon.Menu.Undo.Available", prev.toString()));//"Undo : "+prev.toString()+" \tCTRL-Z" } else { miEditUndo.setEnabled(false); miEditUndo.setText(Messages.getString("Spoon.Menu.Undo.NotAvailable"));//"Undo : not available \tCTRL-Z" } if (next!=null) { miEditRedo.setEnabled(true); miEditRedo.setText(Messages.getString("Spoon.Menu.Redo.Available",next.toString()));//"Redo : "+next.toString()+" \tCTRL-Y" } else { miEditRedo.setEnabled(false); miEditRedo.setText(Messages.getString("Spoon.Menu.Redo.NotAvailable"));//"Redo : not available \tCTRL-Y" } } public void addUndoNew(Object obj[], int position[]) { addUndoNew(obj, position, false); } public void addUndoNew(Object obj[], int position[], boolean nextAlso) { // New object? transMeta.addUndo(obj, null, position, null, null, TransMeta.TYPE_UNDO_NEW, nextAlso); setUndoMenu(); } // Undo delete object public void addUndoDelete(Object obj[], int position[]) { addUndoDelete(obj, position, false); } // Undo delete object public void addUndoDelete(Object obj[], int position[], boolean nextAlso) { transMeta.addUndo(obj, null, position, null, null, TransMeta.TYPE_UNDO_DELETE, nextAlso); setUndoMenu(); } // Change of step, connection, hop or note... public void addUndoPosition(Object obj[], int pos[], Point prev[], Point curr[]) { addUndoPosition(obj, pos, prev, curr, false); } // Change of step, connection, hop or note... public void addUndoPosition(Object obj[], int pos[], Point prev[], Point curr[], boolean nextAlso) { // It's better to store the indexes of the objects, not the objects itself! transMeta.addUndo(obj, null, pos, prev, curr, TransMeta.TYPE_UNDO_POSITION, nextAlso); setUndoMenu(); } // Change of step, connection, hop or note... public void addUndoChange(Object from[], Object to[], int[] pos) { addUndoChange(from, to, pos, false); } // Change of step, connection, hop or note... public void addUndoChange(Object from[], Object to[], int[] pos, boolean nextAlso) { transMeta.addUndo(from, to, pos, null, null, TransMeta.TYPE_UNDO_CHANGE, nextAlso); setUndoMenu(); } /** * Checks *all* the steps in the transformation, puts the result in remarks list */ public void checkTrans() { checkTrans(false); } /** * Check the steps in a transformation * * @param only_selected True: Check only the selected steps... 
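* The results are collected in the remarks list and shown afterwards via showLastTransCheck().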
*/ public void checkTrans(boolean only_selected) { CheckTransProgressDialog ctpd = new CheckTransProgressDialog(log, props, shell, transMeta, remarks, only_selected); ctpd.open(); // manages the remarks arraylist... showLastTransCheck(); } /** * Show the remarks of the last transformation check that was run. * @see #checkTrans() */ public void showLastTransCheck() { CheckResultDialog crd = new CheckResultDialog(shell, SWT.NONE, remarks); String stepname = crd.open(); if (stepname!=null) { // Go to the indicated step! StepMeta stepMeta = transMeta.findStep(stepname); if (stepMeta!=null) { editStepInfo(stepMeta); } } } public void clearDBCache() { // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) { transMeta.getDbCache().clear(name); } } else { if (name.equalsIgnoreCase(STRING_CONNECTIONS)) transMeta.getDbCache().clear(null); } } } public void exploreDB() { // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) { DatabaseMeta dbinfo = transMeta.findDatabase(name); if (dbinfo!=null) { DatabaseExplorerDialog std = new DatabaseExplorerDialog(shell, props, SWT.NONE, dbinfo, transMeta.getDatabases(), true ); std.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.CannotFindConnection.Message"));//"Couldn't find connection, please refresh the tree (F5)!" mb.setText(Messages.getString("Spoon.Dialog.CannotFindConnection.Title"));//"Error!" mb.open(); } } } else { if (name.equalsIgnoreCase(STRING_CONNECTIONS)) transMeta.getDbCache().clear(null); } } } public void analyseImpact() { AnalyseImpactProgressDialog aipd = new AnalyseImpactProgressDialog(log, props, shell, transMeta, impact); impactHasRun = aipd.open(); if (impactHasRun) showLastImpactAnalyses(); } public void showLastImpactAnalyses() { ArrayList rows = new ArrayList(); for (int i=0;i<impact.size();i++) { DatabaseImpact ii = (DatabaseImpact)impact.get(i); rows.add(ii.getRow()); } if (rows.size()>0) { // Display all the rows... PreviewRowsDialog prd = new PreviewRowsDialog(shell, SWT.NONE, "-", rows); prd.setTitleMessage(Messages.getString("Spoon.Dialog.ImpactAnalyses.Title"), Messages.getString("Spoon.Dialog.ImpactAnalyses.Message"));//"Impact analyses" "Result of analyses:" prd.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION ); if (impactHasRun) { mb.setMessage(Messages.getString("Spoon.Dialog.TransformationNoImpactOnDatabase.Message"));//"As far as I can tell, this transformation has no impact on any database." } else { mb.setMessage(Messages.getString("Spoon.Dialog.RunImpactAnalysesFirst.Message"));//"Please run the impact analyses first on this transformation." } mb.setText(Messages.getString("Spoon.Dialog.ImpactAnalyses.Title"));//Impact mb.open(); } } /** * Get & show the SQL required to run the loaded transformation... 
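* The statements are gathered by GetSQLProgressDialog and shown in a SQLStatementsDialog; when the list is empty an informational message is shown instead.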
* */ public void getSQL() { GetSQLProgressDialog pspd = new GetSQLProgressDialog(log, props, shell, transMeta); ArrayList stats = pspd.open(); if (stats!=null) // null means error, but we already displayed the error { if (stats.size()>0) { SQLStatementsDialog ssd = new SQLStatementsDialog(shell, SWT.NONE, stats); ssd.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION ); mb.setMessage(Messages.getString("Spoon.Dialog.NoSQLNeedEexecuted.Message"));//As far as I can tell, no SQL statements need to be executed before this transformation can run. mb.setText(Messages.getString("Spoon.Dialog.NoSQLNeedEexecuted.Title"));//"SQL" mb.open(); } } } public void toClipboard(String cliptext) { props.toClipboard(cliptext); } public String fromClipboard() { return props.fromClipboard(); } /** * Paste transformation from the clipboard... * */ public void pasteTransformation() { log.logDetailed(toString(), Messages.getString("Spoon.Log.PasteTransformationFromClipboard"));//"Paste transformation from the clipboard!" if (showChangedWarning()) { String xml = fromClipboard(); try { Document doc = XMLHandler.loadXMLString(xml); transMeta = new TransMeta(XMLHandler.getSubNode(doc, "transformation")); refreshGraph(); refreshTree(true); } catch(KettleException e) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorPastingTransformation.Title"), Messages.getString("Spoon.Dialog.ErrorPastingTransformation.Message"), e);//Error pasting transformation "An error occurred pasting a transformation from the clipboard" } } } public void copyTransformation() { toClipboard(XMLHandler.getXMLHeader()+transMeta.getXML()); } public void copyTransformationImage() { Clipboard clipboard = props.getNewClipboard(); Point area = transMeta.getMaximum(); Image image = spoongraph.getTransformationImage(Display.getCurrent(), area.x, area.y); clipboard.setContents(new Object[] { image.getImageData() }, new Transfer[]{ImageDataTransfer.getInstance()}); /** System.out.println("image obtained: "+area.x+"x"+area.y); ShowImageDialog sid = new ShowImageDialog(shell, image); sid.open(); */ } /** * Shows a wizard that creates a new database connection... 
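* When the wizard finishes, the new DatabaseMeta is added to the transformation and the tree and graph are refreshed.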
* */ private void createDatabaseWizard() { CreateDatabaseWizard cdw=new CreateDatabaseWizard(); DatabaseMeta newDBInfo=cdw.createAndRunDatabaseWizard(shell, props, transMeta.getDatabases()); if(newDBInfo!=null){ //finished transMeta.addDatabase(newDBInfo); refreshTree(true); refreshGraph(); } } /** * Create a transformation that extracts tables & data from a database.<p><p> * * 0) Select the database to rip<p> * 1) Select the table in the database to copy<p> * 2) Select the database to dump to<p> * 3) Select the repository directory in which it will end up<p> * 4) Select a name for the new transformation<p> * 6) Create 1 transformation for the selected table<p> */ private void copyTableWizard() { if (showChangedWarning()) { final CopyTableWizardPage1 page1 = new CopyTableWizardPage1("1", transMeta.getDatabases()); page1.createControl(shell); final CopyTableWizardPage2 page2 = new CopyTableWizardPage2("2"); page2.createControl(shell); final CopyTableWizardPage3 page3 = new CopyTableWizardPage3 ("3", rep); page3.createControl(shell); Wizard wizard = new Wizard() { public boolean performFinish() { return copyTable(page3.getTransformationName(), page3.getDirectory(), page1.getSourceDatabase(), page1.getTargetDatabase(), page2.getSelection() ); } /** * @see org.eclipse.jface.wizard.Wizard#canFinish() */ public boolean canFinish() { return page3.canFinish(); } }; wizard.addPage(page1); wizard.addPage(page2); wizard.addPage(page3); WizardDialog wd = new WizardDialog(shell, wizard); wd.setMinimumPageSize(700,400); wd.open(); } } public boolean copyTable( String transname, RepositoryDirectory repdir, DatabaseMeta sourceDBInfo, DatabaseMeta targetDBInfo, String tablename ) { try { // // Create a new transformation... // TransMeta ti = new TransMeta(); ti.setName(transname); ti.setDirectory(repdir); ti.setDatabases(transMeta.getDatabases()); // // Add a note // String note = Messages.getString("Spoon.Message.Note.ReadInformationFromTableOnDB",tablename,sourceDBInfo.getDatabaseName() )+Const.CR;//"Reads information from table ["+tablename+"] on database ["+sourceDBInfo+"]" note+=Messages.getString("Spoon.Message.Note.WriteInformationToTableOnDB",tablename,targetDBInfo.getDatabaseName() );//"After that, it writes the information to table ["+tablename+"] on database ["+targetDBInfo+"]" NotePadMeta ni = new NotePadMeta(note, 150, 10, -1, -1); ti.addNote(ni); // // create the source step... // String fromstepname = Messages.getString("Spoon.Message.Note.ReadFromTable",tablename); //"read from ["+tablename+"]"; TableInputMeta tii = new TableInputMeta(); tii.setDatabaseMeta(sourceDBInfo); tii.setSQL("SELECT * FROM "+tablename); StepLoader steploader = StepLoader.getInstance(); String fromstepid = steploader.getStepPluginID(tii); StepMeta fromstep = new StepMeta(log, fromstepid, fromstepname, (StepMetaInterface)tii ); fromstep.setLocation(150,100); fromstep.setDraw(true); fromstep.setDescription(Messages.getString("Spoon.Message.Note.ReadInformationFromTableOnDB",tablename,sourceDBInfo.getDatabaseName() )); ti.addStep(fromstep); // // add logic to rename fields in case any of the field names contain reserved words... // Use metadata logic in SelectValues, use SelectValueInfo... // Database sourceDB = new Database(sourceDBInfo); sourceDB.connect(); // Get the fields for the input table... Row fields = sourceDB.getTableFields(tablename); // See if we need to deal with reserved words... 
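// If any source field name is a reserved word on the target database, an extra SelectValues step is inserted to rename (quote) those fields before they reach the output step.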
int nrReserved = targetDBInfo.getNrReservedWords(fields); if (nrReserved>0) { SelectValuesMeta svi = new SelectValuesMeta(); svi.allocate(0,0,nrReserved); int nr = 0; for (int i=0;i<fields.size();i++) { Value v = fields.getValue(i); if (targetDBInfo.isReservedWord( v.getName() ) ) { svi.getMetaName()[nr] = v.getName(); svi.getMetaRename()[nr] = targetDBInfo.quoteField( v.getName() ); nr++; } } String selstepname =Messages.getString("Spoon.Message.Note.HandleReservedWords"); //"Handle reserved words"; String selstepid = steploader.getStepPluginID(svi); StepMeta selstep = new StepMeta(log, selstepid, selstepname, (StepMetaInterface)svi ); selstep.setLocation(350,100); selstep.setDraw(true); selstep.setDescription(Messages.getString("Spoon.Message.Note.RenamesReservedWords",targetDBInfo.getDatabaseTypeDesc()) );//"Renames reserved words for "+targetDBInfo.getDatabaseTypeDesc() ti.addStep(selstep); TransHopMeta shi = new TransHopMeta(fromstep, selstep); ti.addTransHop(shi); fromstep = selstep; } // // Create the target step... // // // Add the TableOutputMeta step... // String tostepname = Messages.getString("Spoon.Message.Note.WriteToTable",tablename); // "write to ["+tablename+"]"; TableOutputMeta toi = new TableOutputMeta(); toi.setDatabase( targetDBInfo ); toi.setTablename( tablename ); toi.setCommitSize( 200 ); toi.setTruncateTable( true ); String tostepid = steploader.getStepPluginID(toi); StepMeta tostep = new StepMeta(log, tostepid, tostepname, (StepMetaInterface)toi ); tostep.setLocation(550,100); tostep.setDraw(true); tostep.setDescription(Messages.getString("Spoon.Message.Note.WriteInformationToTableOnDB2",tablename,targetDBInfo.getDatabaseName() ));//"Write information to table ["+tablename+"] on database ["+targetDBInfo+"]" ti.addStep(tostep); // // Add a hop between the two steps... // TransHopMeta hi = new TransHopMeta(fromstep, tostep); ti.addTransHop(hi); // OK, if we're still here: overwrite the current transformation... transMeta = ti; refreshGraph(); refreshTree(true); } catch(Exception e) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnexpectedError.Title"), Messages.getString("Spoon.Dialog.UnexpectedError.Message"), new KettleException(e.getMessage(), e));//"Unexpected error" "An unexpected error occurred creating the new transformation" return false; } return true; } public String toString() { return APP_NAME; } /** * This is the main procedure for Spoon. * * @param a Arguments are available in the "Get System Info" step. 
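* @throws KettleException when startup initialization fails.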
*/ public static void main (String [] a) throws KettleException { EnvUtil.environmentInit(); ArrayList args = new ArrayList(); for (int i=0;i<a.length;i++) args.add(a[i]); Display display = new Display(); Splash splash = new Splash(display); StringBuffer optionRepname, optionUsername, optionPassword, optionTransname, optionFilename, optionDirname, optionLogfile, optionLoglevel; CommandLineOption options[] = new CommandLineOption[] { new CommandLineOption("rep", "Repository name", optionRepname=new StringBuffer()), new CommandLineOption("user", "Repository username", optionUsername=new StringBuffer()), new CommandLineOption("pass", "Repository password", optionPassword=new StringBuffer()), new CommandLineOption("trans", "The name of the transformation to launch", optionTransname=new StringBuffer()), new CommandLineOption("dir", "The directory (don't forget the leading /)", optionDirname=new StringBuffer()), new CommandLineOption("file", "The filename (Transformation in XML) to launch", optionFilename=new StringBuffer()), new CommandLineOption("level", "The logging level (Basic, Detailed, Debug, Rowlevel, Error, Nothing)", optionLoglevel=new StringBuffer()), new CommandLineOption("logfile", "The logging file to write to", optionLogfile=new StringBuffer()), new CommandLineOption("log", "The logging file to write to (deprecated)", optionLogfile=new StringBuffer(), false, true), }; // Parse the options... CommandLineOption.parseArguments(args, options); String kettleRepname = Const.getEnvironmentVariable("KETTLE_REPOSITORY", null); String kettleUsername = Const.getEnvironmentVariable("KETTLE_USER", null); String kettlePassword = Const.getEnvironmentVariable("KETTLE_PASSWORD", null); if (!Const.isEmpty(kettleRepname )) optionRepname = new StringBuffer(kettleRepname); if (!Const.isEmpty(kettleUsername)) optionUsername = new StringBuffer(kettleUsername); if (!Const.isEmpty(kettlePassword)) optionPassword = new StringBuffer(kettlePassword); // Before anything else, check the runtime version!!! String version = Const.JAVA_VERSION; if ("1.4".compareToIgnoreCase(version)>0) { System.out.println("The System is running on Java version "+version); System.out.println("Unfortunately, it needs version 1.4 or higher to run."); return; } // Set default Locale: Locale.setDefault(Const.DEFAULT_LOCALE); LogWriter log; if (Const.isEmpty(optionLogfile)) { log=LogWriter.getInstance(Const.SPOON_LOG_FILE, false, LogWriter.LOG_LEVEL_BASIC); } else { log=LogWriter.getInstance( optionLogfile.toString(), true, LogWriter.LOG_LEVEL_BASIC ); } if (log.getRealFilename()!=null) log.logBasic(APP_NAME, Messages.getString("Spoon.Log.LoggingToFile")+log.getRealFilename());//"Logging goes to " if (!Const.isEmpty(optionLoglevel)) { log.setLogLevel(optionLoglevel.toString()); log.logBasic(APP_NAME, Messages.getString("Spoon.Log.LoggingAtLevel")+log.getLogLevelDesc());//"Logging is at level : " } /* Load the plugins etc.*/ StepLoader stloader = StepLoader.getInstance(); if (!stloader.read()) { log.logError(APP_NAME, Messages.getString("Spoon.Log.ErrorLoadingAndHaltSystem"));//Error loading steps & plugins... halting Spoon! return; } /* Load the plugins etc. we need to load jobentry*/ JobEntryLoader jeloader = JobEntryLoader.getInstance(); if (!jeloader.read()) { log.logError("Spoon", "Error loading job entries & plugins... 
halting Kitchen!"); return; } final Spoon win = new Spoon(log, display, null); win.setDestroy(true); win.setArguments((String[])args.toArray(new String[args.size()])); log.logBasic(APP_NAME, Messages.getString("Spoon.Log.MainWindowCreated"));//Main window is created. RepositoryMeta repinfo = null; UserInfo userinfo = null; if (Const.isEmpty(optionRepname) && Const.isEmpty(optionFilename) && win.props.showRepositoriesDialogAtStartup()) { log.logBasic(APP_NAME, Messages.getString("Spoon.Log.AskingForRepository"));//"Asking for repository" int perms[] = new int[] { PermissionMeta.TYPE_PERMISSION_TRANSFORMATION }; splash.hide(); RepositoriesDialog rd = new RepositoriesDialog(win.disp, SWT.NONE, perms, Messages.getString("Spoon.Application.Name"));//"Spoon" if (rd.open()) { repinfo = rd.getRepository(); userinfo = rd.getUser(); if (!userinfo.useTransformations()) { MessageBox mb = new MessageBox(win.shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.RepositoryUserCannotWork.Message"));//"Sorry, this repository user can't work with transformations from the repository." mb.setText(Messages.getString("Spoon.Dialog.RepositoryUserCannotWork.Title"));//"Error!" mb.open(); userinfo = null; repinfo = null; } } else { // Exit point: user pressed CANCEL! if (rd.isCancelled()) { splash.dispose(); win.quitFile(); return; } } } try { // Read kettle transformation specified on command-line? if (!Const.isEmpty(optionRepname) || !Const.isEmpty(optionFilename)) { if (!Const.isEmpty(optionRepname)) { RepositoriesMeta repsinfo = new RepositoriesMeta(log); if (repsinfo.readData()) { repinfo = repsinfo.findRepository(optionRepname.toString()); if (repinfo!=null) { // Define and connect to the repository... win.rep = new Repository(log, repinfo, userinfo); if (win.rep.connect(Messages.getString("Spoon.Application.Name")))//"Spoon" { if (Const.isEmpty(optionDirname)) optionDirname=new StringBuffer(RepositoryDirectory.DIRECTORY_SEPARATOR); // Check username, password win.rep.userinfo = new UserInfo(win.rep, optionUsername.toString(), optionPassword.toString()); if (win.rep.userinfo.getID()>0) { RepositoryDirectory repdir = win.rep.getDirectoryTree().findDirectory(optionDirname.toString()); if (repdir!=null) { win.transMeta = new TransMeta(win.rep, optionTransname.toString(), repdir); win.setFilename(optionRepname.toString()); win.transMeta.clearChanged(); } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.UnableFindDirectory",optionDirname.toString()));//"Can't find directory ["+dirname+"] in the repository." } } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.UnableVerifyUser"));//"Can't verify username and password." win.rep.disconnect(); win.rep=null; } } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.UnableConnectToRepository"));//"Can't connect to the repository." } } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.NoRepositoryRrovided"));//"No repository provided, can't load transformation." } } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.NoRepositoriesDefined"));//"No repositories defined on this system." } } else if (!Const.isEmpty(optionFilename)) { win.transMeta = new TransMeta(optionFilename.toString()); win.setFilename(optionFilename.toString()); win.transMeta.clearChanged(); } } else // Normal operations, nothing on the commandline... { // Can we connect to the repository? 
if (repinfo!=null && userinfo!=null) { win.rep = new Repository(log, repinfo, userinfo); if (!win.rep.connect(Messages.getString("Spoon.Application.Name"))) //"Spoon" { win.rep = null; } } if (win.props.openLastFile()) { log.logDetailed(APP_NAME, Messages.getString("Spoon.Log.TryingOpenLastUsedFile"));//"Trying to open the last file used." String lastfiles[] = win.props.getLastFiles(); String lastdirs[] = win.props.getLastDirs(); boolean lasttypes[] = win.props.getLastTypes(); String lastrepos[] = win.props.getLastRepositories(); if (lastfiles.length>0) { boolean use_repository = repinfo!=null; // Perhaps we need to connect to the repository? if (lasttypes[0]) { if (lastrepos[0]!=null && lastrepos[0].length()>0) { if (use_repository && !lastrepos[0].equalsIgnoreCase(repinfo.getName())) { // We just asked... use_repository = false; } } } if (use_repository || !lasttypes[0]) { if (win.rep!=null) // load from repository... { if (win.rep.getName().equalsIgnoreCase(lastrepos[0])) { RepositoryDirectory repdir = win.rep.getDirectoryTree().findDirectory(lastdirs[0]); if (repdir!=null) { log.logDetailed(APP_NAME, Messages.getString("Spoon.Log.AutoLoadingTransformation",lastfiles[0],lastdirs[0]));//"Auto loading transformation ["+lastfiles[0]+"] from repository directory ["+lastdirs[0]+"]" TransLoadProgressDialog tlpd = new TransLoadProgressDialog(win.shell, win.rep, lastfiles[0], repdir); TransMeta transInfo = tlpd.open(); // = new TransInfo(log, win.rep, lastfiles[0], repdir); if (transInfo != null) { win.transMeta = transInfo; win.setFilename(lastfiles[0]); } } } } else // Load from XML? { win.transMeta = new TransMeta(lastfiles[0]); win.setFilename(lastfiles[0]); } } win.transMeta.clearChanged(); } } } } catch(KettleException ke) { log.logError(APP_NAME, Messages.getString("Spoon.Log.ErrorOccurred")+Const.CR+ke.getMessage());//"An error occurred: " win.rep=null; // ke.printStackTrace(); } win.open (); splash.dispose(); try { while (!win.isDisposed ()) { if (!win.readAndDispatch ()) win.sleep (); } } catch(Throwable e) { log.logError(APP_NAME, Messages.getString("Spoon.Log.UnexpectedErrorOccurred")+Const.CR+e.getMessage());//"An unexpected error occurred in Spoon: probable cause: please close all windows before stopping Spoon! " e.printStackTrace(); } win.dispose(); log.logBasic(APP_NAME, APP_NAME+" "+Messages.getString("Spoon.Log.AppHasEnded"));//" has ended." // Close the logfile log.close(); // Kill all remaining things in this VM! System.exit(0); } /** * @return Returns the transMeta. */ public TransMeta getTransMeta() { return transMeta; } /** * @param transMeta The transMeta to set. */ public void setTransMeta(TransMeta transMeta) { this.transMeta = transMeta; } /** * Create a new SelectValues step in between this step and the previous. * If the previous fields are not there, no mapping can be made, same with the required fields. * @param stepMeta The target step to map against. */ public void generateMapping(StepMeta stepMeta) { try { if (stepMeta!=null) { StepMetaInterface smi = stepMeta.getStepMetaInterface(); Row targetFields = smi.getRequiredFields(); Row sourceFields = transMeta.getPrevStepFields(stepMeta); // Build the mapping: let the user decide!! 
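// Each source field name gets its originating step appended (STRING_ORIGIN_SEPARATOR) so identically named fields can be told apart in the mapping dialog.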
        String[] source = sourceFields.getFieldNames();
        for (int i=0;i<source.length;i++)
        {
            Value v = sourceFields.getValue(i);
            source[i]+=EnterMappingDialog.STRING_ORIGIN_SEPARATOR+v.getOrigin()+")";
        }
        String[] target = targetFields.getFieldNames();

        EnterMappingDialog dialog = new EnterMappingDialog(shell, source, target);
        ArrayList mappings = dialog.open();
        if (mappings!=null)
        {
            // OK, so we now know which field maps where.
            // This allows us to generate the mapping using a SelectValues Step...
            SelectValuesMeta svm = new SelectValuesMeta();
            svm.allocate(mappings.size(), 0, 0);

            for (int i=0;i<mappings.size();i++)
            {
                SourceToTargetMapping mapping = (SourceToTargetMapping) mappings.get(i);
                svm.getSelectName()[i] = sourceFields.getValue(mapping.getSourcePosition()).getName();
                svm.getSelectRename()[i] = target[mapping.getTargetPosition()];
                svm.getSelectLength()[i] = -1;
                svm.getSelectPrecision()[i] = -1;
            }

            // Now that we have the meta-data, create a new step info object
            String stepName = stepMeta.getName()+" Mapping";
            stepName = transMeta.getAlternativeStepname(stepName); // if it's already there, rename it.

            StepMeta newStep = new StepMeta(log, "SelectValues", stepName, svm);
            newStep.setLocation(stepMeta.getLocation().x+20, stepMeta.getLocation().y+20);
            newStep.setDraw(true);
            transMeta.addStep(newStep);
            addUndoNew(new StepMeta[] { newStep }, new int[] { transMeta.indexOfStep(newStep) });

            // Redraw stuff...
            refreshTree();
            refreshGraph();
        }
    }
    else
    {
        System.out.println("No target to do mapping against!");
    }
}
catch(KettleException e)
{
    new ErrorDialog(shell, Props.getInstance(), "Error creating mapping", "There was an error when Kettle tried to generate a mapping against the target step", e);
}
}
}
src/be/ibridge/kettle/spoon/Spoon.java
/********************************************************************** ** ** ** This code belongs to the KETTLE project. ** ** ** ** Kettle, from version 2.2 on, is released into the public domain ** ** under the Lesser GNU Public License (LGPL). ** ** ** ** For more details, please read the document LICENSE.txt, included ** ** in this project ** ** ** ** http://www.kettle.be ** ** [email protected] ** ** ** **********************************************************************/ package be.ibridge.kettle.spoon; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Properties; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.dialogs.MessageDialogWithToggle; import org.eclipse.jface.wizard.Wizard; import org.eclipse.jface.wizard.WizardDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.dnd.Clipboard; import org.eclipse.swt.dnd.DND; import org.eclipse.swt.dnd.DragSource; import org.eclipse.swt.dnd.DragSourceEvent; import org.eclipse.swt.dnd.DragSourceListener; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Cursor; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.graphics.ImageData; import org.eclipse.swt.graphics.RGB; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.printing.Printer; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Menu; import org.eclipse.swt.widgets.MenuItem; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.ToolBar; import org.eclipse.swt.widgets.ToolItem; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeItem; import org.w3c.dom.Document; import org.w3c.dom.Node; import be.ibridge.kettle.core.Const; import be.ibridge.kettle.core.DragAndDropContainer; import be.ibridge.kettle.core.GUIResource; import be.ibridge.kettle.core.KettleVariables; import be.ibridge.kettle.core.LogWriter; import be.ibridge.kettle.core.NotePadMeta; import be.ibridge.kettle.core.Point; import be.ibridge.kettle.core.PrintSpool; import be.ibridge.kettle.core.Props; import be.ibridge.kettle.core.Row; import be.ibridge.kettle.core.SourceToTargetMapping; import be.ibridge.kettle.core.TransAction; import be.ibridge.kettle.core.WindowProperty; import be.ibridge.kettle.core.XMLHandler; import be.ibridge.kettle.core.XMLHandlerCache; import be.ibridge.kettle.core.XMLTransfer; import be.ibridge.kettle.core.clipboard.ImageDataTransfer; import be.ibridge.kettle.core.database.Database; import be.ibridge.kettle.core.database.DatabaseMeta; import 
be.ibridge.kettle.core.dialog.CheckResultDialog; import be.ibridge.kettle.core.dialog.DatabaseDialog; import be.ibridge.kettle.core.dialog.DatabaseExplorerDialog; import be.ibridge.kettle.core.dialog.EnterMappingDialog; import be.ibridge.kettle.core.dialog.EnterOptionsDialog; import be.ibridge.kettle.core.dialog.EnterSearchDialog; import be.ibridge.kettle.core.dialog.EnterStringsDialog; import be.ibridge.kettle.core.dialog.ErrorDialog; import be.ibridge.kettle.core.dialog.PreviewRowsDialog; import be.ibridge.kettle.core.dialog.SQLEditor; import be.ibridge.kettle.core.dialog.SQLStatementsDialog; import be.ibridge.kettle.core.dialog.ShowBrowserDialog; import be.ibridge.kettle.core.dialog.Splash; import be.ibridge.kettle.core.exception.KettleDatabaseException; import be.ibridge.kettle.core.exception.KettleException; import be.ibridge.kettle.core.reflection.StringSearchResult; import be.ibridge.kettle.core.util.EnvUtil; import be.ibridge.kettle.core.value.Value; import be.ibridge.kettle.core.wizards.createdatabase.CreateDatabaseWizard; import be.ibridge.kettle.job.JobEntryLoader; import be.ibridge.kettle.pan.CommandLineOption; import be.ibridge.kettle.repository.PermissionMeta; import be.ibridge.kettle.repository.RepositoriesMeta; import be.ibridge.kettle.repository.Repository; import be.ibridge.kettle.repository.RepositoryDirectory; import be.ibridge.kettle.repository.RepositoryMeta; import be.ibridge.kettle.repository.UserInfo; import be.ibridge.kettle.repository.dialog.RepositoriesDialog; import be.ibridge.kettle.repository.dialog.RepositoryExplorerDialog; import be.ibridge.kettle.repository.dialog.SelectObjectDialog; import be.ibridge.kettle.repository.dialog.UserDialog; import be.ibridge.kettle.spoon.dialog.AnalyseImpactProgressDialog; import be.ibridge.kettle.spoon.dialog.CheckTransProgressDialog; import be.ibridge.kettle.spoon.dialog.GetSQLProgressDialog; import be.ibridge.kettle.spoon.dialog.ShowCreditsDialog; import be.ibridge.kettle.spoon.dialog.TipsDialog; import be.ibridge.kettle.spoon.wizards.CopyTableWizardPage1; import be.ibridge.kettle.spoon.wizards.CopyTableWizardPage2; import be.ibridge.kettle.spoon.wizards.CopyTableWizardPage3; import be.ibridge.kettle.trans.DatabaseImpact; import be.ibridge.kettle.trans.StepLoader; import be.ibridge.kettle.trans.StepPlugin; import be.ibridge.kettle.trans.TransHopMeta; import be.ibridge.kettle.trans.TransMeta; import be.ibridge.kettle.trans.dialog.TransDialog; import be.ibridge.kettle.trans.dialog.TransHopDialog; import be.ibridge.kettle.trans.dialog.TransLoadProgressDialog; import be.ibridge.kettle.trans.dialog.TransSaveProgressDialog; import be.ibridge.kettle.trans.step.BaseStep; import be.ibridge.kettle.trans.step.StepDialogInterface; import be.ibridge.kettle.trans.step.StepMeta; import be.ibridge.kettle.trans.step.StepMetaInterface; import be.ibridge.kettle.trans.step.selectvalues.SelectValuesMeta; import be.ibridge.kettle.trans.step.tableinput.TableInputMeta; import be.ibridge.kettle.trans.step.tableoutput.TableOutputMeta; /** * This class handles the main window of the Spoon graphical transformation editor. 
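* It ties together the transformation graph (SpoonGraph), the log view (SpoonLog) and the step history (SpoonHistory) for a single TransMeta.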
* * @author Matt * @since 16-may-2003 * * Add i18n support * import the package:be.ibridge.kettle.i18n.Messages * @modified by vitoelv since 07-Feb-2006 */ public class Spoon { public static final String APP_NAME = Messages.getString("Spoon.Application.Name"); //"Spoon"; private LogWriter log; private Display disp; private Shell shell; private boolean destroy; private SpoonGraph spoongraph; private SpoonLog spoonlog; private SashForm sashform; public CTabFolder tabfolder; public Row variables; /** * These are the arguments that were given at Spoon launch time... */ private String[] arguments; /** * A list of remarks on the current Transformation... */ private ArrayList remarks; /** * A list of impacts of the current transformation on the used databases. */ private ArrayList impact; /** * Indicates whether or not an impact analyses has already run. */ private boolean impactHasRun; private boolean stopped; private Cursor cursor_hourglass, cursor_hand; public Props props; public Repository rep; public TransMeta transMeta; private ToolBar tBar; private Menu msFile; private MenuItem miFileSep3; private MenuItem miEditUndo, miEditRedo; private Tree selectionTree; private TreeItem tiConn, tiHops, tiStep, tiBase, tiPlug; private Tree pluginHistoryTree; private Listener lsNew, lsEdit, lsDupe, lsCopy, lsDel, lsSQL, lsCache, lsExpl; private SelectionAdapter lsEditDef, lsEditSel; public static final String STRING_CONNECTIONS = Messages.getString("Spoon.STRING_CONNECTIONS"); //"Connections"; public static final String STRING_STEPS = Messages.getString("Spoon.STRING_STEPS"); //"Steps"; public static final String STRING_HOPS = Messages.getString("Spoon.STRING_HOPS"); //"Hops"; public static final String STRING_BASE = Messages.getString("Spoon.STRING_BASE"); //"Base step types"; public static final String STRING_PLUGIN = Messages.getString("Spoon.STRING_PLUGIN"); //"Plugin step types"; public static final String STRING_HISTORY = Messages.getString("Spoon.STRING_HISTORY"); //"Step creation history"; private static final String APPL_TITLE = APP_NAME; public KeyAdapter defKeys; public KeyAdapter modKeys; private SpoonHistory spoonhist; private Menu mBar; private Composite tabComp; private SashForm leftSash; public Spoon(LogWriter l, Repository rep) { this(l, null, null, rep); } public Spoon(LogWriter l, Display d, Repository rep) { this(l, d, null, rep); } public Spoon(LogWriter log, Display d, TransMeta ti, Repository rep) { this.log = log; this.rep = rep; if (d!=null) { disp=d; destroy=false; } else { disp=new Display(); destroy=true; } shell=new Shell(disp); shell.setText(APPL_TITLE); FormLayout layout = new FormLayout(); layout.marginWidth = 0; layout.marginHeight = 0; shell.setLayout (layout); // INIT Data structure if (ti==null) { this.transMeta = new TransMeta(); } else { this.transMeta = ti; } if (!Props.isInitialized()) { //log.logDetailed(toString(), "Load properties for Spoon..."); log.logDetailed(toString(),Messages.getString("Spoon.Log.LoadProperties")); Props.init(disp, Props.TYPE_PROPERTIES_SPOON); // things to remember... 
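// Props is a singleton; it is only initialized the first time a Spoon window is created on this display.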
} props=Props.getInstance(); // Load settings in the props loadSettings(); remarks = new ArrayList(); impact = new ArrayList(); impactHasRun = false; // Clean out every time we start, auto-loading etc, is not a good idea // If they are needed that often, set them in the kettle.properties file // variables = new Row(); // props.setLook(shell); shell.setImage(GUIResource.getInstance().getImageSpoon()); cursor_hourglass = new Cursor(disp, SWT.CURSOR_WAIT); cursor_hand = new Cursor(disp, SWT.CURSOR_HAND); // widgets = new WidgetContainer(); defKeys = new KeyAdapter() { public void keyPressed(KeyEvent e) { // ESC --> Unselect All steps if (e.keyCode == SWT.ESC) { spoongraph.clearSettings(); transMeta.unselectAll(); refreshGraph(); }; // F3 --> createDatabaseWizard if (e.keyCode == SWT.F3) { createDatabaseWizard(); } // F4 --> copyTableWizard if (e.keyCode == SWT.F4) { copyTableWizard(); } // F5 --> refresh if (e.keyCode == SWT.F5) { refreshGraph(); refreshTree(true); } // F6 --> show last impact analyses if (e.keyCode == SWT.F6) { showLastImpactAnalyses(); } // F7 --> show last verify results if (e.keyCode == SWT.F7) { showLastTransCheck(); } // F8 --> show last preview if (e.keyCode == SWT.F8) { spoonlog.showPreview(); } // F9 --> run if (e.keyCode == SWT.F9) { tabfolder.setSelection(1); spoonlog.startstop(); } // F10 --> preview if (e.keyCode == SWT.F10) { spoonlog.preview(); } // F11 --> Verify if (e.keyCode == SWT.F11) { checkTrans(); spoongraph.clearSettings(); } // CTRL-A --> Select All steps if ((int)e.character == 1) { transMeta.selectAll(); }; // CTRL-D --> Disconnect from repository if ((int)e.character == 4) { closeRepository(); spoongraph.clearSettings(); }; // CTRL-E --> Explore the repository if ((int)e.character == 5) { exploreRepository(); spoongraph.clearSettings(); }; // CTRL-F --> Java examination if ((int)e.character == 6 && (( e.stateMask&SWT.CONTROL)!=0) && (( e.stateMask&SWT.ALT)==0) ) { searchMetaData(); spoongraph.clearSettings(); }; // CTRL-I --> Import from XML file && (e.keyCode&SWT.CONTROL)!=0 if ((int)e.character == 9 && (( e.stateMask&SWT.CONTROL)!=0) && (( e.stateMask&SWT.ALT)==0) ) { openFile(true); spoongraph.clearSettings(); }; // CTRL-J --> Get variables if ((int)e.character == 10 && (( e.stateMask&SWT.CONTROL)!=0) && (( e.stateMask&SWT.ALT)==0) ) { getVariables(); spoongraph.clearSettings(); }; // CTRL-N --> new if ((int)e.character == 14) { newFile(); spoongraph.clearSettings(); } // CTRL-O --> open if ((int)e.character == 15) { openFile(false); spoongraph.clearSettings(); } // CTRL-P --> print if ((int)e.character == 16) { printFile(); spoongraph.clearSettings(); } // CTRL-Q --> Impact analyses if ((int)e.character == 17) { analyseImpact(); spoongraph.clearSettings(); } // CTRL-R --> Connect to repository if ((int)e.character == 18) { openRepository(); spoongraph.clearSettings(); }; // CTRL-S --> save if ((int)e.character == 19) { saveFile(); spoongraph.clearSettings(); } // CTRL-T --> transformation if ((int)e.character == 20) { setTrans(); spoongraph.clearSettings(); } // CTRL-Y --> redo action if ((int)e.character == 25) { redoAction(); spoongraph.clearSettings(); } // CTRL-Z --> undo action if ((int)e.character == 26) { spoongraph.clearSettings(); undoAction(); } // CTRL-SHIFT-I --> Copy Transformation Image to clipboard if ((int)e.character == 9 && (( e.stateMask&SWT.CONTROL)!=0) && (( e.stateMask&SWT.ALT)!=0)) { copyTransformationImage(); } // System.out.println("(int)e.character = "+(int)e.character+", keycode = "+e.keyCode+", 
stateMask="+e.stateMask); } }; modKeys = new KeyAdapter() { public void keyPressed(KeyEvent e) { spoongraph.shift = (e.keyCode == SWT.SHIFT ); spoongraph.control = (e.keyCode == SWT.CONTROL); } public void keyReleased(KeyEvent e) { spoongraph.shift = (e.keyCode == SWT.SHIFT ); spoongraph.control = (e.keyCode == SWT.CONTROL); } }; addBar(); FormData fdBar = new FormData(); fdBar.left = new FormAttachment(0, 0); fdBar.top = new FormAttachment(0, 0); tBar.setLayoutData(fdBar); sashform = new SashForm(shell, SWT.HORIZONTAL); // props.setLook(sashform); FormData fdSash = new FormData(); fdSash.left = new FormAttachment(0, 0); fdSash.top = new FormAttachment(tBar, 0); fdSash.bottom = new FormAttachment(100, 0); fdSash.right = new FormAttachment(100, 0); sashform.setLayoutData(fdSash); addMenu(); addTree(); addTabs(); setTreeImages(); // In case someone dares to press the [X] in the corner ;-) shell.addShellListener( new ShellAdapter() { public void shellClosed(ShellEvent e) { e.doit=quitFile(); } } ); shell.layout(); // Set the shell size, based upon previous time... WindowProperty winprop = props.getScreen(APPL_TITLE); if (winprop!=null) winprop.setShell(shell); else { shell.pack(); shell.setMaximized(true); // Default = maximized! } } /** * Search the transformation meta-data. * */ public void searchMetaData() { EnterSearchDialog esd = new EnterSearchDialog(shell); if (esd.open()) { String filterString = esd.getFilterString(); String filter = filterString; if (filter!=null) filter = filter.toUpperCase(); List stringList = transMeta.getStringList(esd.isSearchingSteps(), esd.isSearchingDatabases(), esd.isSearchingNotes()); ArrayList rows = new ArrayList(); for (int i=0;i<stringList.size();i++) { StringSearchResult result = (StringSearchResult) stringList.get(i); boolean add = Const.isEmpty(filter); if (filter!=null && result.getString().toUpperCase().indexOf(filter)>=0) add=true; if (filter!=null && result.getFieldName().toUpperCase().indexOf(filter)>=0) add=true; if (filter!=null && result.getParentObject().toString().toUpperCase().indexOf(filter)>=0) add=true; if (add) rows.add(result.toRow()); } if (rows.size()!=0) { PreviewRowsDialog prd = new PreviewRowsDialog(shell, SWT.NONE, "String searcher", rows); prd.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.NothingFound.Message")); // Nothing found that matches your criteria mb.setText(Messages.getString("Spoon.Dialog.NothingFound.Title")); // Sorry! mb.open(); } } } public void getVariables() { Properties sp = new Properties(); KettleVariables kettleVariables = KettleVariables.getInstance(); sp.putAll(kettleVariables.getProperties()); sp.putAll(System.getProperties()); List list = transMeta.getUsedVariables(); for (int i=0;i<list.size();i++) { String varName = (String)list.get(i); String varValue = sp.getProperty(varName, ""); System.out.println("variable ["+varName+"] is defined as : "+varValue); if (variables.searchValueIndex(varName)<0) { variables.addValue(new Value(varName, varValue)); } } // Now ask the use for more info on these! 
EnterStringsDialog esd = new EnterStringsDialog(shell, SWT.NONE, variables); esd.setReadOnly(false); if (esd.open()!=null) { for (int i=0;i<variables.size();i++) { Value varval = variables.getValue(i); if (!Const.isEmpty(varval.getString())) { kettleVariables.setVariable(varval.getName(), varval.getString()); System.out.println("Variable ${"+varval.getName()+"} set to ["+varval.getString()+"] for thread ["+Thread.currentThread()+"]"); } } } } public void clear() { remarks = new ArrayList(); impact = new ArrayList(); impactHasRun = false; transMeta.clear(); XMLHandlerCache.getInstance().clear(); setUndoMenu(); } public void open() { shell.open(); // Shared database entries to load from repository? loadRepositoryObjects(); // What plugins did we use previously? refreshPluginHistory(); // Perhaps the transformation contains elements at startup? if (transMeta.nrSteps()>0 || transMeta.nrDatabases()>0 || transMeta.nrTransHops()>0) { refreshTree(true); // Do a complete refresh then... } transMeta.clearChanged(); // Clear changed: they were artificial (databases loaded, etc.) setShellText(); if (props.showTips()) { TipsDialog tip = new TipsDialog(shell, props); tip.open(); } } public boolean readAndDispatch () { return disp.readAndDispatch(); } /** * @return check whether or not the application was stopped. */ public boolean isStopped() { return stopped; } /** * @param stopped True to stop this application. */ public void setStopped(boolean stopped) { this.stopped = stopped; } /** * @param destroy Whether or not to distroy the display. */ public void setDestroy(boolean destroy) { this.destroy = destroy; } /** * @return Returns whether or not we should distroy the display. */ public boolean doDestroy() { return destroy; } /** * @param arguments The arguments to set. */ public void setArguments(String[] arguments) { this.arguments = arguments; } /** * @return Returns the arguments. */ public String[] getArguments() { return arguments; } public synchronized void dispose() { setStopped(true); cursor_hand.dispose(); cursor_hourglass.dispose(); if (destroy && !disp.isDisposed()) disp.dispose(); } public boolean isDisposed() { return disp.isDisposed(); } public void sleep() { disp.sleep(); } public void addMenu() { if (mBar!=null) { mBar.dispose(); } mBar = new Menu(shell, SWT.BAR); shell.setMenuBar(mBar); // main File menu... MenuItem mFile = new MenuItem(mBar, SWT.CASCADE); //mFile.setText("&File"); mFile.setText(Messages.getString("Spoon.Menu.File") ); msFile = new Menu(shell, SWT.DROP_DOWN); mFile.setMenu(msFile); MenuItem miFileNew = new MenuItem(msFile, SWT.CASCADE); miFileNew.setText(Messages.getString("Spoon.Menu.File.New")); //miFileNew.setText("&New \tCTRL-N"); MenuItem miFileOpen = new MenuItem(msFile, SWT.CASCADE); miFileOpen.setText(Messages.getString("Spoon.Menu.File.Open")); //&Open \tCTRL-O MenuItem miFileImport = new MenuItem(msFile, SWT.CASCADE); miFileImport.setText(Messages.getString("Spoon.Menu.File.Import")); //"&Import from an XML file\tCTRL-I" MenuItem miFileExport = new MenuItem(msFile, SWT.CASCADE); miFileExport.setText(Messages.getString("Spoon.Menu.File.Export")); //&Export to an XML file MenuItem miFileSave = new MenuItem(msFile, SWT.CASCADE); miFileSave.setText(Messages.getString("Spoon.Menu.File.Save")); //"&Save \tCTRL-S" MenuItem miFileSaveAs = new MenuItem(msFile, SWT.CASCADE); miFileSaveAs.setText(Messages.getString("Spoon.Menu.File.SaveAs")); //"Save &as..." 
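        // The remaining File menu entries (print, quit, the most-recently-used file list added by
        // addMenuLast()) and the listeners that wire each entry to its handler follow below.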
new MenuItem(msFile, SWT.SEPARATOR); MenuItem miFilePrint = new MenuItem(msFile, SWT.CASCADE); miFilePrint.setText(Messages.getString("Spoon.Menu.File.Print")); //"&Print \tCTRL-P" new MenuItem(msFile, SWT.SEPARATOR); MenuItem miFileQuit = new MenuItem(msFile, SWT.CASCADE); miFileQuit.setText(Messages.getString("Spoon.Menu.File.Quit")); //miFileQuit.setText("&Quit"); miFileSep3 = new MenuItem(msFile, SWT.SEPARATOR); addMenuLast(); Listener lsFileOpen = new Listener() { public void handleEvent(Event e) { openFile(false); } }; Listener lsFileImport = new Listener() { public void handleEvent(Event e) { openFile(true); } }; Listener lsFileExport = new Listener() { public void handleEvent(Event e) { saveXMLFile(); } }; Listener lsFileNew = new Listener() { public void handleEvent(Event e) { newFile(); } }; Listener lsFileSave = new Listener() { public void handleEvent(Event e) { saveFile(); } }; Listener lsFileSaveAs = new Listener() { public void handleEvent(Event e) { saveFileAs(); } }; Listener lsFilePrint = new Listener() { public void handleEvent(Event e) { printFile(); } }; Listener lsFileQuit = new Listener() { public void handleEvent(Event e) { quitFile(); } }; miFileOpen .addListener (SWT.Selection, lsFileOpen ); miFileImport .addListener (SWT.Selection, lsFileImport ); miFileExport .addListener (SWT.Selection, lsFileExport ); miFileNew .addListener (SWT.Selection, lsFileNew ); miFileSave .addListener (SWT.Selection, lsFileSave ); miFileSaveAs .addListener (SWT.Selection, lsFileSaveAs ); miFilePrint .addListener (SWT.Selection, lsFilePrint ); miFileQuit .addListener (SWT.Selection, lsFileQuit ); // main Edit menu... MenuItem mEdit = new MenuItem(mBar, SWT.CASCADE); mEdit.setText(Messages.getString("Spoon.Menu.Edit")); //&Edit Menu msEdit = new Menu(shell, SWT.DROP_DOWN); mEdit.setMenu(msEdit); miEditUndo = new MenuItem(msEdit, SWT.CASCADE); miEditRedo = new MenuItem(msEdit, SWT.CASCADE); setUndoMenu(); new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditSearch = new MenuItem(msEdit, SWT.CASCADE); miEditSearch.setText(Messages.getString("Spoon.Menu.Edit.Search")); //Search Metadata \tCTRL-F MenuItem miEditVars = new MenuItem(msEdit, SWT.CASCADE); miEditVars.setText(Messages.getString("Spoon.Menu.Edit.Variables")); //Edit/Enter variables \tCTRL-F new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditUnselectAll = new MenuItem(msEdit, SWT.CASCADE); miEditUnselectAll.setText(Messages.getString("Spoon.Menu.Edit.ClearSelection")); //&Clear selection \tESC MenuItem miEditSelectAll = new MenuItem(msEdit, SWT.CASCADE); miEditSelectAll.setText(Messages.getString("Spoon.Menu.Edit.SelectAllSteps")); //"&Select all steps \tCTRL-A" new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditCopy = new MenuItem(msEdit, SWT.CASCADE); miEditCopy.setText(Messages.getString("Spoon.Menu.Edit.CopyToClipboard")); //Copy selected steps to clipboard\tCTRL-C MenuItem miEditPaste = new MenuItem(msEdit, SWT.CASCADE); miEditPaste.setText(Messages.getString("Spoon.Menu.Edit.PasteFromClipboard")); //Paste steps from clipboard\tCTRL-V new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditRefresh = new MenuItem(msEdit, SWT.CASCADE); miEditRefresh.setText(Messages.getString("Spoon.Menu.Edit.Refresh")); //&Refresh \tF5 new MenuItem(msEdit, SWT.SEPARATOR); MenuItem miEditOptions = new MenuItem(msEdit, SWT.CASCADE); miEditOptions.setText(Messages.getString("Spoon.Menu.Edit.Options")); //&Options... 
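        // Listeners for the Edit menu entries are attached next, followed by the Repository menu
        // (connect, disconnect, explore repository, edit current user).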
Listener lsEditUndo = new Listener() { public void handleEvent(Event e) { undoAction(); } }; Listener lsEditRedo = new Listener() { public void handleEvent(Event e) { redoAction(); } }; Listener lsEditSearch = new Listener() { public void handleEvent(Event e) { searchMetaData(); } }; Listener lsEditVars = new Listener() { public void handleEvent(Event e) { getVariables(); } }; Listener lsEditUnselectAll = new Listener() { public void handleEvent(Event e) { editUnselectAll(); } }; Listener lsEditSelectAll = new Listener() { public void handleEvent(Event e) { editSelectAll(); } }; Listener lsEditOptions = new Listener() { public void handleEvent(Event e) { editOptions(); } }; miEditUndo .addListener(SWT.Selection, lsEditUndo); miEditRedo .addListener(SWT.Selection, lsEditRedo); miEditSearch .addListener(SWT.Selection, lsEditSearch); miEditVars .addListener(SWT.Selection, lsEditVars); miEditUnselectAll.addListener(SWT.Selection, lsEditUnselectAll); miEditSelectAll .addListener(SWT.Selection, lsEditSelectAll); miEditOptions .addListener(SWT.Selection, lsEditOptions); // main Repository menu... MenuItem mRep = new MenuItem(mBar, SWT.CASCADE); mRep.setText(Messages.getString("Spoon.Menu.Repository")); //&Repository Menu msRep = new Menu(shell, SWT.DROP_DOWN); mRep.setMenu(msRep); MenuItem miRepConnect = new MenuItem(msRep, SWT.CASCADE); miRepConnect.setText(Messages.getString("Spoon.Menu.Repository.ConnectToRepository")); //&Connect to repository \tCTRL-R MenuItem miRepDisconnect = new MenuItem(msRep, SWT.CASCADE); miRepDisconnect.setText(Messages.getString("Spoon.Menu.Repository.DisconnectRepository")); //&Disconnect repository \tCTRL-D MenuItem miRepExplore = new MenuItem(msRep, SWT.CASCADE); miRepExplore.setText(Messages.getString("Spoon.Menu.Repository.ExploreRepository")); //&Explore repository \tCTRL-E new MenuItem(msRep, SWT.SEPARATOR); MenuItem miRepUser = new MenuItem(msRep, SWT.CASCADE); miRepUser.setText(Messages.getString("Spoon.Menu.Repository.EditCurrentUser")); //&Edit current user\tCTRL-U Listener lsRepConnect = new Listener() { public void handleEvent(Event e) { openRepository(); } }; Listener lsRepDisconnect = new Listener() { public void handleEvent(Event e) { closeRepository(); } }; Listener lsRepExplore = new Listener() { public void handleEvent(Event e) { exploreRepository(); } }; Listener lsRepUser = new Listener() { public void handleEvent(Event e) { editRepositoryUser();} }; miRepConnect .addListener (SWT.Selection, lsRepConnect ); miRepDisconnect .addListener (SWT.Selection, lsRepDisconnect); miRepExplore .addListener (SWT.Selection, lsRepExplore ); miRepUser .addListener (SWT.Selection, lsRepUser ); // main Transformation menu... 
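        // All menu labels are resolved through Messages.getString(); the trailing comments show the
        // English text of each key, e.g. the Run entry is "&Run \tF9". An illustrative bundle entry
        // (bundle file name and exact key/value layout assumed, not taken from this source):
        //   Spoon.Menu.Transformation.Run=&Run \tF9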
MenuItem mTrans = new MenuItem(mBar, SWT.CASCADE); mTrans.setText(Messages.getString("Spoon.Menu.Transformation")); //&Transformation Menu msTrans = new Menu(shell, SWT.DROP_DOWN ); mTrans.setMenu(msTrans); MenuItem miTransRun = new MenuItem(msTrans, SWT.CASCADE); miTransRun .setText(Messages.getString("Spoon.Menu.Transformation.Run"));//&Run \tF9 MenuItem miTransPreview = new MenuItem(msTrans, SWT.CASCADE); miTransPreview.setText(Messages.getString("Spoon.Menu.Transformation.Preview"));//&Preview \tF10 MenuItem miTransCheck = new MenuItem(msTrans, SWT.CASCADE); miTransCheck .setText(Messages.getString("Spoon.Menu.Transformation.Verify"));//&Verify \tF11 MenuItem miTransImpact = new MenuItem(msTrans, SWT.CASCADE); miTransImpact .setText(Messages.getString("Spoon.Menu.Transformation.Impact"));//&Impact MenuItem miTransSQL = new MenuItem(msTrans, SWT.CASCADE); miTransSQL .setText(Messages.getString("Spoon.Menu.Transformation.GetSQL"));//&Get SQL new MenuItem(msTrans, SWT.SEPARATOR); MenuItem miLastImpact = new MenuItem(msTrans, SWT.CASCADE); miLastImpact .setText(Messages.getString("Spoon.Menu.Transformation.ShowLastImpactAnalyses"));//Show last impact analyses \tF6 MenuItem miLastCheck = new MenuItem(msTrans, SWT.CASCADE); miLastCheck .setText(Messages.getString("Spoon.Menu.Transformation.ShowLastVerifyResults"));//Show last verify results \tF7 MenuItem miLastPreview = new MenuItem(msTrans, SWT.CASCADE); miLastPreview .setText(Messages.getString("Spoon.Menu.Transformation.ShowLastPreviewResults"));//Show last preview results \tF8 new MenuItem(msTrans, SWT.SEPARATOR); MenuItem miTransCopy = new MenuItem(msTrans, SWT.CASCADE); miTransCopy .setText(Messages.getString("Spoon.Menu.Transformation.CopyTransformationToClipboard"));//&Copy transformation to clipboard MenuItem miTransPaste = new MenuItem(msTrans, SWT.CASCADE); miTransPaste .setText(Messages.getString("Spoon.Menu.Transformation.PasteTransformationFromClipboard"));//P&aste transformation from clipboard MenuItem miTransImage = new MenuItem(msTrans, SWT.CASCADE); miTransImage .setText(Messages.getString("Spoon.Menu.Transformation.CopyTransformationImageClipboard"));//Copy the transformation image clipboard \tCTRL-ALT-I new MenuItem(msTrans, SWT.SEPARATOR); MenuItem miTransDetails = new MenuItem(msTrans, SWT.CASCADE); miTransDetails.setText(Messages.getString("Spoon.Menu.Transformation.Settings"));//&Settings... 
\tCTRL-T Listener lsTransDetails = new Listener() { public void handleEvent(Event e) { setTrans(); } }; Listener lsTransRun = new Listener() { public void handleEvent(Event e) { tabfolder.setSelection(1); spoonlog.startstop(); } }; Listener lsTransPreview = new Listener() { public void handleEvent(Event e) { spoonlog.preview(); } }; Listener lsTransCheck = new Listener() { public void handleEvent(Event e) { checkTrans(); } }; Listener lsTransImpact = new Listener() { public void handleEvent(Event e) { analyseImpact(); } }; Listener lsTransSQL = new Listener() { public void handleEvent(Event e) { getSQL(); } }; Listener lsLastPreview = new Listener() { public void handleEvent(Event e) { spoonlog.showPreview(); } }; Listener lsLastCheck = new Listener() { public void handleEvent(Event e) { showLastTransCheck(); } }; Listener lsLastImpact = new Listener() { public void handleEvent(Event e) { showLastImpactAnalyses(); } }; Listener lsTransCopy = new Listener() { public void handleEvent(Event e) { copyTransformation(); } }; Listener lsTransImage = new Listener() { public void handleEvent(Event e) { copyTransformationImage(); } }; Listener lsTransPaste = new Listener() { public void handleEvent(Event e) { pasteTransformation(); } }; miTransDetails.addListener(SWT.Selection, lsTransDetails); miTransRun .addListener(SWT.Selection, lsTransRun); miTransPreview.addListener(SWT.Selection, lsTransPreview); miTransCheck .addListener(SWT.Selection, lsTransCheck); miTransImpact .addListener(SWT.Selection, lsTransImpact); miTransSQL .addListener(SWT.Selection, lsTransSQL); miLastPreview .addListener(SWT.Selection, lsLastPreview); miLastCheck .addListener(SWT.Selection, lsLastCheck); miLastImpact .addListener(SWT.Selection, lsLastImpact); miTransCopy .addListener(SWT.Selection, lsTransCopy); miTransPaste .addListener(SWT.Selection, lsTransPaste); miTransImage .addListener(SWT.Selection, lsTransImage); // Wizard menu MenuItem mWizard = new MenuItem(mBar, SWT.CASCADE); mWizard.setText(Messages.getString("Spoon.Menu.Wizard")); //"&Wizard" Menu msWizard = new Menu(shell, SWT.DROP_DOWN ); mWizard.setMenu(msWizard); MenuItem miWizardNewConnection = new MenuItem(msWizard, SWT.CASCADE); miWizardNewConnection.setText(Messages.getString("Spoon.Menu.Wizard.CreateDatabaseConnectionWizard"));//&Create database connection wizard...\tF3 Listener lsWizardNewConnection= new Listener() { public void handleEvent(Event e) { createDatabaseWizard(); } }; miWizardNewConnection.addListener(SWT.Selection, lsWizardNewConnection); MenuItem miWizardCopyTable = new MenuItem(msWizard, SWT.CASCADE); miWizardCopyTable.setText(Messages.getString("Spoon.Menu.Wizard.CopyTableWizard"));//&Copy table wizard...\tF4 Listener lsWizardCopyTable= new Listener() { public void handleEvent(Event e) { copyTableWizard(); } }; miWizardCopyTable.addListener(SWT.Selection, lsWizardCopyTable); // main Help menu... 
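        // The Help menu built below offers the credits dialog, the "tip of the day" dialog and the
        // about box; each entry simply opens the corresponding dialog.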
MenuItem mHelp = new MenuItem(mBar, SWT.CASCADE); mHelp.setText(Messages.getString("Spoon.Menu.Help")); //"&Help" Menu msHelp = new Menu(shell, SWT.DROP_DOWN ); mHelp.setMenu(msHelp); MenuItem miHelpCredit = new MenuItem(msHelp, SWT.CASCADE); miHelpCredit.setText(Messages.getString("Spoon.Menu.Help.Credits"));//&Credits Listener lsHelpCredit = new Listener() { public void handleEvent(Event e) { ShowCreditsDialog scd = new ShowCreditsDialog(shell, props, GUIResource.getInstance().getImageCredits()); scd.open(); } }; miHelpCredit.addListener (SWT.Selection, lsHelpCredit ); MenuItem miHelpTOTD = new MenuItem(msHelp, SWT.CASCADE); miHelpTOTD.setText(Messages.getString("Spoon.Menu.Help.Tip"));//&Tip of the day Listener lsHelpTOTD = new Listener() { public void handleEvent(Event e) { TipsDialog td = new TipsDialog(shell, props); td.open(); } }; miHelpTOTD.addListener (SWT.Selection, lsHelpTOTD ); new MenuItem(msHelp, SWT.SEPARATOR); MenuItem miHelpAbout = new MenuItem(msHelp, SWT.CASCADE); miHelpAbout.setText(Messages.getString("Spoon.Menu.About"));//"&About" Listener lsHelpAbout = new Listener() { public void handleEvent(Event e) { helpAbout(); } }; miHelpAbout.addListener (SWT.Selection, lsHelpAbout ); } private void addMenuLast() { int idx = msFile.indexOf(miFileSep3); int max = msFile.getItemCount(); // Remove everything until end... for (int i=max-1;i>idx;i--) { MenuItem mi = msFile.getItem(i); mi.dispose(); } // Previously loaded files... String lf[] = props.getLastFiles(); String ld[] = props.getLastDirs(); boolean lt[] = props.getLastTypes(); String lr[] = props.getLastRepositories(); for (int i=0;i<lf.length;i++) { MenuItem miFileLast = new MenuItem(msFile, SWT.CASCADE); char chr = (char)('1'+i ); int accel = SWT.CTRL | chr; String repository = ( lr[i]!=null && lr[i].length()>0 ) ? ( "["+lr[i]+"] " ) : ""; String filename = RepositoryDirectory.DIRECTORY_SEPARATOR + lf[i]; if (!lt[i]) filename = lf[i]; if (!ld[i].equals(RepositoryDirectory.DIRECTORY_SEPARATOR)) { filename=ld[i]+filename; } if (i<9) { miFileLast.setAccelerator(accel); miFileLast.setText("&"+chr+" "+repository+filename+ "\tCTRL-"+chr); } else { miFileLast.setText(" "+repository+filename); } final String fn = lf[i]; // filename final String fd = ld[i]; // Repository directory ... final boolean ft = lt[i]; // type: true=repository, false=file final String fr = lr[i]; // repository name Listener lsFileLast = new Listener() { public void handleEvent(Event e) { if (showChangedWarning()) { // If the file comes from a repository and it's not the same as // the one we're connected to, ask for a username/password! // boolean noRepository=false; if (ft && (rep==null || !rep.getRepositoryInfo().getName().equalsIgnoreCase(fr) )) { int perms[] = new int[] { PermissionMeta.TYPE_PERMISSION_TRANSFORMATION }; RepositoriesDialog rd = new RepositoriesDialog(disp, SWT.NONE, perms, Messages.getString("Spoon.Application.Name")); //RepositoriesDialog.ToolName="Spoon" rd.setRepositoryName(fr); if (rd.open()) { // Close the previous connection... 
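                        // When the repositories dialog is confirmed, any previous connection is closed first and a
                        // new Repository is connected under APP_NAME; if the connect fails, rep is reset to null and
                        // an ErrorDialog is shown.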
if (rep!=null) rep.disconnect(); rep = new Repository(log, rd.getRepository(), rd.getUser()); try { rep.connect(APP_NAME); } catch(KettleException ke) { rep=null; new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnableConnectRepository.Title"), Messages.getString("Spoon.Dialog.UnableConnectRepository.Message"), ke); //$NON-NLS-1$ //$NON-NLS-2$ } } else { noRepository=true; } } if (ft) { if (!noRepository && rep!=null && rep.getRepositoryInfo().getName().equalsIgnoreCase(fr)) { // OK, we're connected to the new repository... // Load the transformation... RepositoryDirectory fdRepdir = rep.getDirectoryTree().findDirectory(fd); TransLoadProgressDialog tlpd = new TransLoadProgressDialog(shell, rep, fn, fdRepdir); TransMeta transInfo = tlpd.open(); if (transInfo!=null) { transMeta = transInfo; transMeta.clearChanged(); props.addLastFile(Props.TYPE_PROPERTIES_SPOON, fn, fdRepdir.getPath(), true, rep.getName()); } } else { clear(); MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.UnableLoadTransformation.Message"));//Can't load this transformation. Please connect to the correct repository first. mb.setText(Messages.getString("Spoon.Dialog.UnableLoadTransformation.Title"));//Error! mb.open(); } } else // Load from XML! { try { transMeta = new TransMeta(fn); transMeta.clearChanged(); transMeta.setFilename(fn); props.addLastFile(Props.TYPE_PROPERTIES_SPOON, fn, null, false, null); } catch(KettleException ke) { clear(); //"Error loading transformation", "I was unable to load this transformation from the XML file because of an error" new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.LoadTransformationError.Title"), Messages.getString("Spoon.Dialog.LoadTransformationError.Message"), ke); } } setShellText(); addMenuLast(); refreshTree(); refreshGraph(); refreshHistory(); } } }; miFileLast.addListener(SWT.Selection, lsFileLast); } } private void addBar() { tBar = new ToolBar(shell, SWT.HORIZONTAL | SWT.FLAT ); // props.setLook(tBar); final ToolItem tiFileNew = new ToolItem(tBar, SWT.PUSH); final Image imFileNew = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"new.png")); tiFileNew.setImage(imFileNew); tiFileNew.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { newFile(); }}); tiFileNew.setToolTipText(Messages.getString("Spoon.Tooltip.NewTranformation"));//New transformation, clear all settings final ToolItem tiFileOpen = new ToolItem(tBar, SWT.PUSH); final Image imFileOpen = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"open.png")); tiFileOpen.setImage(imFileOpen); tiFileOpen.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { openFile(false); }}); tiFileOpen.setToolTipText(Messages.getString("Spoon.Tooltip.OpenTranformation"));//Open tranformation final ToolItem tiFileSave = new ToolItem(tBar, SWT.PUSH); final Image imFileSave = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"save.png")); tiFileSave.setImage(imFileSave); tiFileSave.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { saveFile(); }}); tiFileSave.setToolTipText(Messages.getString("Spoon.Tooltip.SaveCurrentTranformation"));//Save current transformation final ToolItem tiFileSaveAs = new ToolItem(tBar, SWT.PUSH); final Image imFileSaveAs = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"saveas.png")); tiFileSaveAs.setImage(imFileSaveAs); 
tiFileSaveAs.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { saveFileAs(); }}); tiFileSaveAs.setToolTipText(Messages.getString("Spoon.Tooltip.SaveDifferentNameTranformation"));//Save transformation with different name new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiFilePrint = new ToolItem(tBar, SWT.PUSH); final Image imFilePrint = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"print.png")); tiFilePrint.setImage(imFilePrint); tiFilePrint.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { printFile(); }}); tiFilePrint.setToolTipText(Messages.getString("Spoon.Tooltip.Print"));//Print new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiFileRun = new ToolItem(tBar, SWT.PUSH); final Image imFileRun = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"run.png")); tiFileRun.setImage(imFileRun); tiFileRun.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { tabfolder.setSelection(1); spoonlog.startstop(); }}); tiFileRun.setToolTipText(Messages.getString("Spoon.Tooltip.RunTranformation"));//Run this transformation final ToolItem tiFilePreview = new ToolItem(tBar, SWT.PUSH); final Image imFilePreview = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"preview.png")); tiFilePreview.setImage(imFilePreview); tiFilePreview.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { spoonlog.preview(); }}); tiFilePreview.setToolTipText(Messages.getString("Spoon.Tooltip.PreviewTranformation"));//Preview this transformation final ToolItem tiFileReplay = new ToolItem(tBar, SWT.PUSH); final Image imFileReplay = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"replay.png")); tiFileReplay.setImage(imFileReplay); tiFileReplay.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { tabfolder.setSelection(1); spoonlog.startstopReplay(); }}); tiFileReplay.setToolTipText("Replay this transformation"); new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiFileCheck = new ToolItem(tBar, SWT.PUSH); final Image imFileCheck = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"check.png")); tiFileCheck.setImage(imFileCheck); tiFileCheck.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { checkTrans(); }}); tiFileCheck.setToolTipText(Messages.getString("Spoon.Tooltip.VerifyTranformation"));//Verify this transformation new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiImpact = new ToolItem(tBar, SWT.PUSH); final Image imImpact = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"impact.png")); // Can't seem to get the transparency correct for this image! 
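        // Workaround for the transparency problem mentioned above: the white background pixel is
        // marked transparent by hand on the ImageData before a second Image is created from it.
        // The same pattern is used for both the impact and the SQL toolbar buttons, equivalent to:
        //   idImpact.transparentPixel = idImpact.palette.getPixel(new RGB(255, 255, 255));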
ImageData idImpact = imImpact.getImageData(); int impactPixel = idImpact.palette.getPixel(new RGB(255, 255, 255)); idImpact.transparentPixel = impactPixel; Image imImpact2 = new Image(disp, idImpact); tiImpact.setImage(imImpact2); tiImpact.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { analyseImpact(); }}); tiImpact.setToolTipText(Messages.getString("Spoon.Tooltip.AnalyzeTranformation"));//Analyze the impact of this transformation on the database(s) new ToolItem(tBar, SWT.SEPARATOR); final ToolItem tiSQL = new ToolItem(tBar, SWT.PUSH); final Image imSQL = new Image(disp, getClass().getResourceAsStream(Const.IMAGE_DIRECTORY+"SQLbutton.png")); // Can't seem to get the transparency correct for this image! ImageData idSQL = imSQL.getImageData(); int sqlPixel= idSQL.palette.getPixel(new RGB(255, 255, 255)); idSQL.transparentPixel = sqlPixel; Image imSQL2= new Image(disp, idSQL); tiSQL.setImage(imSQL2); tiSQL.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { getSQL(); }}); tiSQL.setToolTipText(Messages.getString("Spoon.Tooltip.GenerateSQLForTranformation"));//Generate the SQL needed to run this transformation tBar.addDisposeListener(new DisposeListener() { public void widgetDisposed(DisposeEvent e) { imFileNew.dispose(); imFileOpen.dispose(); imFileSave.dispose(); imFileSaveAs.dispose(); } } ); tBar.addKeyListener(defKeys); tBar.addKeyListener(modKeys); tBar.pack(); } private void addTree() { if (leftSash!=null) { leftSash.dispose(); } // Split the left side of the screen in half leftSash = new SashForm(sashform, SWT.VERTICAL); // Now set up the main CSH tree selectionTree = new Tree(leftSash, SWT.SINGLE | SWT.BORDER); props.setLook(selectionTree); selectionTree.setLayout(new FillLayout()); tiConn = new TreeItem(selectionTree, SWT.NONE); tiConn.setText(STRING_CONNECTIONS); tiStep = new TreeItem(selectionTree, SWT.NONE); tiStep.setText(STRING_STEPS); tiHops = new TreeItem(selectionTree, SWT.NONE); tiHops.setText(STRING_HOPS); tiBase = new TreeItem(selectionTree, SWT.NONE); tiBase.setText(STRING_BASE); tiPlug = new TreeItem(selectionTree, SWT.NONE); tiPlug.setText(STRING_PLUGIN); // Fill the base components... StepLoader steploader = StepLoader.getInstance(); StepPlugin basesteps[] = steploader.getStepsWithType(StepPlugin.TYPE_NATIVE); String basecat[] = steploader.getCategories(StepPlugin.TYPE_NATIVE); TreeItem tiBaseCat[] = new TreeItem[basecat.length]; for (int i=0;i<basecat.length;i++) { tiBaseCat[i] = new TreeItem(tiBase, SWT.NONE); tiBaseCat[i].setText(basecat[i]); for (int j=0;j<basesteps.length;j++) { if (basesteps[j].getCategory().equalsIgnoreCase(basecat[i])) { TreeItem ti = new TreeItem(tiBaseCat[i], 0); ti.setText(basesteps[j].getDescription()); } } } // Show the plugins... 
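        // Plugin step types are grouped per category below, mirroring the loop used for the native
        // ("base") step types above.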
StepPlugin plugins[] = steploader.getStepsWithType(StepPlugin.TYPE_PLUGIN); String plugcat[] = steploader.getCategories(StepPlugin.TYPE_PLUGIN); TreeItem tiPlugCat[] = new TreeItem[plugcat.length]; for (int i=0;i<plugcat.length;i++) { tiPlugCat[i] = new TreeItem(tiPlug, SWT.NONE); tiPlugCat[i].setText(plugcat[i]); for (int j=0;j<plugins.length;j++) { if (plugins[j].getCategory().equalsIgnoreCase(plugcat[i])) { TreeItem ti = new TreeItem(tiPlugCat[i], 0); ti.setText(plugins[j].getDescription()); } } } tiConn.setExpanded(true); tiStep.setExpanded(false); tiBase.setExpanded(true); tiPlug.setExpanded(true); addToolTipsToTree(selectionTree); // Popup-menu selection lsNew = new Listener() { public void handleEvent(Event e) { newSelected(); } }; lsEdit = new Listener() { public void handleEvent(Event e) { editSelected(); } }; lsDupe = new Listener() { public void handleEvent(Event e) { dupeSelected(); } }; lsCopy = new Listener() { public void handleEvent(Event e) { clipSelected(); } }; lsDel = new Listener() { public void handleEvent(Event e) { delSelected(); } }; lsSQL = new Listener() { public void handleEvent(Event e) { sqlSelected(); } }; lsCache = new Listener() { public void handleEvent(Event e) { clearDBCache(); } }; lsExpl = new Listener() { public void handleEvent(Event e) { exploreDB(); } }; // Default selection (double-click, enter) lsEditDef = new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e){ editSelected(); } }; //lsNewDef = new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e){ newSelected(); } }; lsEditSel = new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { setMenu(e); } }; // Add all the listeners... selectionTree.addSelectionListener(lsEditDef); // double click somewhere in the tree... //tCSH.addSelectionListener(lsNewDef); // double click somewhere in the tree... selectionTree.addSelectionListener(lsEditSel); // Keyboard shortcuts! selectionTree.addKeyListener(defKeys); selectionTree.addKeyListener(modKeys); // Set a listener on the tree addDragSourceToTree(selectionTree); // OK, now add a list of often-used icons to the bottom of the tree... 
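        // The lower half of the sash shows the step-creation history tree; it reuses the same
        // tooltip and drag-source helpers as the main tree, and the vertical split is set to
        // roughly 70/30.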
pluginHistoryTree = new Tree(leftSash, SWT.SINGLE ); // Add tooltips for history tree too addToolTipsToTree(pluginHistoryTree); // Set the same listener on this tree addDragSourceToTree(pluginHistoryTree); leftSash.setWeights(new int[] { 70, 30 } ); } private void addToolTipsToTree(Tree tree) { tree.addListener(SWT.MouseHover, new Listener() { public void handleEvent(Event e) { String tooltip=null; Tree tree = (Tree)e.widget; TreeItem item = tree.getItem(new org.eclipse.swt.graphics.Point(e.x, e.y)); if (item!=null) { StepLoader steploader = StepLoader.getInstance(); StepPlugin sp = steploader.findStepPluginWithDescription(item.getText()); if (sp!=null) { tooltip = sp.getTooltip(); } else if (item.getText().equalsIgnoreCase(STRING_BASE) || item.getText().equalsIgnoreCase(STRING_PLUGIN) ) { tooltip=Messages.getString("Spoon.Tooltip.SelectStepType",Const.CR); //"Select one of the step types listed below and"+Const.CR+"drag it onto the graphical view tab to the right."; } } tree.setToolTipText(tooltip); } } ); } private void addDragSourceToTree(Tree tree) { final Tree fTree = tree; // Drag & Drop for steps Transfer[] ttypes = new Transfer[] { XMLTransfer.getInstance() }; DragSource ddSource = new DragSource(fTree, DND.DROP_MOVE); ddSource.setTransfer(ttypes); ddSource.addDragListener(new DragSourceListener() { public void dragStart(DragSourceEvent event){ } public void dragSetData(DragSourceEvent event) { TreeItem ti[] = fTree.getSelection(); if (ti.length>0) { String data = null; int type = 0; String ts[] = Const.getTreeStrings(ti[0]); if (ts!=null && ts.length > 0) { // Drop of existing hidden step onto canvas? if (ts[0].equalsIgnoreCase(STRING_STEPS)) { type = DragAndDropContainer.TYPE_STEP; data=ti[0].getText(); // name of the step. } else if ( ts[0].equalsIgnoreCase(STRING_BASE) || ts[0].equalsIgnoreCase(STRING_PLUGIN) || ts[0].equalsIgnoreCase(STRING_HISTORY) ) { type = DragAndDropContainer.TYPE_BASE_STEP_TYPE; data=ti[0].getText(); // Step type } else if (ts[0].equalsIgnoreCase(STRING_CONNECTIONS)) { type = DragAndDropContainer.TYPE_DATABASE_CONNECTION; data=ti[0].getText(); // Database connection name to use } else if (ts[0].equalsIgnoreCase(STRING_HOPS)) { type = DragAndDropContainer.TYPE_TRANS_HOP; data=ti[0].getText(); // nothing for really ;-) } else { event.doit=false; return; // ignore anything else you drag. 
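                        // For valid selections the node type and text are wrapped below in a DragAndDropContainer
                        // and handed to SWT as the XMLTransfer drag payload.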
} event.data = new DragAndDropContainer(type, data); } } else // Nothing got dragged, only can happen on OSX :-) { event.doit=false; } } public void dragFinished(DragSourceEvent event) {} } ); } public void refreshPluginHistory() { pluginHistoryTree.removeAll(); TreeItem tiMain = new TreeItem(pluginHistoryTree, SWT.NONE); tiMain.setText(STRING_HISTORY); List pluginHistory = props.getPluginHistory(); for (int i=0;i<pluginHistory.size();i++) { String pluginID = (String)pluginHistory.get(i); StepPlugin stepPlugin = StepLoader.getInstance().findStepPluginWithID(pluginID); if (stepPlugin!=null) { Image image = (Image) GUIResource.getInstance().getImagesSteps().get(pluginID); TreeItem ti = new TreeItem(tiMain, SWT.NONE); ti.setText(stepPlugin.getDescription()); ti.setImage(image); } } tiMain.setExpanded(true); } private void setMenu(SelectionEvent e) { TreeItem ti = (TreeItem)e.item; String strti = ti.getText(); Tree root = ti.getParent(); log.logDebug(toString(), Messages.getString("Spoon.Log.ClickedOn") +ti.getText());//Clicked on TreeItem sel[] = root.getSelection(); Menu mCSH = new Menu(shell, SWT.POP_UP); // Find the level we clicked on: Top level (only NEW in the menu) or below (edit, insert, ...) TreeItem parent = ti.getParentItem(); if (parent==null) // Top level { if (!strti.equalsIgnoreCase(STRING_BASE) && !strti.equalsIgnoreCase(STRING_PLUGIN)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.BASE.New"));//"New" miNew.addListener( SWT.Selection, lsNew ); } if (strti.equalsIgnoreCase(STRING_STEPS)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.STEPS.SortSteps"));//Sort steps miNew.addSelectionListener( new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { transMeta.sortSteps(); refreshTree(true); } }); } if (strti.equalsIgnoreCase(STRING_HOPS)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.HOPS.SortHops"));//Sort hops miNew.addSelectionListener( new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { transMeta.sortHops(); refreshTree(true); } }); } if (strti.equalsIgnoreCase(STRING_CONNECTIONS)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.NewConnectionWizard"));//New Connection Wizard miNew.addSelectionListener( new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { createDatabaseWizard(); } } ); MenuItem miCache = new MenuItem(mCSH, SWT.PUSH); miCache.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.ClearDBCache"));//Clear complete DB Cache miCache.addListener( SWT.Selection, lsCache ); } } else { String strparent = parent.getText(); if (strparent.equalsIgnoreCase(STRING_CONNECTIONS)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.New"));//New MenuItem miEdit = new MenuItem(mCSH, SWT.PUSH); miEdit.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.Edit"));//Edit MenuItem miDupe = new MenuItem(mCSH, SWT.PUSH); miDupe.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.Duplicate"));//Duplicate MenuItem miCopy = new MenuItem(mCSH, SWT.PUSH); miCopy.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.CopyToClipboard"));//Copy to clipboard MenuItem miDel = new MenuItem(mCSH, SWT.PUSH); miDel.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.Delete"));//Delete new MenuItem(mCSH, SWT.SEPARATOR); MenuItem miSQL = new 
MenuItem(mCSH, SWT.PUSH); miSQL.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.SQLEditor"));//SQL Editor MenuItem miCache= new MenuItem(mCSH, SWT.PUSH); miCache.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.ClearDBCache")+ti.getText());//Clear DB Cache of new MenuItem(mCSH, SWT.SEPARATOR); MenuItem miExpl = new MenuItem(mCSH, SWT.PUSH); miExpl.setText(Messages.getString("Spoon.Menu.Popup.CONNECTIONS.Explore"));//Explore // disable for now if the connection is an SAP R/3 type of database... DatabaseMeta dbMeta = transMeta.findDatabase(strti); if (dbMeta==null || dbMeta.getDatabaseType()==DatabaseMeta.TYPE_DATABASE_SAPR3) miExpl.setEnabled(false); miNew.addListener( SWT.Selection, lsNew ); miEdit.addListener(SWT.Selection, lsEdit ); miDupe.addListener(SWT.Selection, lsDupe ); miCopy.addListener(SWT.Selection, lsCopy ); miDel.addListener(SWT.Selection, lsDel ); miSQL.addListener(SWT.Selection, lsSQL ); miCache.addListener(SWT.Selection, lsCache); miExpl.addListener(SWT.Selection, lsExpl); } if (strparent.equalsIgnoreCase(STRING_STEPS)) { if (sel.length==2) { MenuItem miNewHop = new MenuItem(mCSH, SWT.PUSH); miNewHop.setText(Messages.getString("Spoon.Menu.Popup.STEPS.NewHop"));//New Hop miNewHop.addListener(SWT.Selection, lsNew); } MenuItem miEdit = new MenuItem(mCSH, SWT.PUSH); miEdit.setText(Messages.getString("Spoon.Menu.Popup.STEPS.Edit"));//Edit MenuItem miDupe = new MenuItem(mCSH, SWT.PUSH); miDupe.setText(Messages.getString("Spoon.Menu.Popup.STEPS.Duplicate"));//Duplicate MenuItem miDel = new MenuItem(mCSH, SWT.PUSH); miDel.setText(Messages.getString("Spoon.Menu.Popup.STEPS.Delete"));//Delete miEdit.addListener(SWT.Selection, lsEdit ); miDupe.addListener(SWT.Selection, lsDupe ); miDel.addListener(SWT.Selection, lsDel ); } if (strparent.equalsIgnoreCase(STRING_HOPS)) { MenuItem miEdit = new MenuItem(mCSH, SWT.PUSH); miEdit.setText(Messages.getString("Spoon.Menu.Popup.HOPS.Edit"));//Edit MenuItem miDel = new MenuItem(mCSH, SWT.PUSH); miDel.setText(Messages.getString("Spoon.Menu.Popup.HOPS.Delete"));//Delete miEdit.addListener( SWT.Selection, lsEdit ); miDel.addListener ( SWT.Selection, lsDel ); } TreeItem grandparent = parent.getParentItem(); if (grandparent!=null) { String strgrandparent = grandparent.getText(); if (strgrandparent.equalsIgnoreCase(STRING_BASE) || strgrandparent.equalsIgnoreCase(STRING_PLUGIN)) { MenuItem miNew = new MenuItem(mCSH, SWT.PUSH); miNew.setText(Messages.getString("Spoon.Menu.Popup.BASE_PLUGIN.New"));//New miNew.addListener( SWT.Selection, lsNew ); } } } selectionTree.setMenu(mCSH); } private void addTabs() { if (tabComp!=null) { tabComp.dispose(); } tabComp = new Composite(sashform, SWT.BORDER ); props.setLook(tabComp); FormLayout childLayout = new FormLayout(); childLayout.marginWidth = 0; childLayout.marginHeight = 0; tabComp.setLayout(childLayout); tabfolder= new CTabFolder(tabComp, SWT.BORDER); props.setLook(tabfolder, Props.WIDGET_STYLE_TAB); FormData fdTabfolder = new FormData(); fdTabfolder.left = new FormAttachment(0, 0); fdTabfolder.right = new FormAttachment(100, 0); fdTabfolder.top = new FormAttachment(0, 0); fdTabfolder.bottom = new FormAttachment(100, 0); tabfolder.setLayoutData(fdTabfolder); CTabItem tiTabsGraph = new CTabItem(tabfolder, SWT.NONE); tiTabsGraph.setText(Messages.getString("Spoon.Title.GraphicalView"));//"Graphical view" tiTabsGraph.setToolTipText(Messages.getString("Spoon.Tooltip.DisplaysTransformationGraphical"));//Displays the transformation graphically. 
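        // Two more tabs are added below (log view and run history). SpoonGraph, SpoonLog and
        // SpoonHistory are created as their contents, and a SpoonHistoryRefresher is registered as
        // a tab-folder selection listener so the history view can be refreshed when its tab is
        // selected.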
CTabItem tiTabsList = new CTabItem(tabfolder, SWT.NULL); tiTabsList.setText(Messages.getString("Spoon.Title.LogView"));//Log view tiTabsList.setToolTipText(Messages.getString("Spoon.Tooltip.DisplaysTransformationLog"));//Displays the log of the running transformation. CTabItem tiTabsHist = new CTabItem(tabfolder, SWT.NULL); tiTabsHist.setText(Messages.getString("Spoon.Title.LogHistory"));//Log view tiTabsHist.setToolTipText(Messages.getString("Spoon.Tooltip.DisplaysHistoryLogging"));//Displays the history of previous transformation runs. spoongraph = new SpoonGraph(tabfolder, SWT.V_SCROLL | SWT.H_SCROLL | SWT.NO_BACKGROUND, log, this); spoonlog = new SpoonLog(tabfolder, SWT.NONE, this, log, null); spoonhist = new SpoonHistory(tabfolder, SWT.NONE, this, log, null, spoonlog, shell); tabfolder.addKeyListener(defKeys); tabfolder.addKeyListener(modKeys); SpoonHistoryRefresher spoonHistoryRefresher = new SpoonHistoryRefresher(tiTabsHist, spoonhist); tabfolder.addSelectionListener(spoonHistoryRefresher); spoonlog.setSpoonHistoryRefresher(spoonHistoryRefresher); tiTabsGraph.setControl(spoongraph); tiTabsList.setControl(spoonlog); tiTabsHist.setControl(spoonhist); tabfolder.setSelection(0); sashform.addKeyListener(defKeys); sashform.addKeyListener(modKeys); int weights[] = props.getSashWeights(); sashform.setWeights(weights); sashform.setVisible(true); } public String getRepositoryName() { if (rep==null) return null; return rep.getRepositoryInfo().getName(); } public void newSelected() { log.logDebug(toString(), Messages.getString("Spoon.Log.NewSelected"));//"New Selected" // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call newConnection or newTrans if (ti.length>=1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent == null) { log.logDebug(toString(), Messages.getString("Spoon.Log.ElementHasNoParent"));//Element has no parent if (name.equalsIgnoreCase(STRING_CONNECTIONS)) newConnection(); if (name.equalsIgnoreCase(STRING_HOPS )) newHop(); if (name.equalsIgnoreCase(STRING_STEPS )) { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.WarningCreateNewSteps.Message"));//Please use the 'Base step types' below to create new steps. mb.setText(Messages.getString("Spoon.Dialog.WarningCreateNewSteps.Title"));//Tip! mb.open(); } //refreshTree(); } else { String strparent = parent.getText(); log.logDebug(toString(), Messages.getString("Spoon.Log.ElementHasParent")+strparent);//Element has parent: if (strparent.equalsIgnoreCase(STRING_CONNECTIONS)) newConnection(); if (strparent.equalsIgnoreCase(STRING_STEPS )) { log.logDebug(toString(), Messages.getString("Spoon.Log.NewHop"));//New hop! StepMeta from = transMeta.findStep( ti[0].getText() ); StepMeta to = transMeta.findStep( ti[1].getText() ); if (from!=null && to!=null) newHop(from, to); } TreeItem grandparent = parent.getParentItem(); if (grandparent!=null) { String strgrandparent = grandparent.getText(); if (strgrandparent.equalsIgnoreCase(STRING_BASE) || strgrandparent.equalsIgnoreCase(STRING_PLUGIN)) { newStep(); } } } } } public void editSelected() { // Determine what menu we selected from... 
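    // editSelected() dispatches on the parent tree node: connections, steps and hops are edited in
    // place, while a selection under the base/plugin step-type categories creates a new step; a
    // top-level selection falls back to creating a new connection or hop.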
TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { log.logDebug(toString(), Messages.getString("Spoon.Log.EDIT.ElementHasParent"));//(EDIT) Element has parent. String strparent = parent.getText(); if (strparent.equalsIgnoreCase(STRING_CONNECTIONS)) editConnection(name); if (strparent.equalsIgnoreCase(STRING_STEPS )) editStep(name); if (strparent.equalsIgnoreCase(STRING_HOPS )) editHop(name); TreeItem grandparent = parent.getParentItem(); if (grandparent!=null) { String strgrandparent = grandparent.getText(); if (strgrandparent.equalsIgnoreCase(STRING_BASE ) || strgrandparent.equalsIgnoreCase(STRING_PLUGIN ) ) { newStep(); } } } else { log.logDebug(toString(), Messages.getString("Spoon.Log.ElementHasNoParent"));//Element has no parent if (name.equalsIgnoreCase(STRING_CONNECTIONS)) newConnection(); if (name.equalsIgnoreCase(STRING_HOPS )) newHop(); } } } public void dupeSelected() { // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { log.logDebug(toString(), Messages.getString("Spoon.Log.DUPE.ElementHasParent"));//"(DUPE) Element has parent." String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) dupeConnection(name); if (type.equalsIgnoreCase(STRING_STEPS )) dupeStep(name); } } } /** * Copy selected tree item to the clipboard in XML format * */ public void clipSelected() { // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { log.logDebug(toString(), Messages.getString("Spoon.Log.DUPE.ElementHasParent"));//"(DUPE) Element has parent." String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) clipConnection(name); if (type.equalsIgnoreCase(STRING_STEPS )) clipStep(name); } } } public void delSelected() { // Determine what menu we selected from... int i; TreeItem ti[] = selectionTree.getSelection(); String name[] = new String[ti.length]; TreeItem parent[] = new TreeItem[ti.length]; for (i=0;i<ti.length;i++) { name[i] = ti[i].getText(); parent[i] = ti[i].getParentItem(); } // Then call editConnection or editStep or editTrans for (i=name.length-1;i>=0;i--) { log.logDebug(toString(), Messages.getString("Spoon.Log.DELETE.TryToDelete")+"#"+i+"/"+(ti.length-1)+" : "+name[i]);//(DELETE) Trying to delete if (parent[i] != null) { String type = parent[i].getText(); log.logDebug(toString(), Messages.getString("Spoon.Log.DELETE.ElementHasParent")+type);//(DELETE) Element has parent: if (type.equalsIgnoreCase(STRING_CONNECTIONS)) delConnection(name[i]); if (type.equalsIgnoreCase(STRING_STEPS )) delStep(name[i]); if (type.equalsIgnoreCase(STRING_HOPS )) delHop(name[i]); } } } public void sqlSelected() { // Determine what menu we selected from... 
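    // sqlSelected() opens an SQLEditor for every selected connection, using that connection's
    // DatabaseMeta and the transformation's shared database cache.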
int i; TreeItem ti[] = selectionTree.getSelection(); for (i=0;i<ti.length;i++) { String name = ti[i].getText(); TreeItem parent = ti[i].getParentItem(); String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) { DatabaseMeta ci = transMeta.findDatabase(name); SQLEditor sql = new SQLEditor(shell, SWT.NONE, ci, transMeta.getDbCache(), ""); sql.open(); } } } public void editConnection(String name) { DatabaseMeta db = transMeta.findDatabase(name); if (db!=null) { DatabaseMeta before = (DatabaseMeta)db.clone(); DatabaseDialog con = new DatabaseDialog(shell, SWT.NONE, log, db, props); con.setDatabases(transMeta.getDatabases()); String newname = con.open(); if (newname != null && newname.length()>0) // null: CANCEL { // Store undo/redo information DatabaseMeta after = (DatabaseMeta)db.clone(); addUndoChange(new DatabaseMeta[] { before }, new DatabaseMeta[] { after }, new int[] { transMeta.indexOfDatabase(db) } ); saveConnection(db); // The connection is saved, clear the changed flag. db.setChanged(false); if (!name.equalsIgnoreCase(newname)) refreshTree(true); } } setShellText(); } public void dupeConnection(String name) { DatabaseMeta db = transMeta.findDatabase(name); int pos = transMeta.indexOfDatabase(db); if (db!=null) { DatabaseMeta newdb = (DatabaseMeta)db.clone(); String dupename = Messages.getString("Spoon.Various.DupeName") +name; //"(copy of) " newdb.setName(dupename); transMeta.addDatabase(pos+1, newdb); refreshTree(); DatabaseDialog con = new DatabaseDialog(shell, SWT.NONE, log, newdb, props); String newname = con.open(); if (newname != null) // null: CANCEL { transMeta.removeDatabase(pos+1); transMeta.addDatabase(pos+1, newdb); if (!newname.equalsIgnoreCase(dupename)) refreshTree(); } else { addUndoNew(new DatabaseMeta[] { (DatabaseMeta)db.clone() }, new int[] { pos }); saveConnection(db); } } } public void clipConnection(String name) { DatabaseMeta db = transMeta.findDatabase(name); if (db!=null) { String xml = XMLHandler.getXMLHeader() + db.getXML(); toClipboard(xml); } } /** * Delete a database connection * @param name The name of the database connection. */ public void delConnection(String name) { DatabaseMeta db = transMeta.findDatabase(name); int pos = transMeta.indexOfDatabase(db); if (db!=null) { boolean worked=false; // delete from repository? if (rep!=null) { if (!rep.getUserInfo().isReadonly()) { try { long id_database = rep.getDatabaseID(db.getName()); rep.delDatabase(id_database); worked=true; } catch(KettleDatabaseException dbe) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorDeletingConnection.Title"), Messages.getString("Spoon.Dialog.ErrorDeletingConnection.Message",name), dbe);//"Error deleting connection ["+db+"] from repository!" } } else { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorDeletingConnection.Title"),Messages.getString("Spoon.Dialog.ErrorDeletingConnection.Message",name) , new KettleException(Messages.getString("Spoon.Dialog.Exception.ReadOnlyUser")));//"Error deleting connection ["+db+"] from repository!" //This user is read-only! 
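            // The connection is only removed from the transformation (and an undo entry recorded)
            // when there is no repository, or when the repository delete above succeeded.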
} } if (rep==null || worked) { addUndoDelete(new DatabaseMeta[] { (DatabaseMeta)db.clone() }, new int[] { pos }); transMeta.removeDatabase(pos); } refreshTree(); } setShellText(); } public void editStep(String name) { log.logDebug(toString(), Messages.getString("Spoon.Log.EditStep") +name);//"Edit step: " editStepInfo(transMeta.findStep(name)); } public String editStepInfo(StepMeta stepMeta) { String stepname = null; if (stepMeta != null) { try { String name = stepMeta.getName(); // Before we do anything, let's store the situation the way it was... StepMeta before = (StepMeta) stepMeta.clone(); StepMetaInterface stepint = stepMeta.getStepMetaInterface(); StepDialogInterface dialog = stepint.getDialog(shell, stepMeta.getStepMetaInterface(), transMeta, name); dialog.setRepository(rep); stepname = dialog.open(); if (stepname != null) { // OK, so the step has changed... // // First, backup the situation for undo/redo StepMeta after = (StepMeta) stepMeta.clone(); addUndoChange(new StepMeta[] { before }, new StepMeta[] { after }, new int[] { transMeta.indexOfStep(stepMeta) }); // Then, store the size of the // See if the new name the user enter, doesn't collide with another step. // If so, change the stepname and warn the user! // String newname = stepname; StepMeta smeta = transMeta.findStep(newname, stepMeta); int nr = 2; while (smeta != null) { newname = stepname + " " + nr; smeta = transMeta.findStep(newname); nr++; } if (nr > 2) { stepname = newname; MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.StepnameExists.Message", stepname)); // $NON-NLS-1$ mb.setText(Messages.getString("Spoon.Dialog.StepnameExists.Title")); // $NON-NLS-1$ mb.open(); } stepMeta.setName(stepname); refreshTree(true); // Perhaps new connections were created in the step dialog. } else { // Scenario: change connections and click cancel... // Perhaps new connections were created in the step dialog? if (transMeta.haveConnectionsChanged()) { refreshTree(true); } } refreshGraph(); // name is displayed on the graph too. setShellText(); } catch (Throwable e) { if (shell.isDisposed()) return null; new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnableOpenDialog.Title"), Messages .getString("Spoon.Dialog.UnableOpenDialog.Message"), new Exception(e));//"Unable to open dialog for this step" } } return stepname; } public void dupeStep(String name) { log.logDebug(toString(), Messages.getString("Spoon.Log.DuplicateStep")+name);//Duplicate step: StepMeta stMeta = null, stepMeta = null, look=null; for (int i=0;i<transMeta.nrSteps() && stepMeta==null;i++) { look = transMeta.getStep(i); if (look.getName().equalsIgnoreCase(name)) { stepMeta=look; } } if (stepMeta!=null) { stMeta = (StepMeta)stepMeta.clone(); if (stMeta!=null) { String newname = transMeta.getAlternativeStepname(stepMeta.getName()); int nr=2; while (transMeta.findStep(newname)!=null) { newname = stepMeta.getName()+" (copy "+nr+")"; nr++; } stMeta.setName(newname); // Don't select this new step! 
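            // The duplicate is left unselected, offset 20 pixels down/right from the original step,
            // added to the transformation and registered for undo before the tree and graph refresh.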
stMeta.setSelected(false); Point loc = stMeta.getLocation(); stMeta.setLocation(loc.x+20, loc.y+20); transMeta.addStep(stMeta); addUndoNew(new StepMeta[] { (StepMeta)stMeta.clone() }, new int[] { transMeta.indexOfStep(stMeta) }); refreshTree(); refreshGraph(); } } } public void clipStep(String name) { log.logDebug(toString(), Messages.getString("Spoon.Log.CopyStepToClipboard")+name);//copy step to clipboard: StepMeta stepMeta = transMeta.findStep(name); if (stepMeta!=null) { String xml = stepMeta.getXML(); toClipboard(xml); } } public void pasteXML(String clipcontent, Point loc) { try { //System.out.println(clipcontent); Document doc = XMLHandler.loadXMLString(clipcontent); Node transnode = XMLHandler.getSubNode(doc, "transformation"); // De-select all, re-select pasted steps... transMeta.unselectAll(); Node stepsnode = XMLHandler.getSubNode(transnode, "steps"); int nr = XMLHandler.countNodes(stepsnode, "step"); log.logDebug(toString(), Messages.getString("Spoon.Log.FoundSteps",""+nr)+loc);//"I found "+nr+" steps to paste on location: " StepMeta steps[] = new StepMeta[nr]; //Point min = new Point(loc.x, loc.y); Point min = new Point(99999999,99999999); // Load the steps... for (int i=0;i<nr;i++) { Node stepnode = XMLHandler.getSubNodeByNr(stepsnode, "step", i); steps[i] = new StepMeta(log, stepnode, transMeta.getDatabases(), transMeta.getCounters()); if (loc!=null) { Point p = steps[i].getLocation(); if (min.x > p.x) min.x = p.x; if (min.y > p.y) min.y = p.y; } } // Load the hops... Node hopsnode = XMLHandler.getSubNode(transnode, "order"); nr = XMLHandler.countNodes(hopsnode, "hop"); log.logDebug(toString(), Messages.getString("Spoon.Log.FoundHops",""+nr));//"I found "+nr+" hops to paste." TransHopMeta hops[] = new TransHopMeta[nr]; ArrayList alSteps = new ArrayList(); for (int i=0;i<steps.length;i++) alSteps.add(steps[i]); for (int i=0;i<nr;i++) { Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i); hops[i] = new TransHopMeta(hopnode, alSteps); } // What's the difference between loc and min? // This is the offset: Point offset = new Point(loc.x-min.x, loc.y-min.y); // Undo/redo object positions... int position[] = new int[steps.length]; for (int i=0;i<steps.length;i++) { Point p = steps[i].getLocation(); String name = steps[i].getName(); steps[i].setLocation(p.x+offset.x, p.y+offset.y); steps[i].setDraw(true); // Check the name, find alternative... steps[i].setName( transMeta.getAlternativeStepname(name) ); transMeta.addStep(steps[i]); position[i] = transMeta.indexOfStep(steps[i]); } // Add the hops too... for (int i=0;i<hops.length;i++) { transMeta.addTransHop(hops[i]); } // Load the notes... Node notesnode = XMLHandler.getSubNode(transnode, "notepads"); nr = XMLHandler.countNodes(notesnode, "notepad"); log.logDebug(toString(), Messages.getString("Spoon.Log.FoundNotepads",""+nr));//"I found "+nr+" notepads to paste." NotePadMeta notes[] = new NotePadMeta[nr]; for (int i=0;i<notes.length;i++) { Node notenode = XMLHandler.getSubNodeByNr(notesnode, "notepad", i); notes[i] = new NotePadMeta(notenode); Point p = notes[i].getLocation(); notes[i].setLocation(p.x+offset.x, p.y+offset.y); transMeta.addNote(notes[i]); } // Set the source and target steps ... for (int i=0;i<steps.length;i++) { StepMetaInterface smi = steps[i].getStepMetaInterface(); smi.searchInfoAndTargetSteps(transMeta.getSteps()); } // Save undo information too... 
addUndoNew(steps, position, false); int hoppos[] = new int[hops.length]; for (int i=0;i<hops.length;i++) hoppos[i] = transMeta.indexOfTransHop(hops[i]); addUndoNew(hops, hoppos, true); int notepos[] = new int[notes.length]; for (int i=0;i<notes.length;i++) notepos[i] = transMeta.indexOfNote(notes[i]); addUndoNew(notes, notepos, true); if (transMeta.haveStepsChanged()) { refreshTree(); refreshGraph(); } } catch(KettleException e) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnablePasteSteps.Title"),Messages.getString("Spoon.Dialog.UnablePasteSteps.Message") , e);//"Error pasting steps...", "I was unable to paste steps to this transformation" } } public void copySelected(StepMeta stepMeta[], NotePadMeta notePadMeta[]) { if (stepMeta==null || stepMeta.length==0) return; String xml = XMLHandler.getXMLHeader(); xml+="<transformation>"+Const.CR; xml+=" <steps>"+Const.CR; for (int i=0;i<stepMeta.length;i++) { xml+=stepMeta[i].getXML(); } xml+=" </steps>"+Const.CR; // // Also check for the hops in between the selected steps... // xml+="<order>"+Const.CR; if (stepMeta!=null) for (int i=0;i<stepMeta.length;i++) { for (int j=0;j<stepMeta.length;j++) { if (i!=j) { TransHopMeta hop = transMeta.findTransHop(stepMeta[i], stepMeta[j]); if (hop!=null) // Ok, we found one... { xml+=hop.getXML()+Const.CR; } } } } xml+=" </order>"+Const.CR; xml+=" <notepads>"+Const.CR; if (notePadMeta!=null) for (int i=0;i<notePadMeta.length;i++) { xml+= notePadMeta[i].getXML(); } xml+=" </notepads>"+Const.CR; xml+=" </transformation>"+Const.CR; toClipboard(xml); } public void delStep(String name) { log.logDebug(toString(), Messages.getString("Spoon.Log.DeleteStep")+name);//"Delete step: " int i, pos=0; StepMeta stepMeta = null, look=null; for (i=0;i<transMeta.nrSteps() && stepMeta==null;i++) { look = transMeta.getStep(i); if (look.getName().equalsIgnoreCase(name)) { stepMeta=look; pos=i; } } if (stepMeta!=null) { for (i=transMeta.nrTransHops()-1;i>=0;i--) { TransHopMeta hi = transMeta.getTransHop(i); if ( hi.getFromStep().equals(stepMeta) || hi.getToStep().equals(stepMeta) ) { addUndoDelete(new TransHopMeta[] { hi }, new int[] { transMeta.indexOfTransHop(hi) }, true); transMeta.removeTransHop(i); refreshTree(); } } transMeta.removeStep(pos); addUndoDelete(new StepMeta[] { stepMeta }, new int[] { pos }); refreshTree(); refreshGraph(); } else { log.logDebug(toString(),Messages.getString("Spoon.Log.UnableFindStepToDelete",name) );//"Couldn't find step ["+name+"] to delete..." } } public void editHop(String name) { TransHopMeta hi = transMeta.findTransHop(name); if (hi!=null) { // Backup situation BEFORE edit: TransHopMeta before = (TransHopMeta)hi.clone(); TransHopDialog hd = new TransHopDialog(shell, SWT.NONE, hi, transMeta); if (hd.open()!=null) { // Backup situation for redo/undo: TransHopMeta after = (TransHopMeta)hi.clone(); addUndoChange(new TransHopMeta[] { before }, new TransHopMeta[] { after }, new int[] { transMeta.indexOfTransHop(hi) } ); String newname = hi.toString(); if (!name.equalsIgnoreCase(newname)) { refreshTree(); refreshGraph(); // color, nr of copies... 
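// Note: the hop appears in the tree under its name, so a rename requires rebuilding both the tree and the canvas.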
} } } setShellText(); } public void delHop(String name) { int i,n; n=transMeta.nrTransHops(); for (i=0;i<n;i++) { TransHopMeta hi = transMeta.getTransHop(i); if (hi.toString().equalsIgnoreCase(name)) { addUndoDelete(new Object[] { (TransHopMeta)hi.clone() }, new int[] { transMeta.indexOfTransHop(hi) }); transMeta.removeTransHop(i); refreshTree(); refreshGraph(); return; } } setShellText(); } public void newHop(StepMeta fr, StepMeta to) { TransHopMeta hi = new TransHopMeta(fr, to); TransHopDialog hd = new TransHopDialog(shell, SWT.NONE, hi, transMeta); if (hd.open()!=null) { boolean error=false; if (transMeta.findTransHop(hi.getFromStep(), hi.getToStep())!=null) { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.HopExists.Message"));//"This hop already exists!" mb.setText(Messages.getString("Spoon.Dialog.HopExists.Title"));//Error! mb.open(); error=true; } if (transMeta.hasLoop(fr) || transMeta.hasLoop(to)) { refreshTree(); refreshGraph(); MessageBox mb = new MessageBox(shell, SWT.YES | SWT.ICON_WARNING ); mb.setMessage(Messages.getString("Spoon.Dialog.AddingHopCausesLoop.Message"));//Adding this hop causes a loop in the transformation. Loops are not allowed! mb.setText(Messages.getString("Spoon.Dialog.AddingHopCausesLoop.Title"));//Warning! mb.open(); error=true; } if (!error) { transMeta.addTransHop(hi); addUndoNew(new TransHopMeta[] { (TransHopMeta)hi.clone() }, new int[] { transMeta.indexOfTransHop(hi) }); hi.getFromStep().drawStep(); hi.getToStep().drawStep(); refreshTree(); refreshGraph(); } } } public void newHop() { newHop(null, null); } public void newConnection() { DatabaseMeta db = new DatabaseMeta(); DatabaseDialog con = new DatabaseDialog(shell, SWT.APPLICATION_MODAL, log, db, props); String con_name = con.open(); if (con_name!=null && con_name.length()>0) { transMeta.addDatabase(db); addUndoNew(new DatabaseMeta[] { (DatabaseMeta)db.clone() }, new int[] { transMeta.indexOfDatabase(db) }); saveConnection(db); refreshTree(); } } public void saveConnection(DatabaseMeta db) { // Also add to repository? if (rep!=null) { if (!rep.userinfo.isReadonly()) { try { db.saveRep(rep); log.logDetailed(toString(), Messages.getString("Spoon.Log.SavedDatabaseConnection",db.getDatabaseName()));//"Saved database connection ["+db+"] to the repository." // Put a commit behind it! rep.commit(); } catch(KettleException ke) { rep.rollback(); // In case of failure: undo changes! new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorSavingConnection.Title"),Messages.getString("Spoon.Dialog.ErrorSavingConnection.Message",db.getDatabaseName()), ke);//"Can't save...","Error saving connection ["+db+"] to repository!" } } else { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnableSave.Title"),Messages.getString("Spoon.Dialog.ErrorSavingConnection.Message",db.getDatabaseName()), new KettleException(Messages.getString("Spoon.Dialog.Exception.ReadOnlyRepositoryUser")));//This repository user is read-only! } } } /** * Shows a 'model has changed' warning if required * @return true if nothing has changed or the changes are rejected by the user. */ public boolean showChangedWarning() { boolean answer = true; if (transMeta.hasChanged()) { MessageBox mb = new MessageBox(shell, SWT.YES | SWT.NO | SWT.CANCEL | SWT.ICON_WARNING ); mb.setMessage(Messages.getString("Spoon.Dialog.PromptSave.Message"));//"This model has changed. Do you want to save it?" 
mb.setText(Messages.getString("Spoon.Dialog.PromptSave.Title")); int reply = mb.open(); if (reply==SWT.YES) { answer=saveFile(); } else { if (reply==SWT.CANCEL) { answer = false; } else { answer = true; } } } return answer; } public void openRepository() { int perms[] = new int[] { PermissionMeta.TYPE_PERMISSION_TRANSFORMATION }; RepositoriesDialog rd = new RepositoriesDialog(disp, SWT.NONE, perms, APP_NAME); rd.getShell().setImage(GUIResource.getInstance().getImageSpoon()); if (rd.open()) { // Close previous repository... if (rep!=null) { rep.disconnect(); } rep = new Repository(log, rd.getRepository(), rd.getUser()); try { rep.connect(APP_NAME); } catch(KettleException ke) { rep=null; new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorConnectingRepository.Title"), Messages.getString("Spoon.Dialog.ErrorConnectingRepository.Message",Const.CR), ke); //$NON-NLS-1$ //$NON-NLS-2$ } // Set for the existing databases, the ID's at -1! for (int i=0;i<transMeta.nrDatabases();i++) { transMeta.getDatabase(i).setID(-1L); } // Set for the existing transformation the ID at -1! transMeta.setID(-1L); // Keep track of the old databases for now. ArrayList oldDatabases = transMeta.getDatabases(); // In order to re-match the databases on name (not content), we need to load the databases from the new repository. // NOTE: for purposes such as DEVELOP - TEST - PRODUCTION sycles. // first clear the list of databases. transMeta.setDatabases(new ArrayList()); // Read them from the new repository. readDatabases(); /* for (int i=0;i<transMeta.nrDatabases();i++) { System.out.println("NEW REP: ["+transMeta.getDatabase(i).getName()+"]"); } */ // Then we need to re-match the databases at save time... for (int i=0;i<oldDatabases.size();i++) { DatabaseMeta oldDatabase = (DatabaseMeta) oldDatabases.get(i); DatabaseMeta newDatabase = Const.findDatabase(transMeta.getDatabases(), oldDatabase.getName()); // If it exists, change the settings... if (newDatabase!=null) { // System.out.println("Found the new database in the repository ["+oldDatabase.getName()+"]"); // A database connection with the same name exists in the new repository. // Change the old connections to reflect the settings in the new repository oldDatabase.setDatabaseInterface(newDatabase.getDatabaseInterface()); } else { // System.out.println("Couldn't find the new database in the repository ["+oldDatabase.getName()+"]"); // The old database is not present in the new repository: simply add it to the list. // When the transformation gets saved, it will be added to the repository. transMeta.addDatabase(oldDatabase); } } // For the existing transformation, change the directory too: // Try to find the same directory in the new repository... RepositoryDirectory redi = rep.getDirectoryTree().findDirectory(transMeta.getDirectory().getPath()); if (redi!=null) { transMeta.setDirectory(redi); } else { transMeta.setDirectory(rep.getDirectoryTree()); // the root is the default! } refreshTree(true); setShellText(); } else { // Not cancelled? --> Clear repository... if (!rd.isCancelled()) { closeRepository(); } } } public void exploreRepository() { if (rep!=null) { RepositoryExplorerDialog erd = new RepositoryExplorerDialog(shell, SWT.NONE, rep, rep.getUserInfo()); String objname = erd.open(); if (objname!=null) { String object_type = erd.getObjectType(); RepositoryDirectory repdir = erd.getObjectDirectory(); // System.out.println("Load ["+object_type+"] --> ["+objname+"] from dir ["+(repdir==null)+"]"); // Try to open it as a transformation. 
if (object_type.equals(RepositoryExplorerDialog.STRING_TRANSFORMATIONS)) { if (showChangedWarning()) { try { transMeta = new TransMeta(rep, objname, repdir); transMeta.clearChanged(); setFilename(objname); refreshTree(); refreshGraph(); } catch(KettleException e) { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.ErrorOpening.Message")+objname+Const.CR+e.getMessage());//"Error opening : " mb.setText(Messages.getString("Spoon.Dialog.ErrorOpening.Title")); mb.open(); } } } } } } public void editRepositoryUser() { if (rep!=null) { UserInfo userinfo = rep.getUserInfo(); UserDialog ud = new UserDialog(shell, SWT.NONE, log, props, rep, userinfo); UserInfo ui = ud.open(); if (!userinfo.isReadonly()) { if (ui!=null) { try { ui.saveRep(rep); } catch(KettleException e) { MessageBox mb = new MessageBox(shell, SWT.ICON_WARNING | SWT.OK); mb.setMessage(Messages.getString("Spoon.Dialog.UnableChangeUser.Message")+Const.CR+e.getMessage());//Sorry, I was unable to change this user in the repository: mb.setText(Messages.getString("Spoon.Dialog.UnableChangeUser.Title"));//"Edit user" mb.open(); } } } else { MessageBox mb = new MessageBox(shell, SWT.ICON_WARNING | SWT.OK); mb.setMessage(Messages.getString("Spoon.Dialog.NotAllowedChangeUser.Message"));//"Sorry, you are not allowed to change this user." mb.setText(Messages.getString("Spoon.Dialog.NotAllowedChangeUser.Title")); mb.open(); } } } public void readDatabases() { transMeta.readDatabases(rep); } public void closeRepository() { if (rep!=null) rep.disconnect(); rep = null; setShellText(); } public void openFile(boolean importfile) { if (showChangedWarning()) { if (rep==null || importfile) // Load from XML { FileDialog dialog = new FileDialog(shell, SWT.OPEN); // dialog.setFilterPath("C:\\Projects\\kettle\\source\\"); dialog.setFilterExtensions(Const.STRING_TRANS_FILTER_EXT); dialog.setFilterNames(Const.STRING_TRANS_FILTER_NAMES); String fname = dialog.open(); if (fname!=null) { try { transMeta = new TransMeta(fname); props.addLastFile(Props.TYPE_PROPERTIES_SPOON, fname, Const.FILE_SEPARATOR, false, ""); addMenuLast(); if (!importfile) transMeta.clearChanged(); setFilename(fname); } catch(KettleException e) { clear(); MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.ErrorOpening.Message")+fname+Const.CR+e.getMessage());//"Error opening : " mb.setText(Messages.getString("Spoon.Dialog.ErrorOpening.Title"));//"Error!" mb.open(); } refreshGraph(); refreshTree(true); refreshHistory(); } } else // Read a transformation from the repository! { SelectObjectDialog sod = new SelectObjectDialog(shell, props, rep, true, false, false); String transname = sod.open(); RepositoryDirectory repdir = sod.getDirectory(); if (transname!=null && repdir!=null) { TransLoadProgressDialog tlpd = new TransLoadProgressDialog(shell, rep, transname, repdir); TransMeta transInfo = tlpd.open(); if (transInfo!=null) { transMeta = transInfo; // transMeta = new TransInfo(log, rep, transname, repdir); log.logDetailed(toString(),Messages.getString("Spoon.Log.LoadToTransformation",transname,repdir.getDirectoryName()) );//"Transformation ["+transname+"] in directory ["+repdir+"] loaded from the repository." 
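// Register the loaded transformation in the recently-used files list and mark it as unchanged, as is done for the XML case above.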
//System.out.println("name="+transMeta.getName()); props.addLastFile(Props.TYPE_PROPERTIES_SPOON, transname, repdir.getPath(), true, rep.getName()); addMenuLast(); transMeta.clearChanged(); setFilename(transname); } refreshGraph(); refreshTree(true); refreshHistory(); } } } } public void newFile() { if (showChangedWarning()) { clear(); loadRepositoryObjects(); // Add databases if connected to repository setFilename(null); refreshTree(true); refreshGraph(); refreshHistory(); } } public void loadRepositoryObjects() { // Load common database info from active repository... if (rep!=null) { transMeta.readDatabases(rep); } } public boolean quitFile() { boolean exit = true; boolean showWarning = true; log.logDetailed(toString(), Messages.getString("Spoon.Log.QuitApplication"));//"Quit application." saveSettings(); if (transMeta.hasChanged()) { MessageBox mb = new MessageBox(shell, SWT.YES | SWT.NO | SWT.CANCEL | SWT.ICON_WARNING ); mb.setMessage(Messages.getString("Spoon.Dialog.SaveChangedFile.Message"));//"File has changed! Do you want to save first?" mb.setText(Messages.getString("Spoon.Dialog.SaveChangedFile.Title"));//"Warning!" int answer = mb.open(); switch(answer) { case SWT.YES: exit=saveFile(); showWarning=false; break; case SWT.NO: exit=true; showWarning=false; break; case SWT.CANCEL: exit=false; showWarning=false; break; } } // System.out.println("exit="+exit+", showWarning="+showWarning+", running="+spoonlog.isRunning()+", showExitWarning="+props.showExitWarning()); // Show warning on exit when spoon is still running // Show warning on exit when a warning needs to be displayed, but only if we didn't ask to save before. (could have pressed cancel then!) // if ( (exit && spoonlog.isRunning() ) || (exit && showWarning && props.showExitWarning() ) ) { String message = Messages.getString("Spoon.Message.Warning.PromptExit"); //"Are you sure you want to exit?" if (spoonlog.isRunning()) message = Messages.getString("Spoon.Message.Warning.PromptExitWhenRunTransformation");//There is a running transformation. Are you sure you want to exit? MessageDialogWithToggle md = new MessageDialogWithToggle(shell, Messages.getString("System.Warning"),//"Warning!" null, message, MessageDialog.WARNING, new String[] { Messages.getString("Spoon.Message.Warning.Yes"), Messages.getString("Spoon.Message.Warning.No") },//"Yes", "No" 1, Messages.getString("Spoon.Message.Warning.NotShowWarning"),//"Please, don't show this warning anymore." !props.showExitWarning() ); int idx = md.open(); props.setExitWarningShown(!md.getToggleState()); props.saveProps(); if (idx==1) exit=false; // No selected: don't exit! else exit=true; } if (exit) dispose(); return exit; } public boolean saveFile() { boolean saved=false; log.logDetailed(toString(), Messages.getString("Spoon.Log.SaveToFileOrRepository"));//"Save to file or repository..." 
if (rep!=null) { saved=saveRepository(); } else { if (transMeta.getFilename()!=null) { saved=save(transMeta.getFilename()); } else { saved=saveFileAs(); } } try { if (props.useDBCache()) transMeta.getDbCache().saveCache(log); } catch(KettleException e) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorSavingDatabaseCache.Title"), Messages.getString("Spoon.Dialog.ErrorSavingDatabaseCache.Message"), e);//"An error occured saving the database cache to disk" } return saved; } public boolean saveRepository() { return saveRepository(false); } public boolean saveRepository(boolean ask_name) { log.logDetailed(toString(), Messages.getString("Spoon.Log.SaveToRepository"));//"Save to repository..." if (rep!=null) { boolean answer = true; boolean ask = ask_name; while (answer && ( ask || transMeta.getName()==null || transMeta.getName().length()==0 ) ) { if (!ask) { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_WARNING); mb.setMessage(Messages.getString("Spoon.Dialog.PromptTransformationName.Message"));//"Please give this transformation a name before saving it in the database." mb.setText(Messages.getString("Spoon.Dialog.PromptTransformationName.Title"));//"Transformation has no name." mb.open(); } ask=false; answer = setTrans(); // System.out.println("answer="+answer+", ask="+ask+", transMeta.getName()="+transMeta.getName()); } if (answer && transMeta.getName()!=null && transMeta.getName().length()>0) { if (!rep.getUserInfo().isReadonly()) { int response = SWT.YES; if (transMeta.showReplaceWarning(rep)) { MessageBox mb = new MessageBox(shell, SWT.YES | SWT.NO | SWT.ICON_QUESTION); mb.setMessage(Messages.getString("Spoon.Dialog.PromptOverwriteTransformation.Message",transMeta.getName(),Const.CR));//"There already is a transformation called ["+transMeta.getName()+"] in the repository."+Const.CR+"Do you want to overwrite the transformation?" mb.setText(Messages.getString("Spoon.Dialog.PromptOverwriteTransformation.Title"));//"Overwrite?" response = mb.open(); } boolean saved=false; if (response == SWT.YES) { shell.setCursor(cursor_hourglass); // Keep info on who & when this transformation was changed... transMeta.setModifiedDate( new Value("MODIFIED_DATE", Value.VALUE_TYPE_DATE) ); transMeta.getModifiedDate().sysdate(); transMeta.setModifiedUser( rep.getUserInfo().getLogin() ); TransSaveProgressDialog tspd = new TransSaveProgressDialog(log, props, shell, rep, transMeta); if (tspd.open()) { saved=true; if (!props.getSaveConfirmation()) { MessageDialogWithToggle md = new MessageDialogWithToggle(shell, Messages.getString("Spoon.Message.Warning.SaveOK"), //"Save OK!" null, Messages.getString("Spoon.Message.Warning.TransformationWasStored"),//"This transformation was stored in repository" MessageDialog.QUESTION, new String[] { Messages.getString("Spoon.Message.Warning.OK") },//"OK!" 0, Messages.getString("Spoon.Message.Warning.NotShowThisMessage"),//"Don't show this message again." props.getSaveConfirmation() ); md.open(); props.setSaveConfirmation(md.getToggleState()); } // Handle last opened files... 
props.addLastFile(Props.TYPE_PROPERTIES_SPOON, transMeta.getName(), transMeta.getDirectory().getPath(), true, getRepositoryName()); saveSettings(); addMenuLast(); setShellText(); } shell.setCursor(null); } return saved; } else { MessageBox mb = new MessageBox(shell, SWT.CLOSE | SWT.ICON_ERROR); mb.setMessage(Messages.getString("Spoon.Dialog.OnlyreadRepository.Message"));//"Sorry, the user you're logged on with, can only read from the repository" mb.setText(Messages.getString("Spoon.Dialog.OnlyreadRepository.Title"));//"Transformation not saved!" mb.open(); } } } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); mb.setMessage(Messages.getString("Spoon.Dialog.NoRepositoryConnection.Message"));//"There is no repository connection available." mb.setText(Messages.getString("Spoon.Dialog.NoRepositoryConnection.Title"));//"No repository available." mb.open(); } return false; } public boolean saveFileAs() { boolean saved=false; log.logBasic(toString(), Messages.getString("Spoon.Log.SaveAs"));//"Save as..." if (rep!=null) { transMeta.setID(-1L); saved=saveRepository(true); } else { saved=saveXMLFile(); } return saved; } private boolean saveXMLFile() { boolean saved=false; FileDialog dialog = new FileDialog(shell, SWT.SAVE); dialog.setFilterPath("C:\\Projects\\kettle\\source\\"); dialog.setFilterExtensions(Const.STRING_TRANS_FILTER_EXT); dialog.setFilterNames(Const.STRING_TRANS_FILTER_NAMES); String fname = dialog.open(); if (fname!=null) { // Is the filename ending on .ktr, .xml? boolean ending=false; for (int i=0;i<Const.STRING_TRANS_FILTER_EXT.length-1;i++) { if (fname.endsWith(Const.STRING_TRANS_FILTER_EXT[i].substring(1))) { ending=true; } } if (fname.endsWith(Const.STRING_TRANS_DEFAULT_EXT)) ending=true; if (!ending) { fname+=Const.STRING_TRANS_DEFAULT_EXT; } // See if the file already exists... File f = new File(fname); int id = SWT.YES; if (f.exists()) { MessageBox mb = new MessageBox(shell, SWT.NO | SWT.YES | SWT.ICON_WARNING); mb.setMessage(Messages.getString("Spoon.Dialog.PromptOverwriteFile.Message"));//"This file already exists. Do you want to overwrite it?" mb.setText(Messages.getString("Spoon.Dialog.PromptOverwriteFile.Title"));//"This file already exists!" id = mb.open(); } if (id==SWT.YES) { saved=save(fname); setFilename(fname); } } return saved; } private boolean save(String fname) { boolean saved = false; String xml = XMLHandler.getXMLHeader() + transMeta.getXML(); try { DataOutputStream dos = new DataOutputStream(new FileOutputStream(new File(fname))); dos.write(xml.getBytes(Const.XML_ENCODING)); dos.close(); saved=true; // Handle last opened files... props.addLastFile(Props.TYPE_PROPERTIES_SPOON, fname, Const.FILE_SEPARATOR, false, ""); saveSettings(); addMenuLast(); transMeta.clearChanged(); setShellText(); log.logDebug(toString(), Messages.getString("Spoon.Log.FileWritten")+" ["+fname+"]"); //"File written to } catch(Exception e) { log.logDebug(toString(), Messages.getString("Spoon.Log.ErrorOpeningFileForWriting")+e.toString());//"Error opening file for writing! 
--> " MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); mb.setMessage(Messages.getString("Spoon.Dialog.ErrorSavingFile.Message")+Const.CR+e.toString());//"Error saving file:" mb.setText(Messages.getString("Spoon.Dialog.ErrorSavingFile.Title"));//"ERROR" mb.open(); } return saved; } public void helpAbout() { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION | SWT.CENTER); String mess = Messages.getString("System.ProductInfo")+Const.VERSION+Const.CR+Const.CR+Const.CR;//Kettle - Spoon version mess+=Messages.getString("System.CompanyInfo")+Const.CR+" "+Messages.getString("System.ProductWebsiteUrl")+Const.CR; //(c) 2001-2004 i-Bridge bvba www.kettle.be mb.setMessage(mess); mb.setText(APP_NAME); mb.open(); } public void editUnselectAll() { transMeta.unselectAll(); spoongraph.redraw(); } public void editSelectAll() { transMeta.selectAll(); spoongraph.redraw(); } public void editOptions() { EnterOptionsDialog eod = new EnterOptionsDialog(shell, props); if (eod.open()!=null) { props.saveProps(); loadSettings(); changeLooks(); MessageBox mb = new MessageBox(shell, SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.PleaseRestartApplication.Message")); mb.setText(Messages.getString("Spoon.Dialog.PleaseRestartApplication.Title")); mb.open(); } } public int getTreePosition(TreeItem ti, String item) { if (ti!=null) { TreeItem items[] = ti.getItems(); for (int x=0;x<items.length;x++) { if (items[x].getText().equalsIgnoreCase(item)) { return x; } } } return -1; } public void refreshTree() { refreshTree(false); refreshPluginHistory(); } /** * Refresh the object selection tree (on the left of the screen) * @param complete true refreshes the complete tree, false tries to do a differential update to avoid flickering. */ public void refreshTree(boolean complete) { if (shell.isDisposed()) return; if (!transMeta.hasChanged() && !complete) return; // Nothing changed: nothing to do! int idx; TreeItem ti[]; // Refresh the connections... // if (transMeta.haveConnectionsChanged() || complete) { tiConn.setText(STRING_CONNECTIONS); // TreeItem tiConn= this.tiConn (TreeItem)widgets.getWidget(STRING_CONNECTIONS); ti = tiConn.getItems(); // In complete refresh: delete all items first if (complete) { for (int i=0;i<ti.length;i++) ti[i].dispose(); ti = tiConn.getItems(); } // First delete no longer used items... for (int i=0;i<ti.length;i++) { String str = ti[i].getText(); DatabaseMeta inf = transMeta.findDatabase(str); if (inf!=null) idx = transMeta.indexOfDatabase(inf); else idx=-1; if (idx<0 || idx>i) ti[i].dispose(); } ti = tiConn.getItems(); // Insert missing items in tree... int j=0; for (int i=0;i<transMeta.nrDatabases();i++) { DatabaseMeta inf = transMeta.getDatabase(i); String con_name = inf.getName(); String ti_name = ""; if (j<ti.length) ti_name = ti[j].getText(); if (!con_name.equalsIgnoreCase(ti_name)) { // insert at position j in tree TreeItem newitem = new TreeItem(tiConn, j); newitem.setText(inf.getName()); newitem.setForeground(GUIResource.getInstance().getColorBlack()); newitem.setImage(GUIResource.getInstance().getImageConnection()); j++; ti = tiConn.getItems(); } else { j++; } } // tiConn.setExpanded(true); } //ni.setImage(gv.hop_image); //ni.setImage(gv.step_images_small[steptype]); // Refresh the Steps... 
// if (transMeta.haveStepsChanged() || complete) { tiStep.setText(STRING_STEPS); ti = tiStep.getItems(); // In complete refresh: delete all items first if (complete) { for (int i=0;i<ti.length;i++) ti[i].dispose(); ti = tiStep.getItems(); } // First delete no longer used items... log.logDebug(toString(), Messages.getString("Spoon.Log.CheckSteps"));//"check steps" for (int i=0;i<ti.length;i++) { String str = ti[i].getText(); log.logDebug(toString(), " "+Messages.getString("Spoon.Log.CheckStepTreeItem")+i+" : ["+str+"]"); StepMeta inf = transMeta.findStep(str); if (inf!=null) idx = transMeta.indexOfStep(inf); else idx=-1; if (idx<0 || idx>i) { log.logDebug(toString(), " "+ Messages.getString("Spoon.Log.RemoveTreeItem")+ "["+str+"]");//remove tree item ti[i].dispose(); } } ti = tiStep.getItems(); // Insert missing items in tree... int j=0; for (int i=0;i<transMeta.nrSteps();i++) { StepMeta inf = transMeta.getStep(i); String step_name = inf.getName(); String step_id = inf.getStepID(); String ti_name = ""; if (j<ti.length) ti_name = ti[j].getText(); if (!step_name.equalsIgnoreCase(ti_name)) { // insert at position j in tree TreeItem newitem = new TreeItem(tiStep, j); newitem.setText(inf.getName()); // Set the small image... Image img = (Image)GUIResource.getInstance().getImagesStepsSmall().get(step_id); newitem.setImage(img); j++; ti = tiStep.getItems(); } else { j++; } } // See if the colors are still OK! for (int i=0;i<ti.length;i++) { StepMeta inf = transMeta.findStep(ti[i].getText()); Color col = ti[i].getForeground(); Color newcol; if (transMeta.isStepUsedInTransHops(inf)) newcol=GUIResource.getInstance().getColorBlack(); else newcol=GUIResource.getInstance().getColorGray(); if (!newcol.equals(col)) ti[i].setForeground(newcol); } //tiStep.setExpanded(true); } // Refresh the Hops... // if (transMeta.haveHopsChanged() || complete) { tiHops.setText(STRING_HOPS); ti = tiHops.getItems(); // In complete refresh: delete all items first if (complete) { for (int i=0;i<ti.length;i++) ti[i].dispose(); ti = tiHops.getItems(); } // First delete no longer used items... for (int i=0;i<ti.length;i++) { String str = ti[i].getText(); TransHopMeta inf = transMeta.findTransHop(str); if (inf!=null) idx = transMeta.indexOfTransHop(inf); else idx=-1; if (idx<0 || idx>i) ti[i].dispose(); } ti = tiHops.getItems(); // Insert missing items in tree... 
int j=0; for (int i=0;i<transMeta.nrTransHops();i++) { TransHopMeta inf = transMeta.getTransHop(i); String trans_name = inf.toString(); String ti_name = ""; if (j<ti.length) ti_name = ti[j].getText(); if (!trans_name.equalsIgnoreCase(ti_name)) { // insert at position j in tree TreeItem newitem = new TreeItem(tiHops, j); newitem.setText(inf.toString()); newitem.setForeground(GUIResource.getInstance().getColorBlack()); newitem.setImage(GUIResource.getInstance().getImageHop()); j++; ti = tiHops.getItems(); } else { j++; } } // tiTrns.setExpanded(false); } selectionTree.setFocus(); setShellText(); } public void refreshGraph() { if (shell.isDisposed()) return; spoongraph.redraw(); setShellText(); } public void refreshHistory() { spoonhist.refreshHistory(); } public StepMeta newStep() { return newStep(true, true); } public StepMeta newStep(boolean openit, boolean rename) { TreeItem ti[] = selectionTree.getSelection(); StepMeta inf = null; if (ti.length==1) { String steptype = ti[0].getText(); log.logDebug(toString(), Messages.getString("Spoon.Log.NewStep")+steptype);//"New step: " inf = newStep(steptype, steptype, openit, rename); } return inf; } /** * Allocate new step, optionally open and rename it. * * @param name Name of the new step * @param description Description of the type of step * @param openit Open the dialog for this step? * @param rename Rename this step? * * @return The newly created StepMeta object. * */ public StepMeta newStep(String name, String description, boolean openit, boolean rename) { StepMeta inf = null; // See if we need to rename the step to avoid doubles! if (rename && transMeta.findStep(name)!=null) { int i=2; String newname = name+" "+i; while (transMeta.findStep(newname)!=null) { i++; newname = name+" "+i; } name=newname; } StepLoader steploader = StepLoader.getInstance(); StepPlugin stepPlugin = null; try { stepPlugin = steploader.findStepPluginWithDescription(description); if (stepPlugin!=null) { StepMetaInterface info = BaseStep.getStepInfo(stepPlugin, steploader); info.setDefault(); if (openit) { StepDialogInterface dialog = info.getDialog(shell, info, transMeta, name); name = dialog.open(); } inf=new StepMeta(log, stepPlugin.getID()[0], name, info); if (name!=null) // OK pressed in the dialog: we have a step-name { String newname=name; StepMeta stepMeta = transMeta.findStep(newname); int nr=2; while (stepMeta!=null) { newname = name+" "+nr; stepMeta = transMeta.findStep(newname); nr++; } if (nr>2) { inf.setName(newname); MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION); mb.setMessage(Messages.getString("Spoon.Dialog.ChangeStepname.Message",newname));//"This stepname already exists. Spoon changed the stepname to ["+newname+"]" mb.setText(Messages.getString("Spoon.Dialog.ChangeStepname.Title"));//"Info!" mb.open(); } inf.setLocation(20, 20); // default location at (20,20) transMeta.addStep(inf); // Save for later: // if openit is false: we drag&drop it onto the canvas! if (openit) { addUndoNew(new StepMeta[] { inf }, new int[] { transMeta.indexOfStep(inf) }); } // Also store it in the pluginHistory list... props.addPluginHistory(stepPlugin.getID()[0]); refreshTree(); } else { return null; // Cancel pressed in dialog. } setShellText(); } } catch(KettleException e) { String filename = stepPlugin.getErrorHelpFile(); if (stepPlugin!=null && filename!=null) { // OK, in stead of a normal error message, we give back the content of the error help file... 
(HTML) try { StringBuffer content=new StringBuffer(); System.out.println("Filename = "+filename); FileInputStream fis = new FileInputStream(new File(filename)); int ch = fis.read(); while (ch>=0) { content.append( (char)ch); ch = fis.read(); } System.out.println("Content = "+content); ShowBrowserDialog sbd = new ShowBrowserDialog(shell, Messages.getString("Spoon.Dialog.ErrorHelpText.Title"), content.toString());//"Error help text" sbd.open(); } catch(Exception ex) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorShowingHelpText.Title"), Messages.getString("Spoon.Dialog.ErrorShowingHelpText.Message"), ex);//"Error showing help text" } } else { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnableCreateNewStep.Title"),Messages.getString("Spoon.Dialog.UnableCreateNewStep.Message") , e);//"Error creating step" "I was unable to create a new step" } return null; } catch(Throwable e) { if (!shell.isDisposed()) new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorCreatingStep.Title"), Messages.getString("Spoon.Dialog.UnableCreateNewStep.Message"), new Exception(e));//"Error creating step" return null; } return inf; } private void setTreeImages() { tiConn.setImage(GUIResource.getInstance().getImageConnection()); tiHops.setImage(GUIResource.getInstance().getImageHop()); tiStep.setImage(GUIResource.getInstance().getImageBol()); tiBase.setImage(GUIResource.getInstance().getImageBol()); tiPlug.setImage(GUIResource.getInstance().getImageBol()); TreeItem tiBaseCat[]=tiBase.getItems(); for (int x=0;x<tiBaseCat.length;x++) { tiBaseCat[x].setImage(GUIResource.getInstance().getImageBol()); TreeItem ti[] = tiBaseCat[x].getItems(); for (int i=0;i<ti.length;i++) { TreeItem stepitem = ti[i]; String description = stepitem.getText(); StepLoader steploader = StepLoader.getInstance(); StepPlugin sp = steploader.findStepPluginWithDescription(description); if (sp!=null) { Image stepimg = (Image)GUIResource.getInstance().getImagesStepsSmall().get(sp.getID()[0]); if (stepimg!=null) { stepitem.setImage(stepimg); } } } } TreeItem tiPlugCat[]=tiPlug.getItems(); for (int x=0;x<tiPlugCat.length;x++) { tiPlugCat[x].setImage(GUIResource.getInstance().getImageBol()); TreeItem ti[] = tiPlugCat[x].getItems(); for (int i=0;i<ti.length;i++) { TreeItem stepitem = ti[i]; String description = stepitem.getText(); StepLoader steploader = StepLoader.getInstance(); StepPlugin sp = steploader.findStepPluginWithDescription(description); if (sp!=null) { Image stepimg = (Image)GUIResource.getInstance().getImagesStepsSmall().get(sp.getID()); if (stepimg!=null) { stepitem.setImage(stepimg); } } } } } public DatabaseMeta getConnection(String name) { int i; for (i=0;i<transMeta.nrDatabases();i++) { DatabaseMeta ci = transMeta.getDatabase(i); if (ci.getName().equalsIgnoreCase(name)) { return ci; } } return null; } public void setShellText() { String fname = transMeta.getFilename(); if (shell.isDisposed()) return; if (rep!=null) { String repository = "["+getRepositoryName()+"]"; String transname = transMeta.getName(); if (transname==null) transname=Messages.getString("Spoon.Various.NoName");//"[no name]" shell.setText(APPL_TITLE+" - "+repository+" "+transname+(transMeta.hasChanged()?(" "+Messages.getString("Spoon.Various.Changed")):""));//(changed) } else { String repository = Messages.getString("Spoon.Various.NoRepository");//"[no repository]" if (fname!=null) { shell.setText(APPL_TITLE+" - "+repository+" File: "+fname+(transMeta.hasChanged()?(" 
"+Messages.getString("Spoon.Various.Changed")):"")); } else { shell.setText(APPL_TITLE+" - "+repository+" "+(transMeta.hasChanged()?(" "+Messages.getString("Spoon.Various.Changed")):"")); } } } public void setFilename(String fname) { if (fname!=null) transMeta.setFilename(fname); setShellText(); } private void printFile() { PrintSpool ps = new PrintSpool(); Printer printer = ps.getPrinter(shell); // Create an image of the screen Point max = transMeta.getMaximum(); Image img = spoongraph.getTransformationImage(printer, max.x, max.y); ps.printImage(shell, props, img); img.dispose(); ps.dispose(); } private boolean setTrans() { TransDialog tid = new TransDialog(shell, SWT.NONE, transMeta, rep); TransMeta ti = tid.open(); setShellText(); return ti!=null; } public void saveSettings() { WindowProperty winprop = new WindowProperty(shell); winprop.setName(APPL_TITLE); props.setScreen(winprop); props.setLogLevel(log.getLogLevelDesc()); props.setLogFilter(log.getFilter()); props.setSashWeights(sashform.getWeights()); props.saveProps(); } public void loadSettings() { log.setLogLevel(props.getLogLevel()); log.setFilter(props.getLogFilter()); transMeta.setMaxUndo(props.getMaxUndo()); transMeta.getDbCache().setActive(props.useDBCache()); } public void changeLooks() { props.setLook(selectionTree); props.setLook(tabfolder, Props.WIDGET_STYLE_TAB); spoongraph.newProps(); refreshTree(); refreshGraph(); } public void undoAction() { spoongraph.forceFocus(); TransAction ta = transMeta.previousUndo(); if (ta==null) return; setUndoMenu(); // something changed: change the menu switch(ta.getType()) { // // NEW // // We created a new step : undo this... case TransAction.TYPE_ACTION_NEW_STEP: // Delete the step at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeStep(idx); } refreshTree(); refreshGraph(); break; // We created a new connection : undo this... case TransAction.TYPE_ACTION_NEW_CONNECTION: // Delete the connection at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeDatabase(idx); } refreshTree(); refreshGraph(); break; // We created a new note : undo this... case TransAction.TYPE_ACTION_NEW_NOTE: // Delete the note at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeNote(idx); } refreshTree(); refreshGraph(); break; // We created a new hop : undo this... case TransAction.TYPE_ACTION_NEW_HOP: // Delete the hop at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeTransHop(idx); } refreshTree(); refreshGraph(); break; // // DELETE // // We delete a step : undo this... case TransAction.TYPE_ACTION_DELETE_STEP: // un-Delete the step at correct location: re-insert for (int i=0;i<ta.getCurrent().length;i++) { StepMeta stepMeta = (StepMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addStep(idx, stepMeta); } refreshTree(); refreshGraph(); break; // We deleted a connection : undo this... case TransAction.TYPE_ACTION_DELETE_CONNECTION: // re-insert the connection at correct location: for (int i=0;i<ta.getCurrent().length;i++) { DatabaseMeta ci = (DatabaseMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addDatabase(idx, ci); } refreshTree(); refreshGraph(); break; // We delete new note : undo this... 
case TransAction.TYPE_ACTION_DELETE_NOTE: // re-insert the note at correct location: for (int i=0;i<ta.getCurrent().length;i++) { NotePadMeta ni = (NotePadMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addNote(idx, ni); } refreshTree(); refreshGraph(); break; // We deleted a hop : undo this... case TransAction.TYPE_ACTION_DELETE_HOP: // re-insert the hop at correct location: for (int i=0;i<ta.getCurrent().length;i++) { TransHopMeta hi = (TransHopMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; // Build a new hop: StepMeta from = transMeta.findStep(hi.getFromStep().getName()); StepMeta to = transMeta.findStep(hi.getToStep().getName()); TransHopMeta hinew = new TransHopMeta(from, to); transMeta.addTransHop(idx, hinew); } refreshTree(); refreshGraph(); break; // // CHANGE // // We changed a step : undo this... case TransAction.TYPE_ACTION_CHANGE_STEP: // Delete the current step, insert previous version. for (int i=0;i<ta.getCurrent().length;i++) { StepMeta prev = (StepMeta)ta.getPrevious()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeStep(idx); transMeta.addStep(idx, prev); } refreshTree(); refreshGraph(); break; // We changed a connection : undo this... case TransAction.TYPE_ACTION_CHANGE_CONNECTION: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { DatabaseMeta prev = (DatabaseMeta)ta.getPrevious()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeDatabase(idx); transMeta.addDatabase(idx, prev); } refreshTree(); refreshGraph(); break; // We changed a note : undo this... case TransAction.TYPE_ACTION_CHANGE_NOTE: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { int idx = ta.getCurrentIndex()[i]; transMeta.removeNote(idx); NotePadMeta prev = (NotePadMeta)ta.getPrevious()[i]; transMeta.addNote(idx, prev); } refreshTree(); refreshGraph(); break; // We changed a hop : undo this... case TransAction.TYPE_ACTION_CHANGE_HOP: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { TransHopMeta prev = (TransHopMeta)ta.getPrevious()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeTransHop(idx); transMeta.addTransHop(idx, prev); } refreshTree(); refreshGraph(); break; // // POSITION // // The position of a step has changed: undo this... case TransAction.TYPE_ACTION_POSITION_STEP: // Find the location of the step: for (int i = 0; i < ta.getCurrentIndex().length; i++) { StepMeta stepMeta = transMeta.getStep(ta.getCurrentIndex()[i]); stepMeta.setLocation(ta.getPreviousLocation()[i]); } refreshGraph(); break; // The position of a note has changed: undo this... case TransAction.TYPE_ACTION_POSITION_NOTE: for (int i=0;i<ta.getCurrentIndex().length;i++) { int idx = ta.getCurrentIndex()[i]; NotePadMeta npi = transMeta.getNote(idx); Point prev = ta.getPreviousLocation()[i]; npi.setLocation(prev); } refreshGraph(); break; default: break; } // OK, now check if we need to do this again... 
if (transMeta.viewNextUndo()!=null) { if (transMeta.viewNextUndo().getNextAlso()) undoAction(); } } public void redoAction() { spoongraph.forceFocus(); TransAction ta = transMeta.nextUndo(); if (ta==null) return; setUndoMenu(); // something changed: change the menu switch(ta.getType()) { // // NEW // case TransAction.TYPE_ACTION_NEW_STEP: // re-delete the step at correct location: for (int i=0;i<ta.getCurrent().length;i++) { StepMeta stepMeta = (StepMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addStep(idx, stepMeta); refreshTree(); refreshGraph(); } break; case TransAction.TYPE_ACTION_NEW_CONNECTION: // re-insert the connection at correct location: for (int i=0;i<ta.getCurrent().length;i++) { DatabaseMeta ci = (DatabaseMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addDatabase(idx, ci); refreshTree(); refreshGraph(); } break; case TransAction.TYPE_ACTION_NEW_NOTE: // re-insert the note at correct location: for (int i=0;i<ta.getCurrent().length;i++) { NotePadMeta ni = (NotePadMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addNote(idx, ni); refreshTree(); refreshGraph(); } break; case TransAction.TYPE_ACTION_NEW_HOP: // re-insert the hop at correct location: for (int i=0;i<ta.getCurrent().length;i++) { TransHopMeta hi = (TransHopMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.addTransHop(idx, hi); refreshTree(); refreshGraph(); } break; // // DELETE // case TransAction.TYPE_ACTION_DELETE_STEP: // re-remove the step at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeStep(idx); } refreshTree(); refreshGraph(); break; case TransAction.TYPE_ACTION_DELETE_CONNECTION: // re-remove the connection at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeDatabase(idx); } refreshTree(); refreshGraph(); break; case TransAction.TYPE_ACTION_DELETE_NOTE: // re-remove the note at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeNote(idx); } refreshTree(); refreshGraph(); break; case TransAction.TYPE_ACTION_DELETE_HOP: // re-remove the hop at correct location: for (int i=ta.getCurrent().length-1;i>=0;i--) { int idx = ta.getCurrentIndex()[i]; transMeta.removeTransHop(idx); } refreshTree(); refreshGraph(); break; // // CHANGE // // We changed a step : undo this... case TransAction.TYPE_ACTION_CHANGE_STEP: // Delete the current step, insert previous version. for (int i=0;i<ta.getCurrent().length;i++) { StepMeta stepMeta = (StepMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeStep(idx); transMeta.addStep(idx, stepMeta); } refreshTree(); refreshGraph(); break; // We changed a connection : undo this... case TransAction.TYPE_ACTION_CHANGE_CONNECTION: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { DatabaseMeta ci = (DatabaseMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeDatabase(idx); transMeta.addDatabase(idx, ci); } refreshTree(); refreshGraph(); break; // We changed a note : undo this... case TransAction.TYPE_ACTION_CHANGE_NOTE: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { NotePadMeta ni = (NotePadMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeNote(idx); transMeta.addNote(idx, ni); } refreshTree(); refreshGraph(); break; // We changed a hop : undo this... 
case TransAction.TYPE_ACTION_CHANGE_HOP: // Delete & re-insert for (int i=0;i<ta.getCurrent().length;i++) { TransHopMeta hi = (TransHopMeta)ta.getCurrent()[i]; int idx = ta.getCurrentIndex()[i]; transMeta.removeTransHop(idx); transMeta.addTransHop(idx, hi); } refreshTree(); refreshGraph(); break; // // CHANGE POSITION // case TransAction.TYPE_ACTION_POSITION_STEP: for (int i=0;i<ta.getCurrentIndex().length;i++) { // Find & change the location of the step: StepMeta stepMeta = transMeta.getStep(ta.getCurrentIndex()[i]); stepMeta.setLocation(ta.getCurrentLocation()[i]); } refreshGraph(); break; case TransAction.TYPE_ACTION_POSITION_NOTE: for (int i=0;i<ta.getCurrentIndex().length;i++) { int idx = ta.getCurrentIndex()[i]; NotePadMeta npi = transMeta.getNote(idx); Point curr = ta.getCurrentLocation()[i]; npi.setLocation(curr); } refreshGraph(); break; default: break; } // OK, now check if we need to do this again... if (transMeta.viewNextUndo()!=null) { if (transMeta.viewNextUndo().getNextAlso()) redoAction(); } } public void setUndoMenu() { if (shell.isDisposed()) return; TransAction prev = transMeta.viewThisUndo(); TransAction next = transMeta.viewNextUndo(); if (prev!=null) { miEditUndo.setEnabled(true); miEditUndo.setText(Messages.getString("Spoon.Menu.Undo.Available", prev.toString()));//"Undo : "+prev.toString()+" \tCTRL-Z" } else { miEditUndo.setEnabled(false); miEditUndo.setText(Messages.getString("Spoon.Menu.Undo.NotAvailable"));//"Undo : not available \tCTRL-Z" } if (next!=null) { miEditRedo.setEnabled(true); miEditRedo.setText(Messages.getString("Spoon.Menu.Redo.Available",next.toString()));//"Redo : "+next.toString()+" \tCTRL-Y" } else { miEditRedo.setEnabled(false); miEditRedo.setText(Messages.getString("Spoon.Menu.Redo.NotAvailable"));//"Redo : not available \tCTRL-Y" } } public void addUndoNew(Object obj[], int position[]) { addUndoNew(obj, position, false); } public void addUndoNew(Object obj[], int position[], boolean nextAlso) { // New object? transMeta.addUndo(obj, null, position, null, null, TransMeta.TYPE_UNDO_NEW, nextAlso); setUndoMenu(); } // Undo delete object public void addUndoDelete(Object obj[], int position[]) { addUndoDelete(obj, position, false); } // Undo delete object public void addUndoDelete(Object obj[], int position[], boolean nextAlso) { transMeta.addUndo(obj, null, position, null, null, TransMeta.TYPE_UNDO_DELETE, nextAlso); setUndoMenu(); } // Change of step, connection, hop or note... public void addUndoPosition(Object obj[], int pos[], Point prev[], Point curr[]) { addUndoPosition(obj, pos, prev, curr, false); } // Change of step, connection, hop or note... public void addUndoPosition(Object obj[], int pos[], Point prev[], Point curr[], boolean nextAlso) { // It's better to store the indexes of the objects, not the objects itself! transMeta.addUndo(obj, null, pos, prev, curr, TransMeta.TYPE_UNDO_POSITION, nextAlso); setUndoMenu(); } // Change of step, connection, hop or note... public void addUndoChange(Object from[], Object to[], int[] pos) { addUndoChange(from, to, pos, false); } // Change of step, connection, hop or note... public void addUndoChange(Object from[], Object to[], int[] pos, boolean nextAlso) { transMeta.addUndo(from, to, pos, null, null, TransMeta.TYPE_UNDO_CHANGE, nextAlso); setUndoMenu(); } /** * Checks *all* the steps in the transformation, puts the result in remarks list */ public void checkTrans() { checkTrans(false); } /** * Check the steps in a transformation * * @param only_selected True: Check only the selected steps... 
*/ public void checkTrans(boolean only_selected) { CheckTransProgressDialog ctpd = new CheckTransProgressDialog(log, props, shell, transMeta, remarks, only_selected); ctpd.open(); // manages the remarks arraylist... showLastTransCheck(); } /** * Show the remarks of the last transformation check that was run. * @see #checkTrans() */ public void showLastTransCheck() { CheckResultDialog crd = new CheckResultDialog(shell, SWT.NONE, remarks); String stepname = crd.open(); if (stepname!=null) { // Go to the indicated step! StepMeta stepMeta = transMeta.findStep(stepname); if (stepMeta!=null) { editStepInfo(stepMeta); } } } public void clearDBCache() { // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) { transMeta.getDbCache().clear(name); } } else { if (name.equalsIgnoreCase(STRING_CONNECTIONS)) transMeta.getDbCache().clear(null); } } } public void exploreDB() { // Determine what menu we selected from... TreeItem ti[] = selectionTree.getSelection(); // Then call editConnection or editStep or editTrans if (ti.length==1) { String name = ti[0].getText(); TreeItem parent = ti[0].getParentItem(); if (parent != null) { String type = parent.getText(); if (type.equalsIgnoreCase(STRING_CONNECTIONS)) { DatabaseMeta dbinfo = transMeta.findDatabase(name); if (dbinfo!=null) { DatabaseExplorerDialog std = new DatabaseExplorerDialog(shell, props, SWT.NONE, dbinfo, transMeta.getDatabases(), true ); std.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.CannotFindConnection.Message"));//"Couldn't find connection, please refresh the tree (F5)!" mb.setText(Messages.getString("Spoon.Dialog.CannotFindConnection.Title"));//"Error!" mb.open(); } } } else { if (name.equalsIgnoreCase(STRING_CONNECTIONS)) transMeta.getDbCache().clear(null); } } } public void analyseImpact() { AnalyseImpactProgressDialog aipd = new AnalyseImpactProgressDialog(log, props, shell, transMeta, impact); impactHasRun = aipd.open(); if (impactHasRun) showLastImpactAnalyses(); } public void showLastImpactAnalyses() { ArrayList rows = new ArrayList(); for (int i=0;i<impact.size();i++) { DatabaseImpact ii = (DatabaseImpact)impact.get(i); rows.add(ii.getRow()); } if (rows.size()>0) { // Display all the rows... PreviewRowsDialog prd = new PreviewRowsDialog(shell, SWT.NONE, "-", rows); prd.setTitleMessage(Messages.getString("Spoon.Dialog.ImpactAnalyses.Title"), Messages.getString("Spoon.Dialog.ImpactAnalyses.Message"));//"Impact analyses" "Result of analyses:" prd.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION ); if (impactHasRun) { mb.setMessage(Messages.getString("Spoon.Dialog.TransformationNoImpactOnDatabase.Message"));//"As far as I can tell, this transformation has no impact on any database." } else { mb.setMessage(Messages.getString("Spoon.Dialog.RunImpactAnalysesFirst.Message"));//"Please run the impact analyses first on this transformation." } mb.setText(Messages.getString("Spoon.Dialog.ImpactAnalyses.Title"));//Impact mb.open(); } } /** * Get & show the SQL required to run the loaded transformation... 
* */ public void getSQL() { GetSQLProgressDialog pspd = new GetSQLProgressDialog(log, props, shell, transMeta); ArrayList stats = pspd.open(); if (stats!=null) // null means error, but we already displayed the error { if (stats.size()>0) { SQLStatementsDialog ssd = new SQLStatementsDialog(shell, SWT.NONE, stats); ssd.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION ); mb.setMessage(Messages.getString("Spoon.Dialog.NoSQLNeedEexecuted.Message"));//As far as I can tell, no SQL statements need to be executed before this transformation can run. mb.setText(Messages.getString("Spoon.Dialog.NoSQLNeedEexecuted.Title"));//"SQL" mb.open(); } } } public void toClipboard(String cliptext) { props.toClipboard(cliptext); } public String fromClipboard() { return props.fromClipboard(); } /** * Paste transformation from the clipboard... * */ public void pasteTransformation() { log.logDetailed(toString(), Messages.getString("Spoon.Log.PasteTransformationFromClipboard"));//"Paste transformation from the clipboard!" if (showChangedWarning()) { String xml = fromClipboard(); try { Document doc = XMLHandler.loadXMLString(xml); transMeta = new TransMeta(XMLHandler.getSubNode(doc, "transformation")); refreshGraph(); refreshTree(true); } catch(KettleException e) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.ErrorPastingTransformation.Title"), Messages.getString("Spoon.Dialog.ErrorPastingTransformation.Message"), e);//Error pasting transformation "An error occurred pasting a transformation from the clipboard" } } } public void copyTransformation() { toClipboard(XMLHandler.getXMLHeader()+transMeta.getXML()); } public void copyTransformationImage() { Clipboard clipboard = props.getNewClipboard(); Point area = transMeta.getMaximum(); Image image = spoongraph.getTransformationImage(Display.getCurrent(), area.x, area.y); clipboard.setContents(new Object[] { image.getImageData() }, new Transfer[]{ImageDataTransfer.getInstance()}); /** System.out.println("image obtained: "+area.x+"x"+area.y); ShowImageDialog sid = new ShowImageDialog(shell, image); sid.open(); */ } /** * Shows a wizard that creates a new database connection... 
* */ private void createDatabaseWizard() { CreateDatabaseWizard cdw=new CreateDatabaseWizard(); DatabaseMeta newDBInfo=cdw.createAndRunDatabaseWizard(shell, props, transMeta.getDatabases()); if(newDBInfo!=null){ //finished transMeta.addDatabase(newDBInfo); refreshTree(true); refreshGraph(); } } /** * Create a transformation that extracts tables & data from a database.<p><p> * * 0) Select the database to rip<p> * 1) Select the table in the database to copy<p> * 2) Select the database to dump to<p> * 3) Select the repository directory in which it will end up<p> * 4) Select a name for the new transformation<p> * 6) Create 1 transformation for the selected table<p> */ private void copyTableWizard() { if (showChangedWarning()) { final CopyTableWizardPage1 page1 = new CopyTableWizardPage1("1", transMeta.getDatabases()); page1.createControl(shell); final CopyTableWizardPage2 page2 = new CopyTableWizardPage2("2"); page2.createControl(shell); final CopyTableWizardPage3 page3 = new CopyTableWizardPage3 ("3", rep); page3.createControl(shell); Wizard wizard = new Wizard() { public boolean performFinish() { return copyTable(page3.getTransformationName(), page3.getDirectory(), page1.getSourceDatabase(), page1.getTargetDatabase(), page2.getSelection() ); } /** * @see org.eclipse.jface.wizard.Wizard#canFinish() */ public boolean canFinish() { return page3.canFinish(); } }; wizard.addPage(page1); wizard.addPage(page2); wizard.addPage(page3); WizardDialog wd = new WizardDialog(shell, wizard); wd.setMinimumPageSize(700,400); wd.open(); } } public boolean copyTable( String transname, RepositoryDirectory repdir, DatabaseMeta sourceDBInfo, DatabaseMeta targetDBInfo, String tablename ) { try { // // Create a new transformation... // TransMeta ti = new TransMeta(); ti.setName(transname); ti.setDirectory(repdir); ti.setDatabases(transMeta.getDatabases()); // // Add a note // String note = Messages.getString("Spoon.Message.Note.ReadInformationFromTableOnDB",tablename,sourceDBInfo.getDatabaseName() )+Const.CR;//"Reads information from table ["+tablename+"] on database ["+sourceDBInfo+"]" note+=Messages.getString("Spoon.Message.Note.WriteInformationToTableOnDB",tablename,targetDBInfo.getDatabaseName() );//"After that, it writes the information to table ["+tablename+"] on database ["+targetDBInfo+"]" NotePadMeta ni = new NotePadMeta(note, 150, 10, -1, -1); ti.addNote(ni); // // create the source step... // String fromstepname = Messages.getString("Spoon.Message.Note.ReadFromTable",tablename); //"read from ["+tablename+"]"; TableInputMeta tii = new TableInputMeta(); tii.setDatabaseMeta(sourceDBInfo); tii.setSQL("SELECT * FROM "+tablename); StepLoader steploader = StepLoader.getInstance(); String fromstepid = steploader.getStepPluginID(tii); StepMeta fromstep = new StepMeta(log, fromstepid, fromstepname, (StepMetaInterface)tii ); fromstep.setLocation(150,100); fromstep.setDraw(true); fromstep.setDescription(Messages.getString("Spoon.Message.Note.ReadInformationFromTableOnDB",tablename,sourceDBInfo.getDatabaseName() )); ti.addStep(fromstep); // // add logic to rename fields in case any of the field names contain reserved words... // Use metadata logic in SelectValues, use SelectValueInfo... // Database sourceDB = new Database(sourceDBInfo); sourceDB.connect(); // Get the fields for the input table... Row fields = sourceDB.getTableFields(tablename); // See if we need to deal with reserved words... 
int nrReserved = targetDBInfo.getNrReservedWords(fields); if (nrReserved>0) { SelectValuesMeta svi = new SelectValuesMeta(); svi.allocate(0,0,nrReserved); int nr = 0; for (int i=0;i<fields.size();i++) { Value v = fields.getValue(i); if (targetDBInfo.isReservedWord( v.getName() ) ) { svi.getMetaName()[nr] = v.getName(); svi.getMetaRename()[nr] = targetDBInfo.quoteField( v.getName() ); nr++; } } String selstepname =Messages.getString("Spoon.Message.Note.HandleReservedWords"); //"Handle reserved words"; String selstepid = steploader.getStepPluginID(svi); StepMeta selstep = new StepMeta(log, selstepid, selstepname, (StepMetaInterface)svi ); selstep.setLocation(350,100); selstep.setDraw(true); selstep.setDescription(Messages.getString("Spoon.Message.Note.RenamesReservedWords",targetDBInfo.getDatabaseTypeDesc()) );//"Renames reserved words for "+targetDBInfo.getDatabaseTypeDesc() ti.addStep(selstep); TransHopMeta shi = new TransHopMeta(fromstep, selstep); ti.addTransHop(shi); fromstep = selstep; } // // Create the target step... // // // Add the TableOutputMeta step... // String tostepname = Messages.getString("Spoon.Message.Note.WriteToTable",tablename); // "write to ["+tablename+"]"; TableOutputMeta toi = new TableOutputMeta(); toi.setDatabase( targetDBInfo ); toi.setTablename( tablename ); toi.setCommitSize( 200 ); toi.setTruncateTable( true ); String tostepid = steploader.getStepPluginID(toi); StepMeta tostep = new StepMeta(log, tostepid, tostepname, (StepMetaInterface)toi ); tostep.setLocation(550,100); tostep.setDraw(true); tostep.setDescription(Messages.getString("Spoon.Message.Note.WriteInformationToTableOnDB2",tablename,targetDBInfo.getDatabaseName() ));//"Write information to table ["+tablename+"] on database ["+targetDBInfo+"]" ti.addStep(tostep); // // Add a hop between the two steps... // TransHopMeta hi = new TransHopMeta(fromstep, tostep); ti.addTransHop(hi); // OK, if we're still here: overwrite the current transformation... transMeta = ti; refreshGraph(); refreshTree(true); } catch(Exception e) { new ErrorDialog(shell, props, Messages.getString("Spoon.Dialog.UnexpectedError.Title"), Messages.getString("Spoon.Dialog.UnexpectedError.Message"), new KettleException(e.getMessage(), e));//"Unexpected error" "An unexpected error occurred creating the new transformation" return false; } return true; } public String toString() { return APP_NAME; } /** * This is the main procedure for Spoon. * * @param a Arguments are available in the "Get System Info" step. 
*/ public static void main (String [] a) throws KettleException { EnvUtil.environmentInit(); ArrayList args = new ArrayList(); for (int i=0;i<a.length;i++) args.add(a[i]); Display display = new Display(); Splash splash = new Splash(display); StringBuffer optionRepname, optionUsername, optionPassword, optionTransname, optionFilename, optionDirname, optionLogfile, optionLoglevel; CommandLineOption options[] = new CommandLineOption[] { new CommandLineOption("rep", "Repository name", optionRepname=new StringBuffer()), new CommandLineOption("user", "Repository username", optionUsername=new StringBuffer()), new CommandLineOption("pass", "Repository password", optionPassword=new StringBuffer()), new CommandLineOption("trans", "The name of the transformation to launch", optionTransname=new StringBuffer()), new CommandLineOption("dir", "The directory (don't forget the leading /)", optionDirname=new StringBuffer()), new CommandLineOption("file", "The filename (Transformation in XML) to launch", optionFilename=new StringBuffer()), new CommandLineOption("level", "The logging level (Basic, Detailed, Debug, Rowlevel, Error, Nothing)", optionLoglevel=new StringBuffer()), new CommandLineOption("logfile", "The logging file to write to", optionLogfile=new StringBuffer()), new CommandLineOption("log", "The logging file to write to (deprecated)", optionLogfile=new StringBuffer(), false, true), }; // Parse the options... CommandLineOption.parseArguments(args, options); String kettleRepname = Const.getEnvironmentVariable("KETTLE_REPOSITORY", null); String kettleUsername = Const.getEnvironmentVariable("KETTLE_USER", null); String kettlePassword = Const.getEnvironmentVariable("KETTLE_PASSWORD", null); if (!Const.isEmpty(kettleRepname )) optionRepname = new StringBuffer(kettleRepname); if (!Const.isEmpty(kettleUsername)) optionUsername = new StringBuffer(kettleUsername); if (!Const.isEmpty(kettlePassword)) optionPassword = new StringBuffer(kettlePassword); // Before anything else, check the runtime version!!! String version = Const.JAVA_VERSION; if ("1.4".compareToIgnoreCase(version)>0) { System.out.println("The System is running on Java version "+version); System.out.println("Unfortunately, it needs version 1.4 or higher to run."); return; } // Set default Locale: Locale.setDefault(Const.DEFAULT_LOCALE); LogWriter log; if (Const.isEmpty(optionLogfile)) { log=LogWriter.getInstance(Const.SPOON_LOG_FILE, false, LogWriter.LOG_LEVEL_BASIC); } else { log=LogWriter.getInstance( optionLogfile.toString(), true, LogWriter.LOG_LEVEL_BASIC ); } if (log.getRealFilename()!=null) log.logBasic(APP_NAME, Messages.getString("Spoon.Log.LoggingToFile")+log.getRealFilename());//"Logging goes to " if (!Const.isEmpty(optionLoglevel)) { log.setLogLevel(optionLoglevel.toString()); log.logBasic(APP_NAME, Messages.getString("Spoon.Log.LoggingAtLevel")+log.getLogLevelDesc());//"Logging is at level : " } /* Load the plugins etc.*/ StepLoader stloader = StepLoader.getInstance(); if (!stloader.read()) { log.logError(APP_NAME, Messages.getString("Spoon.Log.ErrorLoadingAndHaltSystem"));//Error loading steps & plugins... halting Spoon! return; } /* Load the plugins etc. we need to load jobentry*/ JobEntryLoader jeloader = JobEntryLoader.getInstance(); if (!jeloader.read()) { log.logError("Spoon", "Error loading job entries & plugins... 
halting Kitchen!"); return; } final Spoon win = new Spoon(log, display, null); win.setDestroy(true); win.setArguments((String[])args.toArray(new String[args.size()])); log.logBasic(APP_NAME, Messages.getString("Spoon.Log.MainWindowCreated"));//Main window is created. RepositoryMeta repinfo = null; UserInfo userinfo = null; if (Const.isEmpty(optionRepname) && Const.isEmpty(optionFilename) && win.props.showRepositoriesDialogAtStartup()) { log.logBasic(APP_NAME, Messages.getString("Spoon.Log.AskingForRepository"));//"Asking for repository" int perms[] = new int[] { PermissionMeta.TYPE_PERMISSION_TRANSFORMATION }; splash.hide(); RepositoriesDialog rd = new RepositoriesDialog(win.disp, SWT.NONE, perms, Messages.getString("Spoon.Application.Name"));//"Spoon" if (rd.open()) { repinfo = rd.getRepository(); userinfo = rd.getUser(); if (!userinfo.useTransformations()) { MessageBox mb = new MessageBox(win.shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage(Messages.getString("Spoon.Dialog.RepositoryUserCannotWork.Message"));//"Sorry, this repository user can't work with transformations from the repository." mb.setText(Messages.getString("Spoon.Dialog.RepositoryUserCannotWork.Title"));//"Error!" mb.open(); userinfo = null; repinfo = null; } } else { // Exit point: user pressed CANCEL! if (rd.isCancelled()) { splash.dispose(); win.quitFile(); return; } } } try { // Read kettle transformation specified on command-line? if (!Const.isEmpty(optionRepname) || !Const.isEmpty(optionFilename)) { if (!Const.isEmpty(optionRepname)) { RepositoriesMeta repsinfo = new RepositoriesMeta(log); if (repsinfo.readData()) { repinfo = repsinfo.findRepository(optionRepname.toString()); if (repinfo!=null) { // Define and connect to the repository... win.rep = new Repository(log, repinfo, userinfo); if (win.rep.connect(Messages.getString("Spoon.Application.Name")))//"Spoon" { if (Const.isEmpty(optionDirname)) optionDirname=new StringBuffer(RepositoryDirectory.DIRECTORY_SEPARATOR); // Check username, password win.rep.userinfo = new UserInfo(win.rep, optionUsername.toString(), optionPassword.toString()); if (win.rep.userinfo.getID()>0) { RepositoryDirectory repdir = win.rep.getDirectoryTree().findDirectory(optionDirname.toString()); if (repdir!=null) { win.transMeta = new TransMeta(win.rep, optionTransname.toString(), repdir); win.setFilename(optionRepname.toString()); win.transMeta.clearChanged(); } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.UnableFindDirectory",optionDirname.toString()));//"Can't find directory ["+dirname+"] in the repository." } } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.UnableVerifyUser"));//"Can't verify username and password." win.rep.disconnect(); win.rep=null; } } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.UnableConnectToRepository"));//"Can't connect to the repository." } } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.NoRepositoryRrovided"));//"No repository provided, can't load transformation." } } else { log.logError(APP_NAME, Messages.getString("Spoon.Log.NoRepositoriesDefined"));//"No repositories defined on this system." } } else if (!Const.isEmpty(optionFilename)) { win.transMeta = new TransMeta(optionFilename.toString()); win.setFilename(optionFilename.toString()); win.transMeta.clearChanged(); } } else // Normal operations, nothing on the commandline... { // Can we connect to the repository? 
if (repinfo!=null && userinfo!=null) { win.rep = new Repository(log, repinfo, userinfo); if (!win.rep.connect(Messages.getString("Spoon.Application.Name"))) //"Spoon" { win.rep = null; } } if (win.props.openLastFile()) { log.logDetailed(APP_NAME, Messages.getString("Spoon.Log.TryingOpenLastUsedFile"));//"Trying to open the last file used." String lastfiles[] = win.props.getLastFiles(); String lastdirs[] = win.props.getLastDirs(); boolean lasttypes[] = win.props.getLastTypes(); String lastrepos[] = win.props.getLastRepositories(); if (lastfiles.length>0) { boolean use_repository = repinfo!=null; // Perhaps we need to connect to the repository? if (lasttypes[0]) { if (lastrepos[0]!=null && lastrepos[0].length()>0) { if (use_repository && !lastrepos[0].equalsIgnoreCase(repinfo.getName())) { // We just asked... use_repository = false; } } } if (use_repository || !lasttypes[0]) { if (win.rep!=null) // load from repository... { if (win.rep.getName().equalsIgnoreCase(lastrepos[0])) { RepositoryDirectory repdir = win.rep.getDirectoryTree().findDirectory(lastdirs[0]); if (repdir!=null) { log.logDetailed(APP_NAME, Messages.getString("Spoon.Log.AutoLoadingTransformation",lastfiles[0],lastdirs[0]));//"Auto loading transformation ["+lastfiles[0]+"] from repository directory ["+lastdirs[0]+"]" TransLoadProgressDialog tlpd = new TransLoadProgressDialog(win.shell, win.rep, lastfiles[0], repdir); TransMeta transInfo = tlpd.open(); // = new TransInfo(log, win.rep, lastfiles[0], repdir); if (transInfo != null) { win.transMeta = transInfo; win.setFilename(lastfiles[0]); } } } } else // Load from XML? { win.transMeta = new TransMeta(lastfiles[0]); win.setFilename(lastfiles[0]); } } win.transMeta.clearChanged(); } } } } catch(KettleException ke) { log.logError(APP_NAME, Messages.getString("Spoon.Log.ErrorOccurred")+Const.CR+ke.getMessage());//"An error occurred: " win.rep=null; // ke.printStackTrace(); } win.open (); splash.dispose(); try { while (!win.isDisposed ()) { if (!win.readAndDispatch ()) win.sleep (); } } catch(Throwable e) { log.logError(APP_NAME, Messages.getString("Spoon.Log.UnexpectedErrorOccurred")+Const.CR+e.getMessage());//"An unexpected error occurred in Spoon: probable cause: please close all windows before stopping Spoon! " e.printStackTrace(); } win.dispose(); log.logBasic(APP_NAME, APP_NAME+" "+Messages.getString("Spoon.Log.AppHasEnded"));//" has ended." // Close the logfile log.close(); // Kill all remaining things in this VM! System.exit(0); } /** * @return Returns the transMeta. */ public TransMeta getTransMeta() { return transMeta; } /** * @param transMeta The transMeta to set. */ public void setTransMeta(TransMeta transMeta) { this.transMeta = transMeta; } /** * Create a new SelectValues step in between this step and the previous. * If the previous fields are not there, no mapping can be made, same with the required fields. * @param stepMeta The target step to map against. */ public void generateMapping(StepMeta stepMeta) { try { if (stepMeta!=null) { StepMetaInterface smi = stepMeta.getStepMetaInterface(); Row targetFields = smi.getRequiredFields(); Row sourceFields = transMeta.getPrevStepFields(stepMeta); // Build the mapping: let the user decide!! 
String[] source = sourceFields.getFieldNames(); for (int i=0;i<source.length;i++) { Value v = sourceFields.getValue(i); source[i]+=EnterMappingDialog.STRING_ORIGIN_SEPARATOR+v.getOrigin()+")"; } String[] target = targetFields.getFieldNames(); EnterMappingDialog dialog = new EnterMappingDialog(shell, source, target); ArrayList mappings = dialog.open(); if (mappings!=null) { // OK, so we now know which field maps where. // This allows us to generate the mapping using a SelectValues Step... SelectValuesMeta svm = new SelectValuesMeta(); svm.allocate(mappings.size(), 0, 0); for (int i=0;i<mappings.size();i++) { SourceToTargetMapping mapping = (SourceToTargetMapping) mappings.get(i); svm.getSelectName()[i] = sourceFields.getValue(mapping.getSourcePosition()).getName(); svm.getSelectRename()[i] = target[mapping.getTargetPosition()]; svm.getSelectLength()[i] = -1; svm.getSelectPrecision()[i] = -1; } // Now that we have the meta-data, create a new step info object String stepName = stepMeta.getName()+" Mapping"; stepName = transMeta.getAlternativeStepname(stepName); // if it's already there, rename it. StepMeta newStep = new StepMeta(log, "SelectValues", stepName, svm); newStep.setLocation(stepMeta.getLocation().x+20, stepMeta.getLocation().y+20); newStep.setDraw(true); transMeta.addStep(newStep); addUndoNew(new StepMeta[] { newStep }, new int[] { transMeta.indexOfStep(newStep) }); // Redraw stuff... refreshTree(); refreshGraph(); } } else { System.out.println("No target to do mapping against!"); } } catch(KettleException e) { new ErrorDialog(shell, Props.getInstance(), "Error creating mapping", "There was an error when Kettle tried to generate a mapping against the target step", e); } } }
Fix for #2925 - added ClearDBCacheComplete git-svn-id: 9499f031eb5c9fb9d11553a06c92651e5446d292@1507 5fb7f6ec-07c1-534a-b4ca-9155e429e800
src/be/ibridge/kettle/spoon/Spoon.java
Fix for #2925 - added ClearDBCacheComplete
Java
apache-2.0
573b965b3590fb35acfa5f89b645f22a227c207b
0
CesarPantoja/jena,adrapereira/jena,jianglili007/jena,jianglili007/jena,atsolakid/jena,apache/jena,atsolakid/jena,kidaa/jena,atsolakid/jena,apache/jena,atsolakid/jena,tr3vr/jena,samaitra/jena,CesarPantoja/jena,atsolakid/jena,tr3vr/jena,CesarPantoja/jena,samaitra/jena,adrapereira/jena,kidaa/jena,atsolakid/jena,jianglili007/jena,adrapereira/jena,tr3vr/jena,adrapereira/jena,samaitra/jena,samaitra/jena,kamir/jena,apache/jena,kamir/jena,CesarPantoja/jena,CesarPantoja/jena,CesarPantoja/jena,kamir/jena,samaitra/jena,apache/jena,kamir/jena,jianglili007/jena,jianglili007/jena,apache/jena,adrapereira/jena,tr3vr/jena,kidaa/jena,tr3vr/jena,tr3vr/jena,kidaa/jena,apache/jena,kamir/jena,samaitra/jena,adrapereira/jena,apache/jena,atsolakid/jena,adrapereira/jena,kamir/jena,kidaa/jena,tr3vr/jena,CesarPantoja/jena,jianglili007/jena,kidaa/jena,samaitra/jena,kamir/jena,kidaa/jena,jianglili007/jena,apache/jena
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hp.hpl.jena.sparql.modify; import static com.hp.hpl.jena.sparql.modify.TemplateLib.template ; import java.util.Iterator ; import java.util.List ; import org.openjena.atlas.data.BagFactory ; import org.openjena.atlas.data.DataBag ; import org.openjena.atlas.data.ThresholdPolicy ; import org.openjena.atlas.data.ThresholdPolicyFactory ; import org.openjena.atlas.iterator.Iter ; import org.openjena.riot.SerializationFactoryFinder ; import com.hp.hpl.jena.graph.Graph ; import com.hp.hpl.jena.graph.Node ; import com.hp.hpl.jena.graph.Triple ; import com.hp.hpl.jena.query.Query ; import com.hp.hpl.jena.query.QueryExecutionFactory ; import com.hp.hpl.jena.rdf.model.Model ; import com.hp.hpl.jena.sparql.ARQInternalErrorException ; import com.hp.hpl.jena.sparql.core.DatasetGraph ; import com.hp.hpl.jena.sparql.core.DatasetGraphMap ; import com.hp.hpl.jena.sparql.core.DatasetGraphWrapper ; import com.hp.hpl.jena.sparql.core.Quad ; import com.hp.hpl.jena.sparql.engine.Plan ; import com.hp.hpl.jena.sparql.engine.binding.Binding ; import com.hp.hpl.jena.sparql.engine.binding.BindingRoot ; import com.hp.hpl.jena.sparql.graph.GraphFactory ; import com.hp.hpl.jena.sparql.graph.NodeTransform ; import com.hp.hpl.jena.sparql.graph.NodeTransformLib ; import com.hp.hpl.jena.sparql.modify.request.* ; import com.hp.hpl.jena.sparql.syntax.Element ; import com.hp.hpl.jena.sparql.syntax.ElementGroup ; import com.hp.hpl.jena.sparql.syntax.ElementNamedGraph ; import com.hp.hpl.jena.sparql.syntax.ElementTriplesBlock ; import com.hp.hpl.jena.sparql.util.Context ; import com.hp.hpl.jena.update.GraphStore ; import com.hp.hpl.jena.update.UpdateException ; import com.hp.hpl.jena.util.FileManager ; /** Implementation of general purpose update request execution */ public class UpdateEngineWorker implements UpdateVisitor { protected final GraphStore graphStore ; protected final Binding initialBinding ; protected final boolean alwaysSilent = true ; protected final Context context ; public UpdateEngineWorker(GraphStore graphStore, Binding initialBinding, Context context) { this.graphStore = graphStore ; this.initialBinding = initialBinding ; this.context = context ; } @Override public void visit(UpdateDrop update) { execDropClear(update, false) ; } @Override public void visit(UpdateClear update) { execDropClear(update, true) ; } // ReDo with gs* primitives protected void execDropClear(UpdateDropClear update, boolean isClear) { if ( update.isAll() ) { execDropClear(update, null, true) ; // Always clear. 
execDropClearAllNamed(update, isClear) ; } else if ( update.isAllNamed() ) execDropClearAllNamed(update, isClear) ; else if ( update.isDefault() ) execDropClear(update, null, true) ; else if ( update.isOneGraph() ) execDropClear(update, update.getGraph(), isClear) ; else throw new ARQInternalErrorException("Target is undefined: "+update.getTarget()) ; } protected void execDropClear(UpdateDropClear update, Node g, boolean isClear) { if ( ! alwaysSilent ) { if ( g != null && ! graphStore.containsGraph(g) && ! update.isSilent()) error("No such graph: "+g) ; } if ( isClear ) { if ( g == null || graphStore.containsGraph(g) ) graph(graphStore, g).getBulkUpdateHandler().removeAll() ; } else graphStore.removeGraph(g) ; } protected void execDropClearAllNamed(UpdateDropClear update, boolean isClear) { // Avoid ConcurrentModificationException List<Node> list = Iter.toList(graphStore.listGraphNodes()) ; for ( Node gn : list ) execDropClear(update, gn, isClear) ; } @Override public void visit(UpdateCreate update) { Node g = update.getGraph() ; if ( g == null ) return ; if ( graphStore.containsGraph(g) ) { if ( ! alwaysSilent && ! update.isSilent() ) error("Graph store already contains graph : "+g) ; return ; } // In-memory specific graphStore.addGraph(g, GraphFactory.createDefaultGraph()) ; } @Override public void visit(UpdateLoad update) { String source = update.getSource() ; Node dest = update.getDest() ; try { // // Experimental ; quads reading. Needs redoing. No conneg. // if ( dest == null ) // { // // Quads? // Lang guess = Lang.guess(source, Lang.NTRIPLES) ; // if ( guess.isQuads() ) // { // RiotLoader.read(source, graphStore, guess) ; // return ; // } // } // Read into temporary model to protect against parse errors. Model model = FileManager.get().loadModel(source) ; Graph g = graph(graphStore, dest) ; g.getBulkUpdateHandler().add(model.getGraph()) ; } catch (RuntimeException ex) { if ( ! update.getSilent() ) throw ex ; } } @Override public void visit(UpdateAdd update) { if ( ! validBinaryGraphOp(update) ) return ; if ( update.getSrc().equals(update.getDest()) ) return ; // ADD (DEFAULT or GRAPH) TO (DEFAULT or GRAPH) // Different source and destination. gsCopyTriples(graphStore, update.getSrc(), update.getDest()) ; } @Override public void visit(UpdateCopy update) { if ( ! validBinaryGraphOp(update) ) return ; if ( update.getSrc().equals(update.getDest()) ) return ; // COPY (DEFAULT or GRAPH) TO (DEFAULT or GRAPH) gsCopy(graphStore, update.getSrc(), update.getDest(), update.getSilent()) ; } @Override public void visit(UpdateMove update) { if ( ! validBinaryGraphOp(update) ) return ; if ( update.getSrc().equals(update.getDest()) ) return ; // MOVE (DEFAULT or GRAPH) TO (DEFAULT or GRAPH) // Difefrent source and destination. gsCopy(graphStore, update.getSrc(), update.getDest(), update.getSilent()) ; gsDrop(graphStore, update.getSrc(), true) ; } private boolean validBinaryGraphOp(UpdateBinaryOp update) { if ( update.getSrc().isDefault() ) return true ; if ( update.getSrc().isOneNamedGraph() ) { Node gn = update.getSrc().getGraph() ; if ( ! graphStore.containsGraph(gn) ) { if ( ! 
update.getSilent() ) error("No such graph: "+gn) ; return false ; } return true ; } error("Invalid source target for oepration; "+update.getSrc()) ; return false ; } // ---- // Core operations protected static void gsCopy(GraphStore gStore, Target src, Target dest, boolean isSilent) { if ( dest.equals(src) ) return ; gsClear(gStore, dest, true) ; gsCopyTriples(gStore, src, dest) ; } protected static void gsCopyTriples(GraphStore gStore, Target src, Target dest) { Graph gSrc = graph(gStore, src) ; Graph gDest = graph(gStore, dest) ; // Avoids concurrency problems by reading fully before writing ThresholdPolicy<Triple> policy = ThresholdPolicyFactory.policyFromContext(gStore.getContext()); DataBag<Triple> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.tripleSerializationFactory()) ; try { Iterator<Triple> triples = gSrc.find(null, null, null) ; db.addAll(triples) ; Iter.close(triples) ; Iterator<Triple> it = db.iterator() ; gDest.getBulkUpdateHandler().add(it) ; Iter.close(it); } finally { db.close() ; } } protected static void gsClear(GraphStore gStore, Target target, boolean isSilent) { // No create - we tested earlier. Graph g = graph(gStore, target) ; g.getBulkUpdateHandler().removeAll() ; } protected static void gsDrop(GraphStore gStore, Target target, boolean isSilent) { if ( target.isDefault() ) gStore.getDefaultGraph().getBulkUpdateHandler().removeAll() ; else gStore.removeGraph(target.getGraph()) ; } // ---- @Override public void visit(UpdateDataInsert update) { for ( Quad quad : update.getQuads() ) addToGraphStore(graphStore, quad) ; } @Override public void visit(UpdateDataDelete update) { for ( Quad quad : update.getQuads() ) deleteFromGraphStore(graphStore, quad) ; } @Override public void visit(UpdateDeleteWhere update) { List<Quad> quads = update.getQuads() ; // Convert bNodes to named variables first. // if ( false ) // // Removed from SPARQL // quads = convertBNodesToVariables(quads) ; // Convert quads to a pattern. Element el = elementFromQuads(quads) ; // Decided to serialize the bindings, but could also have decided to // serialize the quads after applying the template instead. 
ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(graphStore.getContext()); DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ; try { Iterator<Binding> bindings = evalBindings(el, null) ; db.addAll(bindings) ; Iter.close(bindings) ; Iterator<Binding> it = db.iterator() ; execDelete(quads, null, it) ; Iter.close(it) ; } finally { db.close() ; } } @Override public void visit(UpdateModify update) { Node withGraph = update.getWithIRI() ; Query query = elementToQuery(update.getWherePattern()) ; // USING/USING NAMED DatasetGraph dsg = processUsing(update) ; // USING overrides WITH if ( dsg == null && withGraph != null ) dsg = processWith(update) ; if ( dsg == null ) dsg = graphStore ; ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(graphStore.getContext()); DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ; try { Iterator<Binding> bindings = evalBindings(query, dsg, initialBinding, context) ; db.addAll(bindings) ; Iter.close(bindings) ; Iterator<Binding> it = db.iterator() ; execDelete(update.getDeleteQuads(), withGraph, it) ; Iter.close(it) ; Iterator<Binding> it2 = db.iterator() ; execInsert(update.getInsertQuads(), withGraph, it2) ; Iter.close(it2) ; } finally { db.close() ; } } // Indirection for subsystems to support USING/USING NAMED. protected DatasetGraph processUsing(UpdateModify update) { if ( update.getUsing().size() == 0 && update.getUsingNamed().size() == 0 ) return null ; Graph dftGraph = GraphFactory.createGraphMem() ; DatasetGraphMap dsg = new DatasetGraphMap(dftGraph) ; if ( update.getUsing().size() > 0 ) { if ( update.getUsing().size() > 1 ) { for ( Node gn : update.getUsing() ) { Graph g2 = graphOrDummy(graphStore, gn) ; dftGraph.getBulkUpdateHandler().add(g2) ; } } else { Node gn = update.getUsing().get(0) ; Graph g = graphOrDummy(graphStore, gn) ; dsg.setDefaultGraph(g) ; } } if ( update.getUsingNamed().size() > 0 ) { // Replace with a no named graphs version. dsg = new DatasetGraphMap(dsg.getDefaultGraph()) ; for ( Node gn : update.getUsingNamed() ) dsg.addGraph(gn, graphOrDummy(graphStore, gn)) ; } return dsg ; } protected DatasetGraph processWith(UpdateModify update) { Node withGraph = update.getWithIRI() ; if ( withGraph == null ) return null ; Graph g = graphOrDummy(graphStore, withGraph) ; DatasetGraph dsg = new DatasetGraphAltDefaultGraph(graphStore, g) ; return dsg ; } private Graph graphOrDummy(DatasetGraph dsg, Node gn) { Graph g = graph(graphStore, gn) ; if ( g == null ) g = GraphFactory.createGraphMem() ; return g ; } protected static List<Quad> unused_convertBNodesToVariables(List<Quad> quads) { NodeTransform bnodesToVariables = new NodeTransformBNodesToVariables() ; return NodeTransformLib.transformQuads(bnodesToVariables, quads) ; } protected Element elementFromQuads(List<Quad> quads) { ElementGroup el = new ElementGroup() ; ElementTriplesBlock x = new ElementTriplesBlock() ; // Maybe empty?? 
el.addElement(x) ; Node g = Quad.defaultGraphNodeGenerated ; for ( Quad q : quads ) { if ( q.getGraph() != g ) { g = q.getGraph() ; x = new ElementTriplesBlock() ; if ( g == null || g == Quad.defaultGraphNodeGenerated ) el.addElement(x) ; else { ElementNamedGraph eng = new ElementNamedGraph(g, x) ; el.addElement(eng) ; } } x.addTriple(q.asTriple()) ; } return el ; } protected void execDelete(List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) { Iterator<Quad> it = template(quads, dftGraph, bindings) ; if ( it == null ) return ; while (it.hasNext()) { Quad q = it.next(); graphStore.delete(q); } // Alternate implementation that can use the graph BulkUpdateHandler, but forces all quads into // memory (we don't want that!). The issue is that all of the quads can be mixed up based on the // user supplied template. If graph stores can benefit from bulk insert/delete operations, then we // need to expose a bulk update interface on GraphStore, not just Graph. // MultiMap<Node, Triple> acc = MultiMap.createMapList() ; // while (it.hasNext()) // { // Quad q = it.next(); // acc.put(q.getGraph(), q.asTriple()) ; // } // for ( Node gn : acc.keys() ) // { // Collection<Triple> triples = acc.get(gn) ; // graph(graphStore, gn).getBulkUpdateHandler().delete(triples.iterator()) ; // } } protected void execInsert(List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) { Iterator<Quad> it = template(quads, dftGraph, bindings) ; if ( it == null ) return ; while (it.hasNext()) { Quad q = it.next(); addToGraphStore(graphStore, q); } } // Catch all individual adds of quads (and deletes - mainly for symmetry). private static void addToGraphStore(GraphStore graphStore, Quad quad) { // Check legal triple. if ( quad.isLegalAsData() ) graphStore.add(quad); // Else drop. //Log.warn(UpdateEngineWorker.class, "Bad quad as data: "+quad) ; } private static void deleteFromGraphStore(GraphStore graphStore, Quad quad) { graphStore.delete(quad) ; } protected Query elementToQuery(Element pattern) { if ( pattern == null ) return null ; Query query = new Query() ; query.setQueryPattern(pattern) ; query.setQuerySelectType() ; query.setQueryResultStar(true) ; query.setResultVars() ; return query ; } static class DatasetGraphAltDefaultGraph extends DatasetGraphWrapper { private Graph dftGraph ; public DatasetGraphAltDefaultGraph(DatasetGraph dsg, Graph dftGraph) { super(dsg) ; setDefaultGraph(dftGraph) ; } @Override public Graph getDefaultGraph() { return dftGraph; } @Override public void setDefaultGraph(Graph g) { dftGraph = g ; } } protected Iterator<Binding> evalBindings(Element pattern, Node dftGraph) { return evalBindings(elementToQuery(pattern), dftGraph) ; } protected Iterator<Binding> evalBindings(Query query, Node dftGraph) { DatasetGraph dsg = graphStore ; if ( query != null ) { if ( dftGraph != null ) { Graph g = graphOrDummy(dsg, dftGraph) ; dsg = new DatasetGraphAltDefaultGraph(dsg, g) ; } } return evalBindings(query, dsg, initialBinding, context) ; } protected static Iterator<Binding> evalBindings(Query query, DatasetGraph dsg, Binding initialBinding, Context context) { // SET UP CONTEXT // The UpdateProcessorBase already copied the context and made it safe ... but that's going to happen again :-( Iterator<Binding> toReturn ; if ( query != null ) { Plan plan = QueryExecutionFactory.createPlan(query, dsg, initialBinding, context) ; toReturn = plan.iterator(); } else { toReturn = Iter.singleton((initialBinding != null) ? 
initialBinding : BindingRoot.create()) ; } return toReturn ; } protected static Graph graph(GraphStore graphStore, Node gn) { if ( gn == null || gn == Quad.defaultGraphNodeGenerated ) return graphStore.getDefaultGraph() ; else return graphStore.getGraph(gn) ; } protected static Graph graph(GraphStore graphStore, Target target) { if ( target.isDefault() ) return graphStore.getDefaultGraph() ; if ( target.isOneNamedGraph() ) return graph(graphStore, target.getGraph()) ; error("Target does not name one graph: "+target) ; return null ; } protected static void error(String msg) { throw new UpdateException(msg) ; } }
jena-arq/src/main/java/com/hp/hpl/jena/sparql/modify/UpdateEngineWorker.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hp.hpl.jena.sparql.modify; import static com.hp.hpl.jena.sparql.modify.TemplateLib.template ; import java.util.Iterator ; import java.util.List ; import org.openjena.atlas.data.BagFactory ; import org.openjena.atlas.data.DataBag ; import org.openjena.atlas.data.ThresholdPolicy ; import org.openjena.atlas.data.ThresholdPolicyFactory ; import org.openjena.atlas.iterator.Iter ; import org.openjena.riot.SerializationFactoryFinder ; import com.hp.hpl.jena.graph.Graph ; import com.hp.hpl.jena.graph.Node ; import com.hp.hpl.jena.graph.Triple ; import com.hp.hpl.jena.query.Query ; import com.hp.hpl.jena.query.QueryExecutionFactory ; import com.hp.hpl.jena.rdf.model.Model ; import com.hp.hpl.jena.sparql.ARQInternalErrorException ; import com.hp.hpl.jena.sparql.core.DatasetGraph ; import com.hp.hpl.jena.sparql.core.DatasetGraphMap ; import com.hp.hpl.jena.sparql.core.DatasetGraphWrapper ; import com.hp.hpl.jena.sparql.core.Quad ; import com.hp.hpl.jena.sparql.engine.Plan ; import com.hp.hpl.jena.sparql.engine.binding.Binding ; import com.hp.hpl.jena.sparql.engine.binding.BindingRoot ; import com.hp.hpl.jena.sparql.graph.GraphFactory ; import com.hp.hpl.jena.sparql.graph.NodeTransform ; import com.hp.hpl.jena.sparql.graph.NodeTransformLib ; import com.hp.hpl.jena.sparql.modify.request.* ; import com.hp.hpl.jena.sparql.syntax.Element ; import com.hp.hpl.jena.sparql.syntax.ElementGroup ; import com.hp.hpl.jena.sparql.syntax.ElementNamedGraph ; import com.hp.hpl.jena.sparql.syntax.ElementTriplesBlock ; import com.hp.hpl.jena.sparql.util.Context ; import com.hp.hpl.jena.update.GraphStore ; import com.hp.hpl.jena.update.UpdateException ; import com.hp.hpl.jena.util.FileManager ; /** Implementation of general purpose update request execution */ public class UpdateEngineWorker implements UpdateVisitor { protected final GraphStore graphStore ; protected final Binding initialBinding ; protected final boolean alwaysSilent = true ; protected final Context context ; public UpdateEngineWorker(GraphStore graphStore, Binding initialBinding, Context context) { this.graphStore = graphStore ; this.initialBinding = initialBinding ; this.context = context ; } @Override public void visit(UpdateDrop update) { execDropClear(update, false) ; } @Override public void visit(UpdateClear update) { execDropClear(update, true) ; } // ReDo with gs* primitives protected void execDropClear(UpdateDropClear update, boolean isClear) { if ( update.isAll() ) { execDropClear(update, null, true) ; // Always clear. 
execDropClearAllNamed(update, isClear) ; } else if ( update.isAllNamed() ) execDropClearAllNamed(update, isClear) ; else if ( update.isDefault() ) execDropClear(update, null, true) ; else if ( update.isOneGraph() ) execDropClear(update, update.getGraph(), isClear) ; else throw new ARQInternalErrorException("Target is undefined: "+update.getTarget()) ; } protected void execDropClear(UpdateDropClear update, Node g, boolean isClear) { if ( ! alwaysSilent ) { if ( g != null && ! graphStore.containsGraph(g) && ! update.isSilent()) error("No such graph: "+g) ; } if ( isClear ) { if ( g == null || graphStore.containsGraph(g) ) graph(graphStore, g).getBulkUpdateHandler().removeAll() ; } else graphStore.removeGraph(g) ; } protected void execDropClearAllNamed(UpdateDropClear update, boolean isClear) { // Avoid ConcurrentModificationException List<Node> list = Iter.toList(graphStore.listGraphNodes()) ; for ( Node gn : list ) execDropClear(update, gn, isClear) ; } @Override public void visit(UpdateCreate update) { Node g = update.getGraph() ; if ( g == null ) return ; if ( graphStore.containsGraph(g) ) { if ( ! alwaysSilent && ! update.isSilent() ) error("Graph store already contains graph : "+g) ; return ; } // In-memory specific graphStore.addGraph(g, GraphFactory.createDefaultGraph()) ; } @Override public void visit(UpdateLoad update) { String source = update.getSource() ; Node dest = update.getDest() ; try { // // Experimental ; quads reading. Needs redoing. No conneg. // if ( dest == null ) // { // // Quads? // Lang guess = Lang.guess(source, Lang.NTRIPLES) ; // if ( guess.isQuads() ) // { // RiotLoader.read(source, graphStore, guess) ; // return ; // } // } // Read into temporary model to protect against parse errors. Model model = FileManager.get().loadModel(source) ; Graph g = graph(graphStore, dest) ; g.getBulkUpdateHandler().add(model.getGraph()) ; } catch (RuntimeException ex) { if ( ! update.getSilent() ) throw ex ; } } @Override public void visit(UpdateAdd update) { if ( ! validBinaryGraphOp(update) ) return ; if ( update.getSrc().equals(update.getDest()) ) return ; // ADD (DEFAULT or GRAPH) TO (DEFAULT or GRAPH) // Different source and destination. gsCopyTriples(graphStore, update.getSrc(), update.getDest()) ; } @Override public void visit(UpdateCopy update) { if ( ! validBinaryGraphOp(update) ) return ; if ( update.getSrc().equals(update.getDest()) ) return ; // COPY (DEFAULT or GRAPH) TO (DEFAULT or GRAPH) gsCopy(graphStore, update.getSrc(), update.getDest(), update.getSilent()) ; } @Override public void visit(UpdateMove update) { if ( ! validBinaryGraphOp(update) ) return ; if ( update.getSrc().equals(update.getDest()) ) return ; // MOVE (DEFAULT or GRAPH) TO (DEFAULT or GRAPH) // Difefrent source and destination. gsCopy(graphStore, update.getSrc(), update.getDest(), update.getSilent()) ; gsDrop(graphStore, update.getSrc(), true) ; } private boolean validBinaryGraphOp(UpdateBinaryOp update) { if ( update.getSrc().isDefault() ) return true ; if ( update.getSrc().isOneNamedGraph() ) { Node gn = update.getSrc().getGraph() ; if ( ! graphStore.containsGraph(gn) ) { if ( ! 
update.getSilent() ) error("No such graph: "+gn) ; return false ; } return true ; } error("Invalid source target for oepration; "+update.getSrc()) ; return false ; } // ---- // Core operations protected static void gsCopy(GraphStore gStore, Target src, Target dest, boolean isSilent) { if ( dest.equals(src) ) return ; gsClear(gStore, dest, true) ; gsCopyTriples(gStore, src, dest) ; } protected static void gsCopyTriples(GraphStore gStore, Target src, Target dest) { Graph gSrc = graph(gStore, src) ; Graph gDest = graph(gStore, dest) ; // Avoids concurrency problems by reading fully before writing ThresholdPolicy<Triple> policy = ThresholdPolicyFactory.policyFromContext(gStore.getContext()); DataBag<Triple> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.tripleSerializationFactory()) ; try { Iterator<Triple> triples = gSrc.find(null, null, null) ; db.addAll(triples) ; Iter.close(triples) ; Iterator<Triple> it = db.iterator() ; gDest.getBulkUpdateHandler().add(it) ; Iter.close(it); } finally { db.close() ; } } protected static void gsClear(GraphStore gStore, Target target, boolean isSilent) { // No create - we tested earlier. Graph g = graph(gStore, target) ; g.getBulkUpdateHandler().removeAll() ; } protected static void gsDrop(GraphStore gStore, Target target, boolean isSilent) { if ( target.isDefault() ) gStore.getDefaultGraph().getBulkUpdateHandler().removeAll() ; else gStore.removeGraph(target.getGraph()) ; } // ---- @Override public void visit(UpdateDataInsert update) { for ( Quad quad : update.getQuads() ) addToGraphStore(graphStore, quad) ; } @Override public void visit(UpdateDataDelete update) { for ( Quad quad : update.getQuads() ) deleteFromGraphStore(graphStore, quad) ; } @Override public void visit(UpdateDeleteWhere update) { List<Quad> quads = update.getQuads() ; // Convert bNodes to named variables first. // if ( false ) // // Removed from SPARQL // quads = convertBNodesToVariables(quads) ; // Convert quads to a pattern. Element el = elementFromQuads(quads) ; // Decided to serialize the bindings, but could also have decided to // serialize the quads after applying the template instead. 
ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(graphStore.getContext()); DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ; try { Iterator<Binding> bindings = evalBindings(el, null) ; db.addAll(bindings) ; Iter.close(bindings) ; Iterator<Binding> it = db.iterator() ; execDelete(quads, null, it) ; Iter.close(it) ; } finally { db.close() ; } } @Override public void visit(UpdateModify update) { Node withGraph = update.getWithIRI() ; Query query = elementToQuery(update.getWherePattern()) ; // USING/USING NAMED DatasetGraph dsg = processUsing(update) ; // USING overrides WITH if ( dsg == null && withGraph != null ) dsg = processWith(update) ; if ( dsg == null ) dsg = graphStore ; ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(graphStore.getContext()); DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ; try { Iterator<Binding> bindings = evalBindings(query, dsg, initialBinding, context) ; db.addAll(bindings) ; Iter.close(bindings) ; Iterator<Binding> it = db.iterator() ; execDelete(update.getDeleteQuads(), withGraph, it) ; Iter.close(it) ; Iterator<Binding> it2 = db.iterator() ; execInsert(update.getInsertQuads(), withGraph, it2) ; Iter.close(it2) ; } finally { db.close() ; } } // Indirection for subsystems to support USING/USING NAMED. protected DatasetGraph processUsing(UpdateModify update) { if ( update.getUsing().size() == 0 && update.getUsingNamed().size() == 0 ) return null ; DatasetGraphMap dsg = new DatasetGraphMap(graphStore) ; if ( update.getUsing().size() > 0 ) { if ( update.getUsing().size() > 1 ) { // Lack of scaling here // Need to take a copy to merge. Graph g = GraphFactory.createGraphMem() ; for ( Node gn : update.getUsing() ) { Graph g2 = graphOrDummy(graphStore, gn) ; g.getBulkUpdateHandler().add(g2) ; } dsg.setDefaultGraph(g) ; } else { Node gn = update.getUsing().get(0) ; Graph g = graphOrDummy(graphStore, gn) ; dsg.setDefaultGraph(g) ; } } if ( update.getUsingNamed().size() > 0 ) { // Replace with a no named graphs version. dsg = new DatasetGraphMap(dsg.getDefaultGraph()) ; for ( Node gn : update.getUsingNamed() ) dsg.addGraph(gn, graphOrDummy(graphStore, gn)) ; } return dsg ; } protected DatasetGraph processWith(UpdateModify update) { Node withGraph = update.getWithIRI() ; if ( withGraph == null ) return null ; Graph g = graphOrDummy(graphStore, withGraph) ; DatasetGraph dsg = new DatasetGraphAltDefaultGraph(graphStore, g) ; return dsg ; } private Graph graphOrDummy(DatasetGraph dsg, Node gn) { Graph g = graph(graphStore, gn) ; if ( g == null ) g = GraphFactory.createGraphMem() ; return g ; } protected static List<Quad> unused_convertBNodesToVariables(List<Quad> quads) { NodeTransform bnodesToVariables = new NodeTransformBNodesToVariables() ; return NodeTransformLib.transformQuads(bnodesToVariables, quads) ; } protected Element elementFromQuads(List<Quad> quads) { ElementGroup el = new ElementGroup() ; ElementTriplesBlock x = new ElementTriplesBlock() ; // Maybe empty?? 
el.addElement(x) ; Node g = Quad.defaultGraphNodeGenerated ; for ( Quad q : quads ) { if ( q.getGraph() != g ) { g = q.getGraph() ; x = new ElementTriplesBlock() ; if ( g == null || g == Quad.defaultGraphNodeGenerated ) el.addElement(x) ; else { ElementNamedGraph eng = new ElementNamedGraph(g, x) ; el.addElement(eng) ; } } x.addTriple(q.asTriple()) ; } return el ; } protected void execDelete(List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) { Iterator<Quad> it = template(quads, dftGraph, bindings) ; if ( it == null ) return ; while (it.hasNext()) { Quad q = it.next(); graphStore.delete(q); } // Alternate implementation that can use the graph BulkUpdateHandler, but forces all quads into // memory (we don't want that!). The issue is that all of the quads can be mixed up based on the // user supplied template. If graph stores can benefit from bulk insert/delete operations, then we // need to expose a bulk update interface on GraphStore, not just Graph. // MultiMap<Node, Triple> acc = MultiMap.createMapList() ; // while (it.hasNext()) // { // Quad q = it.next(); // acc.put(q.getGraph(), q.asTriple()) ; // } // for ( Node gn : acc.keys() ) // { // Collection<Triple> triples = acc.get(gn) ; // graph(graphStore, gn).getBulkUpdateHandler().delete(triples.iterator()) ; // } } protected void execInsert(List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) { Iterator<Quad> it = template(quads, dftGraph, bindings) ; if ( it == null ) return ; while (it.hasNext()) { Quad q = it.next(); addToGraphStore(graphStore, q); } } // Catch all individual adds of quads (and deletes - mainly for symmetry). private static void addToGraphStore(GraphStore graphStore, Quad quad) { // Check legal triple. if ( quad.isLegalAsData() ) graphStore.add(quad); // Else drop. //Log.warn(UpdateEngineWorker.class, "Bad quad as data: "+quad) ; } private static void deleteFromGraphStore(GraphStore graphStore, Quad quad) { graphStore.delete(quad) ; } protected Query elementToQuery(Element pattern) { if ( pattern == null ) return null ; Query query = new Query() ; query.setQueryPattern(pattern) ; query.setQuerySelectType() ; query.setQueryResultStar(true) ; query.setResultVars() ; return query ; } static class DatasetGraphAltDefaultGraph extends DatasetGraphWrapper { private Graph dftGraph ; public DatasetGraphAltDefaultGraph(DatasetGraph dsg, Graph dftGraph) { super(dsg) ; setDefaultGraph(dftGraph) ; } @Override public Graph getDefaultGraph() { return dftGraph; } @Override public void setDefaultGraph(Graph g) { dftGraph = g ; } } protected Iterator<Binding> evalBindings(Element pattern, Node dftGraph) { return evalBindings(elementToQuery(pattern), dftGraph) ; } protected Iterator<Binding> evalBindings(Query query, Node dftGraph) { DatasetGraph dsg = graphStore ; if ( query != null ) { if ( dftGraph != null ) { Graph g = graphOrDummy(dsg, dftGraph) ; dsg = new DatasetGraphAltDefaultGraph(dsg, g) ; } } return evalBindings(query, dsg, initialBinding, context) ; } protected static Iterator<Binding> evalBindings(Query query, DatasetGraph dsg, Binding initialBinding, Context context) { // SET UP CONTEXT // The UpdateProcessorBase already copied the context and made it safe ... but that's going to happen again :-( Iterator<Binding> toReturn ; if ( query != null ) { Plan plan = QueryExecutionFactory.createPlan(query, dsg, initialBinding, context) ; toReturn = plan.iterator(); } else { toReturn = Iter.singleton((initialBinding != null) ? 
initialBinding : BindingRoot.create()) ; } return toReturn ; } protected static Graph graph(GraphStore graphStore, Node gn) { if ( gn == null || gn == Quad.defaultGraphNodeGenerated ) return graphStore.getDefaultGraph() ; else return graphStore.getGraph(gn) ; } protected static Graph graph(GraphStore graphStore, Target target) { if ( target.isDefault() ) return graphStore.getDefaultGraph() ; if ( target.isOneNamedGraph() ) return graph(graphStore, target.getGraph()) ; error("Target does not name one graph: "+target) ; return null ; } protected static void error(String msg) { throw new UpdateException(msg) ; } }
Handling USING NAMED - it implies USING [none]. git-svn-id: bc509ec38c1227b3e85ea1246fda136342965d36@1373612 13f79535-47bb-0310-9956-ffa450edef68
jena-arq/src/main/java/com/hp/hpl/jena/sparql/modify/UpdateEngineWorker.java
Handling USING NAMED - it implies USING [none].
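Note on the change above: processUsing() now starts from a fresh in-memory default graph when building the WHERE dataset, so an update that carries USING NAMED clauses but no USING clause sees an empty default graph, which is the "USING NAMED implies USING [none]" rule named in the commit message. Below is a minimal, hedged sketch of exercising that behaviour through the public ARQ update API of this era; the graph IRI, predicate, and class name are made up for illustration and are not part of the commit itself.

import com.hp.hpl.jena.update.GraphStore;
import com.hp.hpl.jena.update.GraphStoreFactory;
import com.hp.hpl.jena.update.UpdateAction;
import com.hp.hpl.jena.update.UpdateFactory;
import com.hp.hpl.jena.update.UpdateRequest;

public class UsingNamedSketch {
    public static void main(String[] args) {
        // Empty in-memory graph store; the default update engine routes this through UpdateEngineWorker.
        GraphStore graphStore = GraphStoreFactory.create();
        // Only USING NAMED is given, so the WHERE pattern's default graph is empty:
        // matches can only come from inside GRAPH <http://example/g> { ... }.
        String update =
            "INSERT { <http://example/s> <http://example/p> ?o } " +
            "USING NAMED <http://example/g> " +
            "WHERE { GRAPH <http://example/g> { ?s <http://example/p> ?o } }";
        UpdateRequest request = UpdateFactory.create(update);
        UpdateAction.execute(request, graphStore);
    }
}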
Java
apache-2.0
6ce8389e6414ad1826aad415cbd582a6838cc264
0
awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples
//snippet-sourcedescription:[AttachRolePolicy.java demonstrates how to attach a policy to an existing IAM role.] //snippet-keyword:[SDK for Java 2.0] //snippet-keyword:[Code Sample] //snippet-service:[AWS IAM] //snippet-sourcetype:[full-example] //snippet-sourcedate:[03/02/2020] //snippet-sourceauthor:[scmacdon-aws] /* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.example.iam; // snippet-start:[iam.java2.attach_role_policy.import] import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.iam.IamClient; import software.amazon.awssdk.services.iam.model.IamException; import software.amazon.awssdk.services.iam.model.AttachRolePolicyRequest; import software.amazon.awssdk.services.iam.model.AttachedPolicy; import software.amazon.awssdk.services.iam.model.ListAttachedRolePoliciesRequest; import software.amazon.awssdk.services.iam.model.ListAttachedRolePoliciesResponse; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; // snippet-end:[iam.java2.attach_role_policy.import] public class AttachRolePolicy { public static void main(String[] args) { final String USAGE = "To run this example, supply a role name that you can obtain from the AWS Console\n" + "Ex: AttachRolePolicy <role-name> <policy-arn>\n"; if (args.length != 2) { System.out.println(USAGE); System.exit(1); } String roleName = args[0]; String policyArn = args[1]; Region region = Region.AWS_GLOBAL; IamClient iam = IamClient.builder() .region(region) .build(); attachIAMRolePolicy(iam, roleName, policyArn); } // snippet-start:[iam.java2.attach_role_policy.main] public static void attachIAMRolePolicy(IamClient iam,String roleName, String policyArn ) { try { List<AttachedPolicy> matchingPolicies = new ArrayList<>(); boolean done = false; String newMarker = null; while(!done) { ListAttachedRolePoliciesResponse response; if (newMarker == null) { ListAttachedRolePoliciesRequest request = ListAttachedRolePoliciesRequest.builder() .roleName(roleName).build(); response = iam.listAttachedRolePolicies(request); } else { ListAttachedRolePoliciesRequest request = ListAttachedRolePoliciesRequest.builder() .roleName(roleName) .marker(newMarker).build(); response = iam.listAttachedRolePolicies(request); } matchingPolicies.addAll( response.attachedPolicies() .stream() .filter(p -> p.policyName().equals(roleName)) .collect(Collectors.toList())); if(!response.isTruncated()) { done = true; } else { newMarker = response.marker(); } } if (matchingPolicies.size() > 0) { System.out.println(roleName + " policy is already attached to this role."); return; } // snippet-start:[iam.java2.attach_role_policy.attach] AttachRolePolicyRequest attachRequest = AttachRolePolicyRequest.builder() .roleName(roleName) .policyArn(policyArn).build(); iam.attachRolePolicy(attachRequest); // snippet-end:[iam.java2.attach_role_policy.attach] System.out.println("Successfully attached policy " + policyArn + " to role " + roleName); } catch (IamException e) { System.err.println(e.awsErrorDetails().errorMessage()); 
System.exit(1); } // snippet-end:[iam.java2.attach_role_policy.main] System.out.println("Done"); } }
javav2/example_code/iam/src/main/java/com/example/iam/AttachRolePolicy.java
//snippet-sourcedescription:[AttachRolePolicy.java demonstrates how to attach a policy to an existing IAM role.] //snippet-keyword:[SDK for Java 2.0] //snippet-keyword:[Code Sample] //snippet-service:[iam] //snippet-sourcetype:[full-example] //snippet-sourcedate:[03/02/2020] //snippet-sourceauthor:[scmacdon-aws] /* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.example.iam; // snippet-start:[iam.java2.attach_role_policy.complete] // snippet-start:[iam.java2.attach_role_policy.import] import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.iam.IamClient; import software.amazon.awssdk.services.iam.model.IamException; import software.amazon.awssdk.services.iam.model.AttachRolePolicyRequest; import software.amazon.awssdk.services.iam.model.AttachedPolicy; import software.amazon.awssdk.services.iam.model.ListAttachedRolePoliciesRequest; import software.amazon.awssdk.services.iam.model.ListAttachedRolePoliciesResponse; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; // snippet-end:[iam.java2.attach_role_policy.import] public class AttachRolePolicy { public static final String POLICY_ARN = "arn:aws:iam::aws:policy/AmazonDynamoDBFullAccess"; public static void main(String[] args) { final String USAGE = "To run this example, supply a role name that you can obtain from the AWS Console\n" + "Ex: AttachRolePolicy <role-name>\n"; if (args.length != 1) { System.out.println(USAGE); System.exit(1); } String roleName = args[0]; // snippet-start:[iam.java2.attach_role_policy.main] // snippet-start:[iam.java2.attach_role_policy.client] Region region = Region.AWS_GLOBAL; IamClient iam = IamClient.builder() .region(region) .build(); try { // snippet-end:[iam.java2.attach_role_policy.client] List<AttachedPolicy> matchingPolicies = new ArrayList<>(); boolean done = false; String newMarker = null; while(!done) { ListAttachedRolePoliciesResponse response; if (newMarker == null) { ListAttachedRolePoliciesRequest request = ListAttachedRolePoliciesRequest.builder() .roleName(roleName).build(); response = iam.listAttachedRolePolicies(request); } else { ListAttachedRolePoliciesRequest request = ListAttachedRolePoliciesRequest.builder() .roleName(roleName) .marker(newMarker).build(); response = iam.listAttachedRolePolicies(request); } matchingPolicies.addAll( response.attachedPolicies() .stream() .filter(p -> p.policyName().equals(roleName)) .collect(Collectors.toList())); if(!response.isTruncated()) { done = true; } else { newMarker = response.marker(); } } if (matchingPolicies.size() > 0) { System.out.println(roleName + " policy is already attached to this role."); return; } // snippet-end:[iam.java2.attach_role_policy.main] // snippet-start:[iam.java2.attach_role_policy.attach] AttachRolePolicyRequest attachRequest = AttachRolePolicyRequest.builder() .roleName(roleName) .policyArn(POLICY_ARN).build(); iam.attachRolePolicy(attachRequest); // snippet-end:[iam.java2.attach_role_policy.attach] System.out.println("Successfully attached policy 
" + POLICY_ARN + " to role " + roleName); } catch (IamException e) { System.err.println(e.awsErrorDetails().errorMessage()); System.exit(1); } System.out.println("Done"); } } // snippet-end:[iam.java2.attach_role_policy.complete]
Update AttachRolePolicy.java: refactored the code to take the policy ARN as a second command-line argument instead of using the hardcoded POLICY_ARN constant, and moved the attach logic into the attachIAMRolePolicy helper.
javav2/example_code/iam/src/main/java/com/example/iam/AttachRolePolicy.java
Update AttachRolePolicy.java
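After this refactor the policy ARN is no longer the hardcoded POLICY_ARN constant but a second command-line argument, and the attach logic lives in the public static attachIAMRolePolicy helper. The following is a small hedged sketch of calling that helper directly rather than through main(); the role name and wrapper class are hypothetical, and the ARN shown is the AmazonDynamoDBFullAccess policy that the pre-refactor version attached unconditionally.

import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.iam.IamClient;
import com.example.iam.AttachRolePolicy;

public class AttachRolePolicyUsage {
    public static void main(String[] args) {
        // IAM is a global service, so the client targets the AWS_GLOBAL pseudo-region,
        // mirroring the setup in AttachRolePolicy.main().
        IamClient iam = IamClient.builder()
                .region(Region.AWS_GLOBAL)
                .build();
        // Hypothetical role name; skips the attach if the policy is already on the role,
        // otherwise attaches it and prints a confirmation.
        AttachRolePolicy.attachIAMRolePolicy(iam, "my-example-role",
                "arn:aws:iam::aws:policy/AmazonDynamoDBFullAccess");
        iam.close();
    }
}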
Java
apache-2.0
065f7bf5e90cd1eeb9c651bc4fbd4dd74cf84051
0
wildfly-security/wildfly-elytron,wildfly-security/wildfly-elytron
/* * JBoss, Home of Professional Open Source. * Copyright 2016 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.security.http.impl; import static java.nio.charset.StandardCharsets.UTF_8; import static org.wildfly.common.Assert.checkNotNullParam; import static org.wildfly.security._private.ElytronMessages.httpSpnego; import static org.wildfly.security.auth.util.GSSCredentialSecurityFactory.SPNEGO; import static org.wildfly.security.http.HttpConstants.AUTHORIZATION; import static org.wildfly.security.http.HttpConstants.CONFIG_CREATE_NAME_GSS_INIT; import static org.wildfly.security.http.HttpConstants.CONFIG_GSS_MANAGER; import static org.wildfly.security.http.HttpConstants.CONFIG_STATE_SCOPES; import static org.wildfly.security.http.HttpConstants.NEGOTIATE; import static org.wildfly.security.http.HttpConstants.SPNEGO_NAME; import static org.wildfly.security.http.HttpConstants.UNAUTHORIZED; import static org.wildfly.security.http.HttpConstants.WWW_AUTHENTICATE; import java.io.IOException; import java.io.Serializable; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.BooleanSupplier; import javax.security.auth.Subject; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.auth.kerberos.KerberosTicket; import javax.security.sasl.AuthorizeCallback; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; import org.ietf.jgss.GSSException; import org.ietf.jgss.GSSManager; import org.ietf.jgss.GSSName; import org.wildfly.security.auth.callback.AuthenticationCompleteCallback; import org.wildfly.security.auth.callback.CachedIdentityAuthorizeCallback; import org.wildfly.security.auth.callback.IdentityCredentialCallback; import org.wildfly.security.auth.callback.ServerCredentialCallback; import org.wildfly.security.auth.principal.NamePrincipal; import org.wildfly.security.auth.server.SecurityIdentity; import org.wildfly.security.cache.CachedIdentity; import org.wildfly.security.cache.IdentityCache; import org.wildfly.security.credential.GSSKerberosCredential; import org.wildfly.security.http.HttpAuthenticationException; import org.wildfly.security.http.HttpScope; import org.wildfly.security.http.HttpServerAuthenticationMechanism; import org.wildfly.security.http.HttpServerRequest; import org.wildfly.security.http.HttpServerResponse; import org.wildfly.security.http.Scope; import org.wildfly.security.mechanism.AuthenticationMechanismException; import org.wildfly.security.mechanism._private.MechanismUtil; import org.wildfly.security.util.ByteIterator; import org.wildfly.security.util._private.Arrays2; /** * A {@link HttpServerAuthenticationMechanism} implementation to support SPNEGO. 
* * @author <a href="mailto:[email protected]">Darran Lofthouse</a> */ public final class SpnegoAuthenticationMechanism implements HttpServerAuthenticationMechanism { private static final String CHALLENGE_PREFIX = NEGOTIATE + " "; private static final String SPNEGO_CONTEXT_KEY = SpnegoAuthenticationMechanism.class.getName() + ".spnego-context"; private static final String CACHED_IDENTITY_KEY = SpnegoAuthenticationMechanism.class.getName() + ".elytron-identity"; private final CallbackHandler callbackHandler; private final GSSManager gssManager; private final Scope[] storageScopes; SpnegoAuthenticationMechanism(final CallbackHandler callbackHandler, final Map<String, ?> properties) { checkNotNullParam("callbackHandler", callbackHandler); checkNotNullParam("properties", properties); this.callbackHandler = callbackHandler; this.gssManager = properties.containsKey(CONFIG_GSS_MANAGER) ? (GSSManager) properties.get(CONFIG_GSS_MANAGER) : GSSManager.getInstance(); // JDK-8194073 workaround (for Oracle JDK + native Kerberos) if (properties.containsKey(CONFIG_CREATE_NAME_GSS_INIT) && Boolean.parseBoolean((String) properties.get(CONFIG_CREATE_NAME_GSS_INIT))) { try { // createName call ensure correct GSSManager initialization gssManager.createName("dummy", GSSName.NT_USER_NAME, SPNEGO); httpSpnego.trace("createName workaround for native GSS initialization applied"); } catch (GSSException e) { httpSpnego.trace("Exception while applying createName workaround for native GSS initialization", e); } } String scopesProperty = (String) properties.get(CONFIG_STATE_SCOPES); if (scopesProperty == null) { storageScopes = new Scope[] { Scope.SESSION, Scope.CONNECTION }; } else { String[] names = scopesProperty.split(","); storageScopes = new Scope[names.length]; for (int i=0;i<names.length;i++) { if ("NONE".equals(names[i])) { storageScopes[i] = null; } else { Scope scope = Scope.valueOf(names[i]); if (scope == Scope.APPLICATION || scope == Scope.GLOBAL) { throw httpSpnego.unsuitableScope(scope.name()); } storageScopes[i] = scope; } } } } @Override public String getMechanismName() { return SPNEGO_NAME; } @Override public void evaluateRequest(HttpServerRequest request) throws HttpAuthenticationException { HttpScope storageScope = getStorageScope(request); IdentityCache identityCache = null; identityCache = createIdentityCache(identityCache, storageScope, false); if (identityCache != null && attemptReAuthentication(identityCache, request)) { httpSpnego.trace("Successfully authorized using cached identity"); return; } // If the scope does not already exist it can't have previously been used to store state. SpnegoContext spnegoContext = storageScope != null && storageScope.exists() ? storageScope.getAttachment(SPNEGO_CONTEXT_KEY, SpnegoContext.class) : null; GSSContext gssContext = spnegoContext != null ? spnegoContext.gssContext : null; KerberosTicket kerberosTicket = spnegoContext != null ? spnegoContext.kerberosTicket : null; httpSpnego.tracef("Evaluating SPNEGO request: cached GSSContext = %s", gssContext); // Do we already have a cached identity? If so use it. 
if (gssContext != null && gssContext.isEstablished()) { identityCache = createIdentityCache(identityCache, storageScope, true); if (authorizeSrcName(gssContext, identityCache)) { httpSpnego.trace("Successfully authorized using cached GSSContext"); request.authenticationComplete(); return; } else { clearAttachments(storageScope); gssContext = null; kerberosTicket = null; } } if (gssContext == null) { // init GSSContext if (spnegoContext == null) { spnegoContext = new SpnegoContext(); } ServerCredentialCallback gssCredentialCallback = new ServerCredentialCallback(GSSKerberosCredential.class); final GSSCredential serviceGssCredential; try { httpSpnego.trace("Obtaining GSSCredential for the service from callback handler..."); callbackHandler.handle(new Callback[] { gssCredentialCallback }); serviceGssCredential = gssCredentialCallback.applyToCredential(GSSKerberosCredential.class, GSSKerberosCredential::getGssCredential); kerberosTicket = gssCredentialCallback.applyToCredential(GSSKerberosCredential.class, GSSKerberosCredential::getKerberosTicket); } catch (IOException | UnsupportedCallbackException e) { throw httpSpnego.mechCallbackHandlerFailedForUnknownReason(e).toHttpAuthenticationException(); } if (serviceGssCredential == null) { throw httpSpnego.unableToObtainServerCredential().toHttpAuthenticationException(); } try { gssContext = gssManager.createContext(serviceGssCredential); if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef("Using SpnegoAuthenticationMechanism to authenticate %s using the following mechanisms: [%s]", serviceGssCredential.getName(), Arrays2.objectToString(serviceGssCredential.getMechs())); } } catch (GSSException e) { throw httpSpnego.mechUnableToCreateGssContext(e).toHttpAuthenticationException(); } spnegoContext.gssContext = gssContext; spnegoContext.kerberosTicket = kerberosTicket; } // authentication exchange List<String> authorizationValues = request.getRequestHeaderValues(AUTHORIZATION); String challenge = null; if (authorizationValues != null && authorizationValues.isEmpty() == false) { for (String current : authorizationValues) { if (current.startsWith(CHALLENGE_PREFIX)) { challenge = current.substring(CHALLENGE_PREFIX.length()); break; } } } if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef("Sent HTTP authorizations: [%s]", Arrays2.objectToString(authorizationValues)); } // Do we have an incoming response to a challenge? If so, process it. if (challenge != null) { httpSpnego.trace("Processing incoming response to a challenge..."); // We only need to store the scope if we have a challenge otherwise the next round // trip will be a new response anyway. if (storageScope != null && (storageScope.exists() || storageScope.create())) { httpSpnego.tracef("Caching SPNEGO Context with GSSContext %s and KerberosTicket %s", gssContext, kerberosTicket); storageScope.setAttachment(SPNEGO_CONTEXT_KEY, spnegoContext); } else { storageScope = null; httpSpnego.trace("No usable HttpScope for storage, continuation will not be possible"); } byte[] decodedValue = ByteIterator.ofBytes(challenge.getBytes(UTF_8)).base64Decode().drain(); Subject subject = new Subject(true, Collections.emptySet(), Collections.emptySet(), kerberosTicket != null ? 
Collections.singleton(kerberosTicket) : Collections.emptySet()); byte[] responseToken; try { final GSSContext finalGssContext = gssContext; responseToken = Subject.doAs(subject, (PrivilegedExceptionAction<byte[]>) () -> finalGssContext.acceptSecContext(decodedValue, 0, decodedValue.length)); } catch (PrivilegedActionException e) { httpSpnego.trace("Call to acceptSecContext failed.", e.getCause()); handleCallback(AuthenticationCompleteCallback.FAILED); clearAttachments(storageScope); request.authenticationFailed(httpSpnego.authenticationFailed()); return; } if (gssContext.isEstablished()) { // no more tokens are needed from the peer final GSSCredential gssCredential; try { gssCredential = gssContext.getCredDelegState() ? gssContext.getDelegCred() : null; } catch (GSSException e) { httpSpnego.trace("Unable to access delegated credential despite being delegated.", e); handleCallback(AuthenticationCompleteCallback.FAILED); clearAttachments(storageScope); request.authenticationFailed(httpSpnego.authenticationFailed()); return; } if (gssCredential != null) { httpSpnego.trace("Associating delegated GSSCredential with identity."); handleCallback(new IdentityCredentialCallback(new GSSKerberosCredential(gssCredential), true)); } else { httpSpnego.trace("No GSSCredential delegated from client."); } httpSpnego.trace("GSSContext established, authorizing..."); identityCache = createIdentityCache(identityCache, storageScope, true); if (authorizeSrcName(gssContext, identityCache)) { httpSpnego.trace("GSSContext established and authorized - authentication complete"); request.authenticationComplete(response -> sendChallenge(responseToken, response, 0)); return; } else { httpSpnego.trace("Authorization of established GSSContext failed"); handleCallback(AuthenticationCompleteCallback.FAILED); clearAttachments(storageScope); request.authenticationFailed(httpSpnego.authenticationFailed()); return; } } else if (responseToken != null && storageScope != null) { httpSpnego.trace("GSSContext establishing - sending negotiation token to the peer"); request.authenticationInProgress(response -> sendChallenge(responseToken, response, UNAUTHORIZED)); return; } else { httpSpnego.trace("GSSContext establishing - unable to hold GSSContext so continuation will not be possible"); handleCallback(AuthenticationCompleteCallback.FAILED); request.authenticationFailed(httpSpnego.authenticationFailed()); return; } } httpSpnego.trace("Request lacks valid authentication credentials"); clearAttachments(storageScope); request.noAuthenticationInProgress(this::sendBareChallenge); } private HttpScope getStorageScope(HttpServerRequest request) throws HttpAuthenticationException { for (Scope scope : storageScopes) { if (scope == null) { return null; } HttpScope httpScope = request.getScope(scope); if (httpScope != null && httpScope.supportsAttachments()) { if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef("Using HttpScope '%s' with ID '%s'", scope.name(), httpScope.getID()); } return httpScope; } else { if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef(httpScope == null ? "HttpScope %s not supported" : "HttpScope %s does not support attachments", scope); } } } throw httpSpnego.unableToIdentifyHttpScope(); } private IdentityCache createIdentityCache(final IdentityCache existingCache, final HttpScope httpScope, boolean forUpdate) { if (existingCache != null || // If we have a cache continue to use it. 
httpScope == null || // If we don't have a scope we can't create a cache (existing cache is null so return it) !httpScope.supportsAttachments() || // It is not null but if it doesn't support attachments pointless to wrap in a cache (!httpScope.exists() && (!forUpdate || !httpScope.create())) // Doesn't exist and if update is requested can't be created ) { return existingCache; } return new IdentityCache() { @Override public CachedIdentity remove() { CachedIdentity cachedIdentity = get(); httpScope.setAttachment(CACHED_IDENTITY_KEY, null); return cachedIdentity; } @Override public void put(SecurityIdentity identity) { httpScope.setAttachment(CACHED_IDENTITY_KEY, new CachedIdentity(SPNEGO_NAME, identity)); } @Override public CachedIdentity get() { return httpScope.getAttachment(CACHED_IDENTITY_KEY, CachedIdentity.class); } }; } private static void clearAttachments(HttpScope scope) { if (scope != null) { scope.setAttachment(SPNEGO_CONTEXT_KEY, null); // clear cache } } private void sendBareChallenge(HttpServerResponse response) { response.addResponseHeader(WWW_AUTHENTICATE, NEGOTIATE); response.setStatusCode(UNAUTHORIZED); } private void sendChallenge(byte[] responseToken, HttpServerResponse response, int statusCode) { if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef("Sending intermediate challenge: %s", Arrays2.objectToString(responseToken)); } if (responseToken == null) { response.addResponseHeader(WWW_AUTHENTICATE, NEGOTIATE); } else { String responseConverted = ByteIterator.ofBytes(responseToken).base64Encode().drainToString(); response.addResponseHeader(WWW_AUTHENTICATE, CHALLENGE_PREFIX + responseConverted); } if (statusCode != 0) { response.setStatusCode(statusCode); } } private boolean attemptReAuthentication(IdentityCache identityCache, HttpServerRequest request) throws HttpAuthenticationException { CachedIdentityAuthorizeCallback authorizeCallback = new CachedIdentityAuthorizeCallback(identityCache); try { callbackHandler.handle(new Callback[] { authorizeCallback }); } catch (IOException | UnsupportedCallbackException e) { throw new HttpAuthenticationException(e); } if (authorizeCallback.isAuthorized()) { try { handleCallback(AuthenticationCompleteCallback.SUCCEEDED); } catch (IOException e) { throw new HttpAuthenticationException(e); } request.authenticationComplete(null, identityCache::remove); return true; } return false; } private boolean authorizeSrcName(GSSContext gssContext, IdentityCache identityCache) throws HttpAuthenticationException { final GSSName srcName; try { srcName = gssContext.getSrcName(); if (srcName == null) { httpSpnego.trace("Authorization failed - srcName of GSSContext (name of initiator) is null - wrong realm or kdc?"); return false; } } catch (GSSException e) { httpSpnego.trace("Unable to obtain srcName from established GSSContext.", e); return false; } final BooleanSupplier authorizedFunction; final Callback authorizeCallBack; if (gssContext.getCredDelegState()) { try { GSSCredential credential = gssContext.getDelegCred(); httpSpnego.tracef("Credential delegation enabled, delegated credential = %s", credential); MechanismUtil.handleCallbacks(httpSpnego, callbackHandler, new IdentityCredentialCallback(new GSSKerberosCredential(credential), true)); } catch (UnsupportedCallbackException ignored) { // ignored } catch (AuthenticationMechanismException e) { throw e.toHttpAuthenticationException(); } catch (GSSException e) { throw new HttpAuthenticationException(e); } } else { httpSpnego.trace("Credential delegation not enabled"); } boolean authorized = 
false; try { String clientName = srcName.toString(); if (identityCache != null) { CachedIdentityAuthorizeCallback cacheCallback = new CachedIdentityAuthorizeCallback(new NamePrincipal(clientName), identityCache, true); authorizedFunction = cacheCallback::isAuthorized; authorizeCallBack = cacheCallback; } else { AuthorizeCallback plainCallback = new AuthorizeCallback(clientName, clientName); authorizedFunction = plainCallback::isAuthorized; authorizeCallBack = plainCallback; } callbackHandler.handle(new Callback[] { authorizeCallBack }); authorized = authorizedFunction.getAsBoolean(); httpSpnego.tracef("Authorized by callback handler = %b clientName = [%s]", authorized, clientName); } catch (IOException e) { httpSpnego.trace("IOException during AuthorizeCallback handling", e); throw httpSpnego.mechServerSideAuthenticationFailed(e).toHttpAuthenticationException(); } catch (UnsupportedCallbackException ignored) { } if (authorized) { // If we fail the caller may still decide to try and continue authentication. handleCallback(AuthenticationCompleteCallback.SUCCEEDED); } return authorized; } private void handleCallback(Callback callback) throws HttpAuthenticationException { try { MechanismUtil.handleCallbacks(httpSpnego, callbackHandler, callback); } catch (AuthenticationMechanismException e) { throw e.toHttpAuthenticationException(); } catch (UnsupportedCallbackException ignored) { } } private static class SpnegoContext implements Serializable { private static final long serialVersionUID = 1168213910107551573L; transient GSSContext gssContext; transient KerberosTicket kerberosTicket; } }
src/main/java/org/wildfly/security/http/impl/SpnegoAuthenticationMechanism.java
/* * JBoss, Home of Professional Open Source. * Copyright 2016 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.security.http.impl; import static java.nio.charset.StandardCharsets.UTF_8; import static org.wildfly.common.Assert.checkNotNullParam; import static org.wildfly.security._private.ElytronMessages.httpSpnego; import static org.wildfly.security.auth.util.GSSCredentialSecurityFactory.SPNEGO; import static org.wildfly.security.http.HttpConstants.AUTHORIZATION; import static org.wildfly.security.http.HttpConstants.CONFIG_CREATE_NAME_GSS_INIT; import static org.wildfly.security.http.HttpConstants.CONFIG_GSS_MANAGER; import static org.wildfly.security.http.HttpConstants.CONFIG_STATE_SCOPES; import static org.wildfly.security.http.HttpConstants.NEGOTIATE; import static org.wildfly.security.http.HttpConstants.SPNEGO_NAME; import static org.wildfly.security.http.HttpConstants.UNAUTHORIZED; import static org.wildfly.security.http.HttpConstants.WWW_AUTHENTICATE; import java.io.IOException; import java.io.Serializable; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.BooleanSupplier; import javax.security.auth.Subject; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.auth.kerberos.KerberosTicket; import javax.security.sasl.AuthorizeCallback; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; import org.ietf.jgss.GSSException; import org.ietf.jgss.GSSManager; import org.ietf.jgss.GSSName; import org.wildfly.security.auth.callback.AuthenticationCompleteCallback; import org.wildfly.security.auth.callback.CachedIdentityAuthorizeCallback; import org.wildfly.security.auth.callback.IdentityCredentialCallback; import org.wildfly.security.auth.callback.ServerCredentialCallback; import org.wildfly.security.auth.principal.NamePrincipal; import org.wildfly.security.auth.server.SecurityIdentity; import org.wildfly.security.cache.CachedIdentity; import org.wildfly.security.cache.IdentityCache; import org.wildfly.security.credential.GSSKerberosCredential; import org.wildfly.security.http.HttpAuthenticationException; import org.wildfly.security.http.HttpScope; import org.wildfly.security.http.HttpServerAuthenticationMechanism; import org.wildfly.security.http.HttpServerRequest; import org.wildfly.security.http.HttpServerResponse; import org.wildfly.security.http.Scope; import org.wildfly.security.mechanism.AuthenticationMechanismException; import org.wildfly.security.mechanism._private.MechanismUtil; import org.wildfly.security.util.ByteIterator; import org.wildfly.security.util._private.Arrays2; /** * A {@link HttpServerAuthenticationMechanism} implementation to support SPNEGO. 
* * @author <a href="mailto:[email protected]">Darran Lofthouse</a> */ public final class SpnegoAuthenticationMechanism implements HttpServerAuthenticationMechanism { private static final String CHALLENGE_PREFIX = NEGOTIATE + " "; private static final String SPNEGO_CONTEXT_KEY = SpnegoAuthenticationMechanism.class.getName() + ".spnego-context"; private static final String CACHED_IDENTITY_KEY = SpnegoAuthenticationMechanism.class.getName() + ".elytron-identity"; private final CallbackHandler callbackHandler; private final GSSManager gssManager; private final Scope[] storageScopes; SpnegoAuthenticationMechanism(final CallbackHandler callbackHandler, final Map<String, ?> properties) { checkNotNullParam("callbackHandler", callbackHandler); checkNotNullParam("properties", properties); this.callbackHandler = callbackHandler; this.gssManager = properties.containsKey(CONFIG_GSS_MANAGER) ? (GSSManager) properties.get(CONFIG_GSS_MANAGER) : GSSManager.getInstance(); // JDK-8194073 workaround (for Oracle JDK + native Kerberos) if (properties.containsKey(CONFIG_CREATE_NAME_GSS_INIT) && Boolean.parseBoolean((String) properties.get(CONFIG_CREATE_NAME_GSS_INIT))) { try { // createName call ensure correct GSSManager initialization gssManager.createName("dummy", GSSName.NT_USER_NAME, SPNEGO); httpSpnego.trace("createName workaround for native GSS initialization applied"); } catch (GSSException e) { httpSpnego.trace("Exception while applying createName workaround for native GSS initialization", e); } } String scopesProperty = (String) properties.get(CONFIG_STATE_SCOPES); if (scopesProperty == null) { storageScopes = new Scope[] { Scope.SESSION, Scope.CONNECTION }; } else { String[] names = scopesProperty.split(","); storageScopes = new Scope[names.length]; for (int i=0;i<names.length;i++) { if ("NONE".equals(names[i])) { storageScopes[i] = null; } else { Scope scope = Scope.valueOf(names[i]); if (scope == Scope.APPLICATION || scope == Scope.GLOBAL) { throw httpSpnego.unsuitableScope(scope.name()); } storageScopes[i] = scope; } } } } @Override public String getMechanismName() { return SPNEGO_NAME; } @Override public void evaluateRequest(HttpServerRequest request) throws HttpAuthenticationException { HttpScope storageScope = getStorageScope(request); IdentityCache identityCache = null; identityCache = createIdentityCache(identityCache, storageScope, false); if (identityCache != null && attemptReAuthentication(identityCache, request)) { httpSpnego.trace("Successfully authorized using cached identity"); return; } // If the scope does not already exist it can't have previously been used to store state. SpnegoContext spnegoContext = storageScope != null && storageScope.exists() ? storageScope.getAttachment(SPNEGO_CONTEXT_KEY, SpnegoContext.class) : null; GSSContext gssContext = spnegoContext != null ? spnegoContext.gssContext : null; KerberosTicket kerberosTicket = spnegoContext != null ? spnegoContext.kerberosTicket : null; httpSpnego.tracef("Evaluating SPNEGO request: cached GSSContext = %s", gssContext); // Do we already have a cached identity? If so use it. 
if (gssContext != null && gssContext.isEstablished()) { identityCache = createIdentityCache(identityCache, storageScope, true); if (authorizeSrcName(gssContext, identityCache)) { httpSpnego.trace("Successfully authorized using cached GSSContext"); request.authenticationComplete(); return; } else { clearAttachments(storageScope); gssContext = null; kerberosTicket = null; } } if (gssContext == null) { // init GSSContext if (spnegoContext == null) { spnegoContext = new SpnegoContext(); } ServerCredentialCallback gssCredentialCallback = new ServerCredentialCallback(GSSKerberosCredential.class); final GSSCredential serviceGssCredential; try { httpSpnego.trace("Obtaining GSSCredential for the service from callback handler..."); callbackHandler.handle(new Callback[] { gssCredentialCallback }); serviceGssCredential = gssCredentialCallback.applyToCredential(GSSKerberosCredential.class, GSSKerberosCredential::getGssCredential); kerberosTicket = gssCredentialCallback.applyToCredential(GSSKerberosCredential.class, GSSKerberosCredential::getKerberosTicket); } catch (IOException | UnsupportedCallbackException e) { throw httpSpnego.mechCallbackHandlerFailedForUnknownReason(e).toHttpAuthenticationException(); } if (serviceGssCredential == null) { throw httpSpnego.unableToObtainServerCredential().toHttpAuthenticationException(); } try { gssContext = gssManager.createContext(serviceGssCredential); if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef("Using SpnegoAuthenticationMechanism to authenticate %s using the following mechanisms: [%s]", serviceGssCredential.getName(), Arrays2.objectToString(serviceGssCredential.getMechs())); } } catch (GSSException e) { throw httpSpnego.mechUnableToCreateGssContext(e).toHttpAuthenticationException(); } spnegoContext.gssContext = gssContext; spnegoContext.kerberosTicket = kerberosTicket; } // authentication exchange List<String> authorizationValues = request.getRequestHeaderValues(AUTHORIZATION); String challenge = null; if (authorizationValues != null && authorizationValues.isEmpty() == false) { for (String current : authorizationValues) { if (current.startsWith(CHALLENGE_PREFIX)) { challenge = current.substring(CHALLENGE_PREFIX.length()); break; } } } if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef("Sent HTTP authorizations: [%s]", Arrays2.objectToString(authorizationValues)); } // Do we have an incoming response to a challenge? If so, process it. if (challenge != null) { httpSpnego.trace("Processing incoming response to a challenge..."); // We only need to store the scope if we have a challenge otherwise the next round // trip will be a new response anyway. if (storageScope != null && (storageScope.exists() || storageScope.create())) { httpSpnego.tracef("Caching SPNEGO Context with GSSContext %s and KerberosTicket %s", gssContext, kerberosTicket); storageScope.setAttachment(SPNEGO_CONTEXT_KEY, spnegoContext); } else { storageScope = null; httpSpnego.trace("No usable HttpScope for storage, continuation will not be possible"); } byte[] decodedValue = ByteIterator.ofBytes(challenge.getBytes(UTF_8)).base64Decode().drain(); Subject subject = new Subject(true, Collections.emptySet(), Collections.emptySet(), kerberosTicket != null ? 
Collections.singleton(kerberosTicket) : Collections.emptySet()); byte[] responseToken; try { final GSSContext finalGssContext = gssContext; responseToken = Subject.doAs(subject, (PrivilegedExceptionAction<byte[]>) () -> finalGssContext.acceptSecContext(decodedValue, 0, decodedValue.length)); } catch (PrivilegedActionException e) { httpSpnego.trace("Call to acceptSecContext failed.", e.getCause()); handleCallback(AuthenticationCompleteCallback.FAILED); clearAttachments(storageScope); request.authenticationFailed(httpSpnego.authenticationFailed()); return; } if (gssContext.isEstablished()) { // no more tokens are needed from the peer final GSSCredential gssCredential; try { gssCredential = gssContext.getCredDelegState() ? gssContext.getDelegCred() : null; } catch (GSSException e) { httpSpnego.trace("Unable to access delegated credential despite being delegated.", e); handleCallback(AuthenticationCompleteCallback.FAILED); clearAttachments(storageScope); request.authenticationFailed(httpSpnego.authenticationFailed()); return; } if (gssCredential != null) { httpSpnego.trace("Associating delegated GSSCredential with identity."); handleCallback(new IdentityCredentialCallback(new GSSKerberosCredential(gssCredential), true)); } else { httpSpnego.trace("No GSSCredential delegated from client."); } httpSpnego.trace("GSSContext established, authorizing..."); identityCache = createIdentityCache(identityCache, storageScope, true); if (authorizeSrcName(gssContext, identityCache)) { httpSpnego.trace("GSSContext established and authorized - authentication complete"); request.authenticationComplete(response -> sendChallenge(responseToken, response, 0)); return; } else { httpSpnego.trace("Authorization of established GSSContext failed"); handleCallback(AuthenticationCompleteCallback.FAILED); clearAttachments(storageScope); request.authenticationFailed(httpSpnego.authenticationFailed()); return; } } else if (responseToken != null && storageScope != null) { httpSpnego.trace("GSSContext establishing - sending negotiation token to the peer"); request.authenticationInProgress(response -> sendChallenge(responseToken, response, UNAUTHORIZED)); return; } else { httpSpnego.trace("GSSContext establishing - unable to hold GSSContext so continuation will not be possible"); handleCallback(AuthenticationCompleteCallback.FAILED); request.authenticationFailed(httpSpnego.authenticationFailed()); return; } } httpSpnego.trace("Request lacks valid authentication credentials"); clearAttachments(storageScope); request.noAuthenticationInProgress(this::sendBareChallenge); } private HttpScope getStorageScope(HttpServerRequest request) throws HttpAuthenticationException { for (Scope scope : storageScopes) { if (scope == null) { return null; } HttpScope httpScope = request.getScope(scope); if (httpScope != null && httpScope.supportsAttachments()) { if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef("Using HttpScope '%s' with ID '%s'", scope.name(), httpScope.getID()); } return httpScope; } else { if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef(httpScope == null ? "HttpScope %s not supported" : "HttpScope %s does not support attachments", scope); } } } throw httpSpnego.unableToIdentifyHttpScope(); } private IdentityCache createIdentityCache(final IdentityCache existingCache, final HttpScope httpScope, boolean forUpdate) { if (existingCache != null || // If we have a cache continue to use it. 
httpScope == null || // If we don't have a scope we can't create a cache (existing cache is null so return it) !httpScope.supportsAttachments() || // It is not null but if it doesn't support attachments pointless to wrap in a cache (!httpScope.exists() && (!forUpdate || !httpScope.create())) // Doesn't exist and if update is requested can't be created ) { return existingCache; } return new IdentityCache() { @Override public CachedIdentity remove() { CachedIdentity cachedIdentity = get(); httpScope.setAttachment(CACHED_IDENTITY_KEY, null); return cachedIdentity; } @Override public void put(SecurityIdentity identity) { httpScope.setAttachment(CACHED_IDENTITY_KEY, new CachedIdentity(SPNEGO_NAME, identity)); } @Override public CachedIdentity get() { return httpScope.getAttachment(CACHED_IDENTITY_KEY, CachedIdentity.class); } }; } private static void clearAttachments(HttpScope scope) { if (scope != null) { scope.setAttachment(SPNEGO_CONTEXT_KEY, null); // clear cache } } private void sendBareChallenge(HttpServerResponse response) { response.addResponseHeader(WWW_AUTHENTICATE, NEGOTIATE); response.setStatusCode(UNAUTHORIZED); } private void sendChallenge(byte[] responseToken, HttpServerResponse response, int statusCode) { if (httpSpnego.isTraceEnabled()) { httpSpnego.tracef("Sending intermediate challenge: %s", Arrays2.objectToString(responseToken)); } if (responseToken == null) { response.addResponseHeader(WWW_AUTHENTICATE, NEGOTIATE); } else { String responseConverted = ByteIterator.ofBytes(responseToken).base64Encode().drainToString(); response.addResponseHeader(WWW_AUTHENTICATE, CHALLENGE_PREFIX + responseConverted); } if (statusCode != 0) { response.setStatusCode(statusCode); } } private boolean attemptReAuthentication(IdentityCache identityCache, HttpServerRequest request) throws HttpAuthenticationException { CachedIdentityAuthorizeCallback authorizeCallback = new CachedIdentityAuthorizeCallback(identityCache); try { callbackHandler.handle(new Callback[] { authorizeCallback }); } catch (IOException | UnsupportedCallbackException e) { throw new HttpAuthenticationException(e); } if (authorizeCallback.isAuthorized()) { try { handleCallback(AuthenticationCompleteCallback.SUCCEEDED); } catch (IOException e) { throw new HttpAuthenticationException(e); } request.authenticationComplete(null, identityCache::remove); return true; } return false; } private boolean authorizeSrcName(GSSContext gssContext, IdentityCache identityCache) throws HttpAuthenticationException { final GSSName srcName; try { srcName = gssContext.getSrcName(); if (srcName == null) { httpSpnego.trace("Authorization failed - srcName of GSSContext (name of initiator) is null - wrong realm or kdc?"); return false; } } catch (GSSException e) { httpSpnego.trace("Unable to obtain srcName from established GSSContext.", e); return false; } final BooleanSupplier authorizedFunction; final Callback authorizeCallBack; if (gssContext.getCredDelegState()) { try { GSSCredential credential = gssContext.getDelegCred(); httpSpnego.tracef("Credential delegation enabled, delegated credential = %s", credential); MechanismUtil.handleCallbacks(httpSpnego, callbackHandler, new IdentityCredentialCallback(new GSSKerberosCredential(credential), true)); } catch (UnsupportedCallbackException ignored) { // ignored } catch (AuthenticationMechanismException e) { throw e.toHttpAuthenticationException(); } catch (GSSException e) { throw new HttpAuthenticationException(e); } } else { httpSpnego.trace("Credential delegation not enabled"); } boolean authorized = 
false; try { String clientName = srcName.toString(); if (identityCache != null) { CachedIdentityAuthorizeCallback cacheCallback = new CachedIdentityAuthorizeCallback(new NamePrincipal(clientName), identityCache, true); authorizedFunction = cacheCallback::isAuthorized; authorizeCallBack = cacheCallback; } else { AuthorizeCallback plainCallback = new AuthorizeCallback(clientName, clientName); authorizedFunction = plainCallback::isAuthorized; authorizeCallBack = plainCallback; } callbackHandler.handle(new Callback[] { authorizeCallBack }); authorized = authorizedFunction.getAsBoolean(); httpSpnego.tracef("Authorized by callback handler = %b clientName = [%s]", authorized, clientName); } catch (IOException e) { httpSpnego.trace("IOException during AuthorizeCallback handling", e); throw httpSpnego.mechServerSideAuthenticationFailed(e).toHttpAuthenticationException(); } catch (UnsupportedCallbackException ignored) { } if (authorized) { // If we fail the caller may still decide to try and continue authentication. handleCallback(AuthenticationCompleteCallback.SUCCEEDED); } return authorized; } private void handleCallback(Callback callback) throws HttpAuthenticationException { try { MechanismUtil.handleCallbacks(httpSpnego, callbackHandler, callback); } catch (AuthenticationMechanismException e) { throw e.toHttpAuthenticationException(); } catch (UnsupportedCallbackException ignored) { } } private static class SpnegoContext implements Serializable { transient GSSContext gssContext; transient KerberosTicket kerberosTicket; } }
[ELY-1521] Coverity, SpnegoContext is Serializable; consider declaring a serialVersionUID
src/main/java/org/wildfly/security/http/impl/SpnegoAuthenticationMechanism.java
[ELY-1521] Coverity, SpnegoContext is Serializable; consider declaring a serialVersionUID
Java
apache-2.0
2ff7186978df4b54f6f4c7d1af4410176a33f824
0
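The ELY-1521 change above only adds an explicit serialVersionUID to the Serializable SPNEGO context holder. A minimal sketch of that pattern, using hypothetical class and field names rather than the Elytron types:

import java.io.Serializable;

// Holder for per-session negotiation state. Declaring serialVersionUID explicitly
// keeps the serialized form stable and addresses the Coverity finding about a
// Serializable class without one.
final class NegotiationState implements Serializable {

    private static final long serialVersionUID = 1L;

    // Non-serializable security objects are marked transient so serializing the
    // enclosing state never attempts to write them out.
    transient Object securityContext;
    transient Object ticket;
}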
xzel23/meja,xzel23/meja
/* * Copyright 2015 Axel Howind ([email protected]). * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.dua3.meja.ui.swing; import java.awt.geom.AffineTransform; import javax.swing.JTextPane; import javax.swing.text.AbstractDocument; import javax.swing.text.AttributeSet; import javax.swing.text.BoxView; import javax.swing.text.ComponentView; import javax.swing.text.Element; import javax.swing.text.IconView; import javax.swing.text.LabelView; import javax.swing.text.ParagraphView; import javax.swing.text.SimpleAttributeSet; import javax.swing.text.StyleConstants; import javax.swing.text.StyledEditorKit; import javax.swing.text.View; import javax.swing.text.ViewFactory; import com.dua3.meja.model.Cell; import com.dua3.meja.model.CellStyle; import com.dua3.meja.model.CellType; import com.dua3.meja.model.Font; import com.dua3.meja.model.HAlign; import com.dua3.meja.model.VAlign; import com.dua3.meja.text.RichText; /** * * @author Axel Howind ([email protected]) */ public class CellEditorPane extends JTextPane { /** * A subclass of BoxView that enables alignment options. */ class AlignedBoxView extends BoxView { AlignedBoxView(Element elem, int axis) { super(elem, axis); } @Override protected void layoutMajorAxis(int targetSpan, int axis, int[] offsets, int[] spans) { super.layoutMajorAxis(targetSpan, axis, offsets, spans); int textBlockHeight = 0; for (int i = 0; i < spans.length; i++) { textBlockHeight += spans[i]; } final int available = targetSpan - textBlockHeight; float offset; float increase; switch (vAlign) { case ALIGN_TOP: offset = 0; increase = 0; break; case ALIGN_BOTTOM: offset = available; increase = 0; break; case ALIGN_MIDDLE: offset = (float) available / 2; increase = 0; break; case ALIGN_JUSTIFY: offset = (float) available / spans.length; increase = offset; break; default: throw new IllegalStateException(); } for (int i = 0; i < offsets.length; i++) { offsets[i] += Math.round(offset + i * increase); } } } /** * A custom EditorKit to allow vertical alignment of text. */ class CellEditorKit extends StyledEditorKit { private static final long serialVersionUID = 1L; @Override public ViewFactory getViewFactory() { return new CellEditorViewFactory(); } } /** * A ViewFactory for the custom EditorKit. */ class CellEditorViewFactory implements ViewFactory { @Override public View create(Element elem) { String kind = elem.getName(); if (kind != null) { switch (kind) { case AbstractDocument.ContentElementName: return new LabelView(elem); case AbstractDocument.ParagraphElementName: return new ParagraphView(elem); case AbstractDocument.SectionElementName: return new AlignedBoxView(elem, View.Y_AXIS); case StyleConstants.ComponentElementName: return new ComponentView(elem); case StyleConstants.IconElementName: return new IconView(elem); } } return new LabelView(elem); } } private static final long serialVersionUID = 1L; /** * Translate {@code HALign.ALIGN_AUTOMATIC} to the actual value for the cell * type. 
* * @param hAlign * the horizontal alignment * @param type * the cell type * @return * <ul> * <li>{@code hAlign}, if * {@code hAlign!=HAlign.ALIGN_AUTOMATIC}</li> * <li>otherwise the horizontal alignment to apply to cells of the * given type</li> * </ul> */ public static HAlign getHAlign(HAlign hAlign, CellType type) { if (hAlign != HAlign.ALIGN_AUTOMATIC) { return hAlign; } switch (type) { case BLANK: case BOOLEAN: case ERROR: case NUMERIC: case DATE: return HAlign.ALIGN_RIGHT; case TEXT: case FORMULA: return HAlign.ALIGN_LEFT; default: throw new IllegalStateException(); } } private VAlign vAlign = VAlign.ALIGN_TOP; public CellEditorPane() { setEditorKit(new CellEditorKit()); } public SimpleAttributeSet getCellAttributes(final CellStyle cellStyle, Cell cell) throws IllegalStateException { SimpleAttributeSet dfltAttr = new SimpleAttributeSet(); switch (getHAlign(cellStyle.getHAlign(), cell.getResultType())) { case ALIGN_LEFT: StyleConstants.setAlignment(dfltAttr, StyleConstants.ALIGN_LEFT); break; case ALIGN_CENTER: StyleConstants.setAlignment(dfltAttr, StyleConstants.ALIGN_CENTER); break; case ALIGN_RIGHT: StyleConstants.setAlignment(dfltAttr, StyleConstants.ALIGN_RIGHT); break; case ALIGN_JUSTIFY: StyleConstants.setAlignment(dfltAttr, StyleConstants.ALIGN_JUSTIFIED); break; case ALIGN_AUTOMATIC: // ALIGN_AUTOMATIC should already be resolved default: throw new IllegalStateException(); } // these must be set despite already setting the component font StyleConstants.setUnderline(dfltAttr, cellStyle.getFont().isUnderlined()); StyleConstants.setStrikeThrough(dfltAttr, cellStyle.getFont().isStrikeThrough()); return dfltAttr; } /** * Set the editor content to the content of given cell. * * @param cell * the cell to display * @param scale * the scale to apply * @param eval * set to true to display formula results instead of the formula * itself */ public void setContent(Cell cell, double scale, boolean eval) { CellStyle cellStyle = cell.getCellStyle(); Font font = cellStyle.getFont(); final java.awt.Font awtFont = font.toAwtFont().deriveFont((float)scale*font.getSizeInPoints()); setFont(awtFont); setBackground(MejaSwingHelper.toAwtColor(cellStyle.getFillBgColor())); setForeground(MejaSwingHelper.toAwtColor(font.getColor())); final RichText text; if (!eval && cell.getCellType() == CellType.FORMULA) { text = RichText.valueOf("=" + cell.getFormula()); } else { text = cell.getAsText(); } AttributeSet dfltAttr = getCellAttributes(cellStyle, cell); setDocument(StyledDocumentBuilder.toStyledDocument(text, dfltAttr, scale)); this.vAlign = cellStyle.getVAlign(); revalidate(); repaint(); } }
meja-ui-swing/src/main/java/com/dua3/meja/ui/swing/CellEditorPane.java
/* * Copyright 2015 Axel Howind ([email protected]). * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.dua3.meja.ui.swing; import java.awt.geom.AffineTransform; import javax.swing.JTextPane; import javax.swing.text.AbstractDocument; import javax.swing.text.AttributeSet; import javax.swing.text.BoxView; import javax.swing.text.ComponentView; import javax.swing.text.Element; import javax.swing.text.IconView; import javax.swing.text.LabelView; import javax.swing.text.ParagraphView; import javax.swing.text.SimpleAttributeSet; import javax.swing.text.StyleConstants; import javax.swing.text.StyledEditorKit; import javax.swing.text.View; import javax.swing.text.ViewFactory; import com.dua3.meja.model.Cell; import com.dua3.meja.model.CellStyle; import com.dua3.meja.model.CellType; import com.dua3.meja.model.Font; import com.dua3.meja.model.HAlign; import com.dua3.meja.model.VAlign; import com.dua3.meja.text.RichText; /** * * @author Axel Howind ([email protected]) */ public class CellEditorPane extends JTextPane { /** * A subclass of BoxView that enables alignment options. */ class AlignedBoxView extends BoxView { AlignedBoxView(Element elem, int axis) { super(elem, axis); } @Override protected void layoutMajorAxis(int targetSpan, int axis, int[] offsets, int[] spans) { super.layoutMajorAxis(targetSpan, axis, offsets, spans); int textBlockHeight = 0; for (int i = 0; i < spans.length; i++) { textBlockHeight += spans[i]; } final int available = targetSpan - textBlockHeight; float offset; float increase; switch (vAlign) { case ALIGN_TOP: offset = 0; increase = 0; break; case ALIGN_BOTTOM: offset = available; increase = 0; break; case ALIGN_MIDDLE: offset = (float) available / 2; increase = 0; break; case ALIGN_JUSTIFY: offset = (float) available / spans.length; increase = offset; break; default: throw new IllegalStateException(); } for (int i = 0; i < offsets.length; i++) { offsets[i] += Math.round(offset + i * increase); } } } /** * A custom EditorKit to allow vertical alignment of text. */ class CellEditorKit extends StyledEditorKit { private static final long serialVersionUID = 1L; @Override public ViewFactory getViewFactory() { return new CellEditorViewFactory(); } } /** * A ViewFactory for the custom EditorKit. */ class CellEditorViewFactory implements ViewFactory { @Override public View create(Element elem) { String kind = elem.getName(); if (kind != null) { switch (kind) { case AbstractDocument.ContentElementName: return new LabelView(elem); case AbstractDocument.ParagraphElementName: return new ParagraphView(elem); case AbstractDocument.SectionElementName: return new AlignedBoxView(elem, View.Y_AXIS); case StyleConstants.ComponentElementName: return new ComponentView(elem); case StyleConstants.IconElementName: return new IconView(elem); } } return new LabelView(elem); } } private static final long serialVersionUID = 1L; /** * Translate {@code HALign.ALIGN_AUTOMATIC} to the actual value for the cell * type. 
* * @param hAlign * the horizontal alignment * @param type * the cell type * @return * <ul> * <li>{@code hAlign}, if * {@code hAlign!=HAlign.ALIGN_AUTOMATIC}</li> * <li>otherwise the horizontal alignment to apply to cells of the * given type</li> * </ul> */ public static HAlign getHAlign(HAlign hAlign, CellType type) { if (hAlign != HAlign.ALIGN_AUTOMATIC) { return hAlign; } switch (type) { case BLANK: case BOOLEAN: case ERROR: case NUMERIC: case DATE: return HAlign.ALIGN_RIGHT; case TEXT: case FORMULA: return HAlign.ALIGN_LEFT; default: throw new IllegalStateException(); } } private VAlign vAlign = VAlign.ALIGN_TOP; public CellEditorPane() { setEditorKit(new CellEditorKit()); } public SimpleAttributeSet getCellAttributes(final CellStyle cellStyle, Cell cell) throws IllegalStateException { SimpleAttributeSet dfltAttr = new SimpleAttributeSet(); switch (getHAlign(cellStyle.getHAlign(), cell.getResultType())) { case ALIGN_LEFT: StyleConstants.setAlignment(dfltAttr, StyleConstants.ALIGN_LEFT); break; case ALIGN_CENTER: StyleConstants.setAlignment(dfltAttr, StyleConstants.ALIGN_CENTER); break; case ALIGN_RIGHT: StyleConstants.setAlignment(dfltAttr, StyleConstants.ALIGN_RIGHT); break; case ALIGN_JUSTIFY: StyleConstants.setAlignment(dfltAttr, StyleConstants.ALIGN_JUSTIFIED); break; case ALIGN_AUTOMATIC: // ALIGN_AUTOMATIC should already be resolved default: throw new IllegalStateException(); } // these must be set despite already setting the component font StyleConstants.setUnderline(dfltAttr, cellStyle.getFont().isUnderlined()); StyleConstants.setStrikeThrough(dfltAttr, cellStyle.getFont().isStrikeThrough()); return dfltAttr; } /** * Set the editor content to the content of given cell. * * @param cell * the cell to display * @param scale * the scale to apply * @param eval * set to true to display formula results instead of the formula * itself */ public void setContent(Cell cell, double scale, boolean eval) { CellStyle cellStyle = cell.getCellStyle(); Font font = cellStyle.getFont(); AffineTransform transform = AffineTransform.getScaleInstance(scale, scale); final java.awt.Font awtFont = font.toAwtFont().deriveFont(transform); setFont(awtFont); setBackground(MejaSwingHelper.toAwtColor(cellStyle.getFillBgColor())); setForeground(MejaSwingHelper.toAwtColor(font.getColor())); final RichText text; if (!eval && cell.getCellType() == CellType.FORMULA) { text = RichText.valueOf("=" + cell.getFormula()); } else { text = cell.getAsText(); } AttributeSet dfltAttr = getCellAttributes(cellStyle, cell); setDocument(StyledDocumentBuilder.toStyledDocument(text, dfltAttr, scale)); this.vAlign = cellStyle.getVAlign(); revalidate(); repaint(); } }
fix zoomed text display on Linux: AffineTransform does not work as expected
meja-ui-swing/src/main/java/com/dua3/meja/ui/swing/CellEditorPane.java
fix zoomed text display
Java
apache-2.0
f4500dc5669b2cff2ab75359c0efc25270358bd2
0
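The meja fix above replaces a font derived from an AffineTransform with one derived from an explicit point size, because the transform-based variant did not render correctly on Linux. A small self-contained sketch of the two approaches, assuming a plain AWT Font rather than meja's Font wrapper:

import java.awt.Font;
import java.awt.geom.AffineTransform;

public class FontScalingSketch {
    public static void main(String[] args) {
        Font base = new Font(Font.SANS_SERIF, Font.PLAIN, 12);
        double scale = 2.0;

        // Transform-based scaling: the font keeps its original point size and
        // carries the scale in its transform, which some platforms ignore.
        Font viaTransform = base.deriveFont(AffineTransform.getScaleInstance(scale, scale));

        // Size-based scaling: bake the scale into the point size itself,
        // which is what the commit above switches to.
        Font viaSize = base.deriveFont((float) (scale * base.getSize2D()));

        System.out.println(viaTransform.getSize2D()); // still 12.0
        System.out.println(viaSize.getSize2D());      // 24.0
    }
}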
zlikun-learning/redis
package com.zlikun.learning.commands; import java.util.UUID; import java.util.stream.Stream; import org.junit.Assert; import org.junit.Test; import com.zlikun.learning.TestBase; import lombok.extern.slf4j.Slf4j; /** * Key operation commands * http://www.redis.net.cn/order/ * @author zlikun * @date 2017-09-15 17:44:49 */ @Slf4j public class KeyTest extends TestBase { @Test public void data() { // Prepare test data jedis.set("key1" ,"A") ; jedis.set("key2" ,"B") ; jedis.set("key3" ,"C") ; } @Test public void del() { // Delete keys log.info("Deleted {} keys in total" ,jedis.del("key1" ,"key2" ,"key3")); } @Test public void dump() { // Serialize the given key and return the serialized value Stream.of(jedis.dump("key1")).forEach(System.out::print); } @Test public void exists() { Assert.assertTrue(jedis.exists("key1")); } @Test public void expire() { final String key = UUID.randomUUID().toString() ; // No expiration set (permanent cache) jedis.set(key, "A") ; Assert.assertEquals(Long.valueOf(-1), jedis.ttl(key)); // Set an expiration time relative to now jedis.expire(key, 30) ; Assert.assertTrue(jedis.ttl(key) > 0); Assert.assertTrue(jedis.ttl(key) <= 30); // Expire the cache by setting a point in time in the past jedis.expireAt(key, 0) ; Assert.assertEquals(Long.valueOf(-2), jedis.ttl(key)); } @Test public void ttl() { final String key = UUID.randomUUID().toString() ; // Permanent cache: remaining time-to-live is -1 jedis.set(key, "A") ; Assert.assertEquals(Long.valueOf(-1), jedis.ttl(key)); // Set the expiration as seconds (milliseconds) remaining from now jedis.setex(key, 30, "A") ; Assert.assertTrue(jedis.ttl(key) > 0); // TTL, in seconds Assert.assertTrue(jedis.ttl(key) <= 30); // PTTL, in milliseconds Assert.assertTrue(jedis.pttl(key) <= 30 * 1000); // A non-existent key's remaining time-to-live is -2 jedis.del(key) ; Assert.assertEquals(Long.valueOf(-2), jedis.ttl(key)); } @Test public void rename() { final String key = UUID.randomUUID().toString() ; jedis.setex(key, 30, "A") ; jedis.rename(key, "newkey") ; Assert.assertFalse(jedis.exists(key)); Assert.assertEquals(Long.valueOf(30), jedis.ttl("newkey")); } @Test public void move() { // Select the DB at index 2 jedis.select(2) ; final String key = UUID.randomUUID().toString() ; jedis.setex(key, 30 ,"A") ; // Switch to the DB at index 0: the data is not visible there jedis.select(0) ; Assert.assertFalse(jedis.exists(key)); // Move it to DB 0 jedis.select(2) ; jedis.move(key, 0) ; jedis.select(0) ; Assert.assertTrue(jedis.exists(key)); } }
redis-commands/src/test/java/com/zlikun/learning/commands/KeyTest.java
package com.zlikun.learning.commands; import java.util.stream.Stream; import org.junit.Assert; import org.junit.Test; import com.zlikun.learning.TestBase; import lombok.extern.slf4j.Slf4j; /** * Key operation commands * http://www.redis.net.cn/order/ * @author zlikun * @date 2017-09-15 17:44:49 */ @Slf4j public class KeyTest extends TestBase { @Test public void data() { // Prepare test data jedis.set("key1" ,"A") ; jedis.set("key2" ,"B") ; jedis.set("key3" ,"C") ; } @Test public void del() { // Delete keys log.info("Deleted {} keys in total" ,jedis.del("key1" ,"key2" ,"key3")); } @Test public void dump() { // Serialize the given key and return the serialized value Stream.of(jedis.dump("key1")).forEach(System.out::print); } @Test public void exists() { Assert.assertTrue(jedis.exists("key1")); } }
Common key operation commands
redis-commands/src/test/java/com/zlikun/learning/commands/KeyTest.java
Common key operation commands
Java
apache-2.0
4a03a27b8daf40c8c585ba286b3a7851ad135cc3
0
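The commit above adds tests for the Redis key commands EXPIRE, TTL, EXPIREAT, RENAME and MOVE via Jedis. A minimal standalone sketch of the TTL conventions those tests assert (-1 for a key with no expiry, -2 for a missing key), assuming a local Redis on the default port:

import redis.clients.jedis.Jedis;

public class KeyTtlSketch {
    public static void main(String[] args) {
        try (Jedis jedis = new Jedis("localhost", 6379)) {
            jedis.set("demo", "A");
            System.out.println(jedis.ttl("demo"));   // -1: key exists, no expiry set

            jedis.expire("demo", 30);                // expire 30 seconds from now
            System.out.println(jedis.ttl("demo"));   // > 0 and <= 30

            jedis.del("demo");
            System.out.println(jedis.ttl("demo"));   // -2: key does not exist
        }
    }
}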
motown-io/motown,pqtoan/motown,quann169/MotownBlueCurrent
vas/view-model/src/main/java/io/motown/vas/viewmodel/model/CdrChargePointType.java
/** * Copyright (C) 2013 Motown.IO ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.motown.vas.viewmodel.model; /** * Charge Point Type as per CDR spec */ public enum CdrChargePointType { /** * Unspecified */ UNSPECIFIED(0), /** * AC < 3,7kW */ AC_LT_37KW(1), /** * AC 3,7kW */ AC_37KW(2), /** * AC 11kW */ AC_11KW(3), /** * AC 22kW */ AC_22KW(4), /** * DC 50kW */ DC_50KW(5); private final Integer value; CdrChargePointType(Integer v) { value = v; } public Integer value() { return value; } public static CdrChargePointType fromValue(Integer v) { for (CdrChargePointType c : values()) { if (c.value.equals(v)) { return c; } } throw new IllegalArgumentException(v.toString()); } }
Removed CdrChargePointType as it's not used.
vas/view-model/src/main/java/io/motown/vas/viewmodel/model/CdrChargePointType.java
Removed CdrChargePointType as it's not used.
Java
apache-2.0
c16adbd4effa6478a9f5b9459a9cc5e6bbc55ec3
0
jcosmo/scoutmgr,jcosmo/scoutmgr,jcosmo/scoutmgr
package scoutmgr.client.application.scout; import com.google.gwt.event.shared.GwtEvent; import com.google.web.bindery.event.shared.EventBus; import com.gwtplatform.mvp.client.HasUiHandlers; import com.gwtplatform.mvp.client.Presenter; import com.gwtplatform.mvp.client.annotations.ContentSlot; import com.gwtplatform.mvp.client.annotations.NameToken; import com.gwtplatform.mvp.client.annotations.ProxyStandard; import com.gwtplatform.mvp.client.proxy.PlaceManager; import com.gwtplatform.mvp.client.proxy.ProxyPlace; import com.gwtplatform.mvp.client.proxy.RevealContentHandler; import com.gwtplatform.mvp.shared.proxy.PlaceRequest; import javax.inject.Inject; import org.realityforge.replicant.client.EntityRepository; import scoutmgr.client.application.ApplicationPresenter; import scoutmgr.client.application.scout.badgework.BadgeworkPresenter; import scoutmgr.client.entity.Person; import scoutmgr.client.ioc.FrontendContext; import scoutmgr.client.net.ScoutmgrDataLoaderService; import scoutmgr.client.place.NameTokens; import scoutmgr.client.service.PersonnelService; import scoutmgr.client.view.model.ScoutViewModel; public class ScoutPresenter extends Presenter<ScoutPresenter.View, ScoutPresenter.Proxy> implements ScoutUiHandlers { private Integer _scoutID; @Inject private BadgeworkPresenter _badgeworkPresenter; @Inject private PlaceManager _placeManager; @Inject private ScoutmgrDataLoaderService _dataloader; @Inject private EntityRepository _entityRepository; @Inject private PersonnelService _personnelService; @Inject private FrontendContext _frontendContext; @ContentSlot public static final GwtEvent.Type<RevealContentHandler<?>> SLOT_BADGEWORK = new GwtEvent.Type<>(); @ProxyStandard @NameToken( { NameTokens.SCOUT } ) interface Proxy extends ProxyPlace<ScoutPresenter> { } interface View extends com.gwtplatform.mvp.client.View, HasUiHandlers<ScoutUiHandlers> { void setScout( ScoutViewModel viewModel ); void showLoadingMessage(); } @Inject ScoutPresenter( final EventBus eventBus, final View view, final Proxy proxy, final ScoutmgrDataLoaderService dataLoader ) { super( eventBus, view, proxy, ApplicationPresenter.SLOT_MAIN_CONTENT ); _dataloader = dataLoader; getView().setUiHandlers( this ); } protected void onBind() { super.onBind(); setInSlot( SLOT_BADGEWORK, _badgeworkPresenter ); } @Override protected void onReveal() { getView().showLoadingMessage(); super.onReveal(); } @Override protected void onHide() { super.onHide(); unsubscribeFromScout(); } @Override public void prepareFromRequest( final PlaceRequest request ) { super.prepareFromRequest( request ); final String idStr = request.getParameter( "id", null ); if ( null != idStr ) { unsubscribeFromScout(); _scoutID = Integer.valueOf( idStr ); getView().showLoadingMessage(); _dataloader.getSession().subscribeToPerson( _scoutID, () -> configureForScout( _scoutID ) ); } else { _placeManager.revealErrorPlace( "Invalid URL" ); } } private void unsubscribeFromScout() { if ( null != _scoutID ) { if ( !_scoutID.equals( _frontendContext.getLoggedInUserID() ) ) { _dataloader.getSession().unsubscribeFromPerson( _scoutID, null ); } _badgeworkPresenter.configureForScout( null ); _scoutID = null; } } private void configureForScout( final Integer id ) { final Person person = _entityRepository.findByID( Person.class, id ); if ( null != person ) { final ScoutViewModel viewModel = new ScoutViewModel( person ); getView().setScout( viewModel ); _badgeworkPresenter.configureForScout( person ); } else { _badgeworkPresenter.configureForScout( null ); 
getView().showLoadingMessage(); } } }
user-experience/src/main/java/scoutmgr/client/application/scout/ScoutPresenter.java
package scoutmgr.client.application.scout; import com.google.gwt.event.shared.GwtEvent; import com.google.web.bindery.event.shared.EventBus; import com.gwtplatform.mvp.client.HasUiHandlers; import com.gwtplatform.mvp.client.Presenter; import com.gwtplatform.mvp.client.annotations.ContentSlot; import com.gwtplatform.mvp.client.annotations.NameToken; import com.gwtplatform.mvp.client.annotations.ProxyStandard; import com.gwtplatform.mvp.client.proxy.PlaceManager; import com.gwtplatform.mvp.client.proxy.ProxyPlace; import com.gwtplatform.mvp.client.proxy.RevealContentHandler; import com.gwtplatform.mvp.shared.proxy.PlaceRequest; import javax.inject.Inject; import org.realityforge.replicant.client.EntityRepository; import scoutmgr.client.application.ApplicationPresenter; import scoutmgr.client.application.scout.badgework.BadgeworkPresenter; import scoutmgr.client.entity.Person; import scoutmgr.client.net.ScoutmgrDataLoaderService; import scoutmgr.client.place.NameTokens; import scoutmgr.client.service.PersonnelService; import scoutmgr.client.view.model.ScoutViewModel; public class ScoutPresenter extends Presenter<ScoutPresenter.View, ScoutPresenter.Proxy> implements ScoutUiHandlers { private Integer _scoutID; @Inject private BadgeworkPresenter _badgeworkPresenter; @Inject private PlaceManager _placeManager; @Inject private ScoutmgrDataLoaderService _dataloader; @Inject private EntityRepository _entityRepository; @Inject private PersonnelService _personnelService; @ContentSlot public static final GwtEvent.Type<RevealContentHandler<?>> SLOT_BADGEWORK = new GwtEvent.Type<>(); @ProxyStandard @NameToken( { NameTokens.SCOUT } ) interface Proxy extends ProxyPlace<ScoutPresenter> { } interface View extends com.gwtplatform.mvp.client.View, HasUiHandlers<ScoutUiHandlers> { void setScout( ScoutViewModel viewModel ); void showLoadingMessage(); } @Inject ScoutPresenter( final EventBus eventBus, final View view, final Proxy proxy, final ScoutmgrDataLoaderService dataLoader ) { super( eventBus, view, proxy, ApplicationPresenter.SLOT_MAIN_CONTENT ); _dataloader = dataLoader; getView().setUiHandlers( this ); } protected void onBind() { super.onBind(); setInSlot( SLOT_BADGEWORK, _badgeworkPresenter ); } @Override protected void onReveal() { getView().showLoadingMessage(); super.onReveal(); } @Override protected void onHide() { super.onHide(); unsubscribeFromScout(); } @Override public void prepareFromRequest( final PlaceRequest request ) { super.prepareFromRequest( request ); final String idStr = request.getParameter( "id", null ); if ( null != idStr ) { unsubscribeFromScout(); _scoutID = Integer.valueOf( idStr ); getView().showLoadingMessage(); _dataloader.getSession().subscribeToPerson( _scoutID, () -> configureForScout( _scoutID ) ); } else { _placeManager.revealErrorPlace( "Invalid URL" ); } } private void unsubscribeFromScout() { if ( null != _scoutID ) { _dataloader.getSession().unsubscribeFromPerson( _scoutID, null ); _badgeworkPresenter.configureForScout( null ); _scoutID = null; } } private void configureForScout( final Integer id ) { final Person person = _entityRepository.findByID( Person.class, id ); if ( null != person ) { final ScoutViewModel viewModel = new ScoutViewModel( person ); getView().setScout( viewModel ); _badgeworkPresenter.configureForScout( person ); } else { _badgeworkPresenter.configureForScout( null ); getView().showLoadingMessage(); } } }
Don't unsubscribe from the currently logged-in user
user-experience/src/main/java/scoutmgr/client/application/scout/ScoutPresenter.java
Don't unsubscribe from the currently logged-in user
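The whole fix above is a single guard inside unsubscribeFromScout(): the presenter must not tear down the subscription that belongs to the logged-in user, because that subscription is shared with the rest of the application; only other scouts are unsubscribed when the view is hidden or the place changes. Below is a minimal standalone sketch of that pattern, assuming simplified stand-in Session and FrontendContext types rather than the real GWTP/Replicant classes used in the presenter.

// Sketch only: hypothetical stand-in types, not the real scoutmgr/GWTP classes.
public class UnsubscribeGuardSketch {

    interface Session { void unsubscribeFromPerson(Integer id); }

    interface FrontendContext { Integer getLoggedInUserID(); }

    private final Session session;
    private final FrontendContext frontendContext;
    private Integer scoutID;

    UnsubscribeGuardSketch(Session session, FrontendContext frontendContext) {
        this.session = session;
        this.frontendContext = frontendContext;
    }

    void showScout(Integer id) { scoutID = id; }

    void unsubscribeFromScout() {
        if (scoutID != null) {
            // Keep the logged-in user's subscription alive; only other scouts are released.
            if (!scoutID.equals(frontendContext.getLoggedInUserID())) {
                session.unsubscribeFromPerson(scoutID);
            }
            scoutID = null;
        }
    }

    public static void main(String[] args) {
        FrontendContext ctx = () -> 42;
        Session session = id -> System.out.println("unsubscribed from person " + id);
        UnsubscribeGuardSketch presenter = new UnsubscribeGuardSketch(session, ctx);

        presenter.showScout(42);
        presenter.unsubscribeFromScout(); // no output: 42 is the logged-in user

        presenter.showScout(7);
        presenter.unsubscribeFromScout(); // prints "unsubscribed from person 7"
    }
}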
Java
bsd-2-clause
c60fdda05311d71f95b2c9bfc92650a0a3c68cef
0
10000TB/galileo,10000TB/galileo
/* Copyright (c) 2013, Colorado State University All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. This software is provided by the copyright holders and contributors "as is" and any express or implied warranties, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose are disclaimed. In no event shall the copyright holder or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. */ package galileo.stat; /** * Provides an online method for computing mean, variance, and standard * deviation. Based on "Note on a Method for Calculating Corrected Sums of * Squares and Products" by B. P. Welford. * * @author malensek */ public class RunningStatistics { private long n; private double mean; private double M2; /** * Creates a Welford running statistics instance without no observed values. */ public RunningStatistics() { } /** * Creates a copy of a {@link RunningStatistics} instance. */ public RunningStatistics(RunningStatistics that) { copyFrom(that); } /** * Create a new {@link RunningStatistics} instance by combining multiple * existing instances. */ public RunningStatistics(RunningStatistics... others) { if (others.length == 0) { return; } else if (others.length == 1) { copyFrom(others[0]); return; } /* Calculate new n */ for (RunningStatistics rs : others) { merge(rs); } } /** * Copies statistics from another RunningStatistics instance. */ private void copyFrom(RunningStatistics that) { this.n = that.n; this.mean = that.mean; this.M2 = that.M2; } public void merge(RunningStatistics that) { long newN = n + that.n; double delta = this.mean - that.mean; mean = (this.n * this.mean + that.n * that.mean) / newN; M2 = M2 + that.M2 + delta * delta * this.n * that.n / newN; n = newN; } /** * Creates a Welford running statistics instance based on a number of * samples. */ public RunningStatistics(double... samples ) { for (double sample : samples) { put(sample); } } /** * Add a number of new samples to the running statistics. */ public void put(double... samples) { for (double sample : samples) { put(sample); } } /** * Add a new sample to the running statistics. */ public void put(double sample) { n++; double delta = sample - mean; mean = mean + delta / n; M2 = M2 + delta * (sample - mean); } /** * Calculates the current running mean for the values observed thus far. * * @return mean of all the samples observed thus far. */ public double mean() { return mean; } /** * Calculates the running sample variance. * * @return sample variance */ public double var() { return var(1.0); } /** * Calculates the population variance. 
* * @return population variance */ public double popVar() { return var(0.0); } /** * Calculates the running variance, given a bias adjustment. * * @param ddof delta degrees-of-freedom to use in the calculation. Use 1.0 * for the sample variance. * * @return variance */ public double var(double ddof) { return M2 / (n - ddof); } /** * Calculates the standard deviation of the samples observed thus far. * * @return population standard deviation */ public double std() { return Math.sqrt(var()); } /** * Calculates the standard deviation of the values observed thus far, given * a bias adjustment. * * @param ddof delta degrees-of-freedom to use in the calculation. * * @return standard deviation */ public double std(double ddof) { return Math.sqrt(var(ddof)); } public double prob(double sample) { double norm = 1 / Math.sqrt(2 * Math.PI * this.var()); return norm * Math.exp((- Math.pow(sample - this.mean, 2)) / (2 * this.var())); } /** * Retrieves the number of samples submitted to the RunningStatistics * instance so far. * * @return number of samples */ public long numSamples() { return n; } @Override public String toString() { String str = ""; str += "Number of Samples: " + n + System.lineSeparator(); str += "Mean: " + mean + System.lineSeparator(); str += "Variance: " + var() + System.lineSeparator(); str += "Std Dev: " + std(); return str; } }
src/galileo/stat/RunningStatistics.java
/* Copyright (c) 2013, Colorado State University All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. This software is provided by the copyright holders and contributors "as is" and any express or implied warranties, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose are disclaimed. In no event shall the copyright holder or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. */ package galileo.stat; /** * Provides an online method for computing mean, variance, and standard * deviation. Based on "Note on a Method for Calculating Corrected Sums of * Squares and Products" by B. P. Welford. * * @author malensek */ public class RunningStatistics { private long n; private double mean; private double M2; /** * Creates a Welford running statistics instance without no observed values. */ public RunningStatistics() { } /** * Creates a copy of a {@link RunningStatistics} instance. */ public RunningStatistics(RunningStatistics that) { copyFrom(that); } /** * Create a new {@link RunningStatistics} instance by combining multiple * existing instances. */ public RunningStatistics(RunningStatistics... others) { if (others.length == 0) { return; } else if (others.length == 1) { copyFrom(others[0]); return; } /* Calculate new n */ for (RunningStatistics rs : others) { merge(rs); } } /** * Copies statistics from another RunningStatistics instance. */ private void copyFrom(RunningStatistics that) { this.n = that.n; this.mean = that.mean; this.M2 = that.M2; } public void merge(RunningStatistics that) { long newN = n + that.n; double delta = this.mean - that.mean; mean = (this.n * this.mean + that.n * that.mean) / newN; M2 = M2 + that.M2 + delta * delta * this.n * that.n / newN; n = newN; } /** * Creates a Welford running statistics instance based on a number of * samples. */ public RunningStatistics(double... samples ) { for (double sample : samples) { put(sample); } } /** * Add a number of new samples to the running statistics. */ public void put(double... samples) { for (double sample : samples) { put(sample); } } /** * Add a new sample to the running statistics. */ public void put(double sample) { n++; double delta = sample - mean; mean = mean + delta / n; M2 = M2 + delta * (sample - mean); } /** * Calculates the current running mean for the values observed thus far. * * @return mean of all the samples observed thus far. */ public double mean() { return mean; } /** * Calculates the running sample variance. * * @return sample variance */ public double var() { return var(1.0); } /** * Calculates the population variance. 
* * @return population variance */ public double popVar() { return var(0.0); } /** * Calculates the running variance, given a bias adjustment. * * @param ddof delta degrees-of-freedom to use in the calculation. Use 1.0 * for the sample variance. * * @return variance */ public double var(double ddof) { return M2 / (n - ddof); } /** * Calculates the standard deviation of the samples observed thus far. * * @return population standard deviation */ public double std() { return Math.sqrt(var()); } /** * Calculates the standard deviation of the values observed thus far, given * a bias adjustment. * * @param ddof delta degrees-of-freedom to use in the calculation. * * @return standard deviation */ public double std(double ddof) { return Math.sqrt(var(ddof)); } /** * Retrieves the number of samples submitted to the RunningStatistics * instance so far. * * @return number of samples */ public long numSamples() { return n; } @Override public String toString() { String str = ""; str += "Number of Samples: " + n + System.lineSeparator(); str += "Mean: " + mean + System.lineSeparator(); str += "Variance: " + var() + System.lineSeparator(); str += "Std Dev: " + std(); return str; } }
Add normal distribution probability function. For use in Naive Bayes classification
src/galileo/stat/RunningStatistics.java
Add normal distribution probability function
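The new prob() method evaluates the normal density N(mean, sample variance) at a given value, i.e. (1 / sqrt(2 * pi * var)) * exp(-(x - mean)^2 / (2 * var)), using the mean and variance the class already tracks online. A hypothetical usage sketch for the Naive Bayes step mentioned in the commit message follows; the feature values and class names are made up, and only RunningStatistics itself comes from the repository.

import galileo.stat.RunningStatistics;

// Toy Gaussian Naive Bayes step for a single feature: pick the class whose
// fitted normal density assigns the observed value the higher likelihood.
public class GaussianNaiveBayesSketch {
    public static void main(String[] args) {
        // One running-statistics accumulator per class (varargs constructor from the diff above).
        RunningStatistics classA = new RunningStatistics(4.9, 5.1, 5.0, 4.8, 5.2);
        RunningStatistics classB = new RunningStatistics(6.8, 7.1, 7.0, 6.9, 7.2);

        double observed = 5.05;
        double likelihoodA = classA.prob(observed); // density of N(meanA, varA) at 5.05
        double likelihoodB = classB.prob(observed); // density of N(meanB, varB) at 5.05

        System.out.println("p(x|A) = " + likelihoodA);
        System.out.println("p(x|B) = " + likelihoodB);
        System.out.println("predicted class: " + (likelihoodA > likelihoodB ? "A" : "B"));
    }
}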
Java
bsd-3-clause
62c49bf7f23c9a99da609324a70b1c451ff0f503
0
BlackXnt/Citadel,TealNerd/Citadel,TreeDB/Citadel,psygate/Citadel,jjj5311/Citadel,Civcraft/Citadel,psygate/Citadel
package com.untamedears.citadel.listener; import static com.untamedears.citadel.Utility.createPlayerReinforcement; import static com.untamedears.citadel.Utility.isReinforced; import static com.untamedears.citadel.Utility.reinforcementBroken; import static com.untamedears.citadel.Utility.sendMessage; import org.bukkit.ChatColor; import org.bukkit.GameMode; import org.bukkit.Material; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.entity.Player; import org.bukkit.event.Event; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.Action; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.event.player.PlayerLoginEvent; import org.bukkit.event.player.PlayerQuitEvent; import org.bukkit.event.player.PlayerBucketEmptyEvent; import org.bukkit.material.Openable; import com.untamedears.citadel.Citadel; import com.untamedears.citadel.GroupManager; import com.untamedears.citadel.MemberManager; import com.untamedears.citadel.PersonalGroupManager; import com.untamedears.citadel.PlacementMode; import com.untamedears.citadel.SecurityLevel; import com.untamedears.citadel.access.AccessDelegate; import com.untamedears.citadel.entity.Faction; import com.untamedears.citadel.entity.Member; import com.untamedears.citadel.entity.PlayerState; import com.untamedears.citadel.entity.IReinforcement; import com.untamedears.citadel.entity.PlayerReinforcement; /** * Created by IntelliJ IDEA. * User: chrisrico * Date: 3/21/12 * Time: 9:57 PM * * Last edited by JonnyD * 7/18/12 */ public class PlayerListener implements Listener { @EventHandler public void login(PlayerLoginEvent ple) { MemberManager memberManager = Citadel.getMemberManager(); memberManager.addOnlinePlayer(ple.getPlayer()); String playerName = ple.getPlayer().getName(); Member member = memberManager.getMember(playerName); if(member == null){ member = new Member(playerName); memberManager.addMember(member); } PersonalGroupManager personalGroupManager = Citadel.getPersonalGroupManager(); boolean hasPersonalGroup = personalGroupManager.hasPersonalGroup(playerName); GroupManager groupManager = Citadel.getGroupManager(); if(!hasPersonalGroup){ String groupName = playerName; int i = 1; while(groupManager.isGroup(groupName)){ groupName = playerName + i; i++; } Faction group = new Faction(groupName, playerName); groupManager.addGroup(group); personalGroupManager.addPersonalGroup(groupName, playerName); } else if(hasPersonalGroup){ String personalGroupName = personalGroupManager.getPersonalGroup(playerName).getGroupName(); if(!groupManager.isGroup(personalGroupName)){ Faction group = new Faction(personalGroupName, playerName); groupManager.addGroup(group); } } } @EventHandler public void quit(PlayerQuitEvent pqe) { Player player = pqe.getPlayer(); MemberManager memberManager = Citadel.getMemberManager(); memberManager.removeOnlinePlayer(player); PlayerState.remove(player); } @EventHandler(priority = EventPriority.LOWEST) public void bookshelf(PlayerInteractEvent pie) { if (pie.hasBlock() && pie.getMaterial() == Material.BOOKSHELF) interact(pie); } @EventHandler(ignoreCancelled = true, priority = EventPriority.LOW) public void bucketEmpty(PlayerBucketEmptyEvent pbee) { Material bucket = pbee.getBucket(); if (Material.LAVA_BUCKET == bucket || Material.WATER_BUCKET == bucket) { Block block = pbee.getBlockClicked(); BlockFace face = pbee.getBlockFace(); Block relativeBlock = block.getRelative(face); // Protection for reinforced 
rails types from direct lava bucket drop. if (Material.RAILS == relativeBlock.getType() || Material.POWERED_RAIL == relativeBlock.getType() || Material.DETECTOR_RAIL == relativeBlock.getType()) { if (isReinforced(relativeBlock)) { pbee.setCancelled(true); } } } } @EventHandler(priority = EventPriority.HIGHEST) public void interact(PlayerInteractEvent pie) { try { if (!pie.hasBlock()) return; Player player = pie.getPlayer(); Block block = pie.getClickedBlock(); AccessDelegate accessDelegate = AccessDelegate.getDelegate(block); block = accessDelegate.getBlock(); IReinforcement generic_reinforcement = accessDelegate.getReinforcement(); PlayerReinforcement reinforcement = null; if (generic_reinforcement instanceof PlayerReinforcement) { reinforcement = (PlayerReinforcement)generic_reinforcement; } Action action = pie.getAction(); boolean access_reinforcement = action == Action.RIGHT_CLICK_BLOCK && reinforcement != null && reinforcement.isSecurable(); boolean normal_access_denied = reinforcement != null && !reinforcement.isAccessible(player); boolean admin_can_access = player.hasPermission("citadel.admin.accesssecurable"); if (access_reinforcement && normal_access_denied && !admin_can_access) { Citadel.info(String.format( "%s failed to access locked reinforcement at %s", player.getName(), block.getLocation().toString())); sendMessage(pie.getPlayer(), ChatColor.RED, "%s is locked", block.getType().name()); pie.setCancelled(true); } if (pie.isCancelled()) return; PlayerState state = PlayerState.get(player); PlacementMode placementMode = state.getMode(); switch (placementMode) { case NORMAL: if (access_reinforcement && normal_access_denied && admin_can_access) { Citadel.info(String.format( "[Admin] %s accessed locked reinforcement at %s", player.getName(), block.getLocation().toString())); } return; case FORTIFICATION: return; case INFO: // did player click on a reinforced block? 
if (reinforcement != null) { String reinforcementStatus = reinforcement.getStatus(); SecurityLevel securityLevel = reinforcement.getSecurityLevel(); Faction group = reinforcement.getOwner(); String message; if (player.hasPermission("citadel.admin.ctinfodetails")) { message = String.format("Loc[%s] Chunk[%s]", reinforcement.getId().toString(), reinforcement.getChunkId()); sendMessage(player, ChatColor.GREEN, message); String groupName = "!NULL!"; if (group != null) { if (group.isPersonalGroup()) { groupName = String.format("[%s] (Personal)", group.getName()); } else { groupName = String.format("[%s]", group.getName()); } } message = String.format(" Group%s Durability[%d/%d]", groupName, reinforcement.getDurability(), reinforcement.getMaterial().getStrength()); sendMessage(player, ChatColor.GREEN, message); } else if(reinforcement.isAccessible(player)){ boolean is_personal_group = false; String groupName = "!NULL!"; if (group != null) { groupName = group.getName(); is_personal_group = group.isPersonalGroup(); } if(is_personal_group){ message = String.format("%s, security: %s, group: %s (Default Group)", reinforcementStatus, securityLevel, groupName); } else { message = String.format("%s, security: %s, group: %s", reinforcementStatus, securityLevel, groupName); } sendMessage(player, ChatColor.GREEN, message); } else { sendMessage(player, ChatColor.RED, "%s, security: %s", reinforcementStatus, securityLevel); } if (player.getGameMode() == GameMode.CREATIVE) { pie.setCancelled(true); } } break; default: // player is in reinforcement mode if (reinforcement == null) { // Break any natural reinforcement before placing the player reinforcement if (generic_reinforcement != null) { reinforcementBroken(generic_reinforcement); } createPlayerReinforcement(player, block); } else if (reinforcement.isBypassable(player)) { boolean update = false; String message = ""; if (reinforcement.getSecurityLevel() != state.getSecurityLevel()){ reinforcement.setSecurityLevel(state.getSecurityLevel()); update = true; message = String.format("Changed security level %s", reinforcement.getSecurityLevel().name()); } Faction group = state.getFaction(); if(!reinforcement.getOwner().equals(group)) { reinforcement.setOwner(group); update = true; if(!message.equals("")){ message = message + ". "; } if(reinforcement.getSecurityLevel() != SecurityLevel.PRIVATE){ message = message + String.format("Changed group to %s", group.getName()); } } if(update){ Citadel.getReinforcementManager().addReinforcement(reinforcement); sendMessage(player, ChatColor.GREEN, message); } } else { sendMessage(player, ChatColor.RED, "You are not permitted to modify this reinforcement"); } pie.setCancelled(true); if (state.getMode() == PlacementMode.REINFORCEMENT_SINGLE_BLOCK) { state.reset(); } else { state.checkResetMode(); } } } catch(Exception e) { Citadel.printStackTrace(e); } } }
src/com/untamedears/citadel/listener/PlayerListener.java
package com.untamedears.citadel.listener; import static com.untamedears.citadel.Utility.createPlayerReinforcement; import static com.untamedears.citadel.Utility.isReinforced; import static com.untamedears.citadel.Utility.reinforcementBroken; import static com.untamedears.citadel.Utility.sendMessage; import org.bukkit.ChatColor; import org.bukkit.Material; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.entity.Player; import org.bukkit.event.Event; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.Action; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.event.player.PlayerLoginEvent; import org.bukkit.event.player.PlayerQuitEvent; import org.bukkit.event.player.PlayerBucketEmptyEvent; import org.bukkit.material.Openable; import com.untamedears.citadel.Citadel; import com.untamedears.citadel.GroupManager; import com.untamedears.citadel.MemberManager; import com.untamedears.citadel.PersonalGroupManager; import com.untamedears.citadel.PlacementMode; import com.untamedears.citadel.SecurityLevel; import com.untamedears.citadel.access.AccessDelegate; import com.untamedears.citadel.entity.Faction; import com.untamedears.citadel.entity.Member; import com.untamedears.citadel.entity.PlayerState; import com.untamedears.citadel.entity.IReinforcement; import com.untamedears.citadel.entity.PlayerReinforcement; /** * Created by IntelliJ IDEA. * User: chrisrico * Date: 3/21/12 * Time: 9:57 PM * * Last edited by JonnyD * 7/18/12 */ public class PlayerListener implements Listener { @EventHandler public void login(PlayerLoginEvent ple) { MemberManager memberManager = Citadel.getMemberManager(); memberManager.addOnlinePlayer(ple.getPlayer()); String playerName = ple.getPlayer().getName(); Member member = memberManager.getMember(playerName); if(member == null){ member = new Member(playerName); memberManager.addMember(member); } PersonalGroupManager personalGroupManager = Citadel.getPersonalGroupManager(); boolean hasPersonalGroup = personalGroupManager.hasPersonalGroup(playerName); GroupManager groupManager = Citadel.getGroupManager(); if(!hasPersonalGroup){ String groupName = playerName; int i = 1; while(groupManager.isGroup(groupName)){ groupName = playerName + i; i++; } Faction group = new Faction(groupName, playerName); groupManager.addGroup(group); personalGroupManager.addPersonalGroup(groupName, playerName); } else if(hasPersonalGroup){ String personalGroupName = personalGroupManager.getPersonalGroup(playerName).getGroupName(); if(!groupManager.isGroup(personalGroupName)){ Faction group = new Faction(personalGroupName, playerName); groupManager.addGroup(group); } } } @EventHandler public void quit(PlayerQuitEvent pqe) { Player player = pqe.getPlayer(); MemberManager memberManager = Citadel.getMemberManager(); memberManager.removeOnlinePlayer(player); PlayerState.remove(player); } @EventHandler(priority = EventPriority.LOWEST) public void bookshelf(PlayerInteractEvent pie) { if (pie.hasBlock() && pie.getMaterial() == Material.BOOKSHELF) interact(pie); } @EventHandler(ignoreCancelled = true, priority = EventPriority.LOW) public void bucketEmpty(PlayerBucketEmptyEvent pbee) { Material bucket = pbee.getBucket(); if (Material.LAVA_BUCKET == bucket || Material.WATER_BUCKET == bucket) { Block block = pbee.getBlockClicked(); BlockFace face = pbee.getBlockFace(); Block relativeBlock = block.getRelative(face); // Protection for reinforced rails types from direct lava 
bucket drop. if (Material.RAILS == relativeBlock.getType() || Material.POWERED_RAIL == relativeBlock.getType() || Material.DETECTOR_RAIL == relativeBlock.getType()) { if (isReinforced(relativeBlock)) { pbee.setCancelled(true); } } } } @EventHandler(priority = EventPriority.HIGHEST) public void interact(PlayerInteractEvent pie) { try { if (!pie.hasBlock()) return; Player player = pie.getPlayer(); Block block = pie.getClickedBlock(); AccessDelegate accessDelegate = AccessDelegate.getDelegate(block); block = accessDelegate.getBlock(); IReinforcement generic_reinforcement = accessDelegate.getReinforcement(); PlayerReinforcement reinforcement = null; if (generic_reinforcement instanceof PlayerReinforcement) { reinforcement = (PlayerReinforcement)generic_reinforcement; } Action action = pie.getAction(); boolean access_reinforcement = action == Action.RIGHT_CLICK_BLOCK && reinforcement != null && reinforcement.isSecurable(); boolean normal_access_denied = reinforcement != null && !reinforcement.isAccessible(player); boolean admin_can_access = player.hasPermission("citadel.admin.accesssecurable"); if (access_reinforcement && normal_access_denied && !admin_can_access) { Citadel.info(String.format( "%s failed to access locked reinforcement at %s", player.getName(), block.getLocation().toString())); sendMessage(pie.getPlayer(), ChatColor.RED, "%s is locked", block.getType().name()); pie.setCancelled(true); } if (pie.isCancelled()) return; PlayerState state = PlayerState.get(player); PlacementMode placementMode = state.getMode(); switch (placementMode) { case NORMAL: if (access_reinforcement && normal_access_denied && admin_can_access) { Citadel.info(String.format( "[Admin] %s accessed locked reinforcement at %s", player.getName(), block.getLocation().toString())); } return; case FORTIFICATION: return; case INFO: // did player click on a reinforced block? 
if (reinforcement != null) { String reinforcementStatus = reinforcement.getStatus(); SecurityLevel securityLevel = reinforcement.getSecurityLevel(); Faction group = reinforcement.getOwner(); String message; if (player.hasPermission("citadel.admin.ctinfodetails")) { message = String.format("Loc[%s] Chunk[%s]", reinforcement.getId().toString(), reinforcement.getChunkId()); sendMessage(player, ChatColor.GREEN, message); String groupName = "!NULL!"; if (group != null) { if (group.isPersonalGroup()) { groupName = String.format("[%s] (Personal)", group.getName()); } else { groupName = String.format("[%s]", group.getName()); } } message = String.format(" Group%s Durability[%d/%d]", groupName, reinforcement.getDurability(), reinforcement.getMaterial().getStrength()); sendMessage(player, ChatColor.GREEN, message); } else if(reinforcement.isAccessible(player)){ boolean is_personal_group = false; String groupName = "!NULL!"; if (group != null) { groupName = group.getName(); is_personal_group = group.isPersonalGroup(); } if(is_personal_group){ message = String.format("%s, security: %s, group: %s (Default Group)", reinforcementStatus, securityLevel, groupName); } else { message = String.format("%s, security: %s, group: %s", reinforcementStatus, securityLevel, groupName); } sendMessage(player, ChatColor.GREEN, message); } else { sendMessage(player, ChatColor.RED, "%s, security: %s", reinforcementStatus, securityLevel); } pie.setCancelled(true); } break; default: // player is in reinforcement mode if (reinforcement == null) { // Break any natural reinforcement before placing the player reinforcement if (generic_reinforcement != null) { reinforcementBroken(generic_reinforcement); } createPlayerReinforcement(player, block); } else if (reinforcement.isBypassable(player)) { boolean update = false; String message = ""; if (reinforcement.getSecurityLevel() != state.getSecurityLevel()){ reinforcement.setSecurityLevel(state.getSecurityLevel()); update = true; message = String.format("Changed security level %s", reinforcement.getSecurityLevel().name()); } Faction group = state.getFaction(); if(!reinforcement.getOwner().equals(group)) { reinforcement.setOwner(group); update = true; if(!message.equals("")){ message = message + ". "; } if(reinforcement.getSecurityLevel() != SecurityLevel.PRIVATE){ message = message + String.format("Changed group to %s", group.getName()); } } if(update){ Citadel.getReinforcementManager().addReinforcement(reinforcement); sendMessage(player, ChatColor.GREEN, message); } } else { sendMessage(player, ChatColor.RED, "You are not permitted to modify this reinforcement"); } pie.setCancelled(true); if (state.getMode() == PlacementMode.REINFORCEMENT_SINGLE_BLOCK) { state.reset(); } else { state.checkResetMode(); } } } catch(Exception e) { Citadel.printStackTrace(e); } } }
Only cancel /cti block click in creative mode
src/com/untamedears/citadel/listener/PlayerListener.java
Only cancel /cti block click in creative mode
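The diff narrows one line in the INFO branch: pie.setCancelled(true) used to run for every /cti info click, and after this commit it runs only when player.getGameMode() == GameMode.CREATIVE, so a survival player who inspects a reinforcement can still use the block afterwards. A self-contained sketch of that rule follows, assuming a tiny hypothetical stand-in for the Bukkit event and game-mode types.

// Sketch only: simplified stand-ins for org.bukkit.GameMode and PlayerInteractEvent.
public class CtiClickCancelSketch {

    enum GameMode { SURVIVAL, CREATIVE }

    static class InteractEvent {
        private boolean cancelled;
        void setCancelled(boolean cancelled) { this.cancelled = cancelled; }
        boolean isCancelled() { return cancelled; }
    }

    // Old rule: always cancel the info click. New rule (this commit): cancel only in
    // creative mode, where the click would otherwise instantly break the inspected block.
    static void handleInfoClick(GameMode mode, InteractEvent event) {
        if (mode == GameMode.CREATIVE) {
            event.setCancelled(true);
        }
    }

    public static void main(String[] args) {
        InteractEvent survivalClick = new InteractEvent();
        handleInfoClick(GameMode.SURVIVAL, survivalClick);
        System.out.println("survival click cancelled: " + survivalClick.isCancelled()); // false

        InteractEvent creativeClick = new InteractEvent();
        handleInfoClick(GameMode.CREATIVE, creativeClick);
        System.out.println("creative click cancelled: " + creativeClick.isCancelled()); // true
    }
}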
Java
mit
03f9677108ac20e45da087564fc4fb7a2e625fd6
0
dariver/jenkins,mrooney/jenkins,bpzhang/jenkins,patbos/jenkins,v1v/jenkins,akshayabd/jenkins,intelchen/jenkins,KostyaSha/jenkins,jpederzolli/jenkins-1,scoheb/jenkins,amruthsoft9/Jenkis,SenolOzer/jenkins,pjanouse/jenkins,batmat/jenkins,daniel-beck/jenkins,chbiel/jenkins,mpeltonen/jenkins,aldaris/jenkins,jpederzolli/jenkins-1,kzantow/jenkins,CodeShane/jenkins,dennisjlee/jenkins,tangkun75/jenkins,AustinKwang/jenkins,arcivanov/jenkins,vijayto/jenkins,hashar/jenkins,daspilker/jenkins,KostyaSha/jenkins,mrobinet/jenkins,varmenise/jenkins,v1v/jenkins,DoctorQ/jenkins,Jochen-A-Fuerbacher/jenkins,evernat/jenkins,Ykus/jenkins,NehemiahMi/jenkins,AustinKwang/jenkins,MadsNielsen/jtemp,evernat/jenkins,jenkinsci/jenkins,abayer/jenkins,h4ck3rm1k3/jenkins,ChrisA89/jenkins,keyurpatankar/hudson,lilyJi/jenkins,kzantow/jenkins,jhoblitt/jenkins,github-api-test-org/jenkins,my7seven/jenkins,yonglehou/jenkins,jpederzolli/jenkins-1,MichaelPranovich/jenkins_sc,292388900/jenkins,jenkinsci/jenkins,vlajos/jenkins,DanielWeber/jenkins,KostyaSha/jenkins,gusreiber/jenkins,luoqii/jenkins,msrb/jenkins,daspilker/jenkins,duzifang/my-jenkins,vijayto/jenkins,ErikVerheul/jenkins,jtnord/jenkins,MarkEWaite/jenkins,kzantow/jenkins,CodeShane/jenkins,brunocvcunha/jenkins,aheritier/jenkins,lindzh/jenkins,morficus/jenkins,soenter/jenkins,hplatou/jenkins,SebastienGllmt/jenkins,evernat/jenkins,damianszczepanik/jenkins,Wilfred/jenkins,msrb/jenkins,dbroady1/jenkins,arcivanov/jenkins,viqueen/jenkins,rsandell/jenkins,pselle/jenkins,SenolOzer/jenkins,amuniz/jenkins,everyonce/jenkins,aduprat/jenkins,lilyJi/jenkins,v1v/jenkins,dariver/jenkins,kohsuke/hudson,tfennelly/jenkins,shahharsh/jenkins,deadmoose/jenkins,lvotypko/jenkins,abayer/jenkins,v1v/jenkins,guoxu0514/jenkins,thomassuckow/jenkins,jpbriend/jenkins,gorcz/jenkins,duzifang/my-jenkins,protazy/jenkins,alvarolobato/jenkins,svanoort/jenkins,keyurpatankar/hudson,thomassuckow/jenkins,vvv444/jenkins,lvotypko/jenkins3,hplatou/jenkins,aldaris/jenkins,batmat/jenkins,verbitan/jenkins,pjanouse/jenkins,v1v/jenkins,rsandell/jenkins,mrobinet/jenkins,aheritier/jenkins,paulwellnerbou/jenkins,h4ck3rm1k3/jenkins,nandan4/Jenkins,aheritier/jenkins,jpederzolli/jenkins-1,vlajos/jenkins,rsandell/jenkins,dariver/jenkins,Wilfred/jenkins,vjuranek/jenkins,SebastienGllmt/jenkins,everyonce/jenkins,daniel-beck/jenkins,synopsys-arc-oss/jenkins,github-api-test-org/jenkins,jglick/jenkins,mcanthony/jenkins,SebastienGllmt/jenkins,CodeShane/jenkins,deadmoose/jenkins,mdonohue/jenkins,MarkEWaite/jenkins,jpederzolli/jenkins-1,ns163/jenkins,wangyikai/jenkins,rashmikanta-1984/jenkins,jtnord/jenkins,thomassuckow/jenkins,deadmoose/jenkins,synopsys-arc-oss/jenkins,lilyJi/jenkins,elkingtonmcb/jenkins,ndeloof/jenkins,alvarolobato/jenkins,vlajos/jenkins,huybrechts/hudson,vijayto/jenkins,ajshastri/jenkins,evernat/jenkins,jk47/jenkins,daspilker/jenkins,escoem/jenkins,jzjzjzj/jenkins,AustinKwang/jenkins,6WIND/jenkins,MadsNielsen/jtemp,morficus/jenkins,mattclark/jenkins,1and1/jenkins,jenkinsci/jenkins,kohsuke/hudson,alvarolobato/jenkins,christ66/jenkins,rlugojr/jenkins,vvv444/jenkins,bpzhang/jenkins,tfennelly/jenkins,292388900/jenkins,lvotypko/jenkins2,Ykus/jenkins,aduprat/jenkins,tangkun75/jenkins,Vlatombe/jenkins,jcsirot/jenkins,msrb/jenkins,damianszczepanik/jenkins,vlajos/jenkins,Ykus/jenkins,seanlin816/jenkins,fbelzunc/jenkins,jk47/jenkins,gitaccountforprashant/gittest,lvotypko/jenkins2,amuniz/jenkins,msrb/jenkins,my7seven/jenkins,jk47/jenkins,MadsNielsen/jtemp,evernat/jenkins,gorcz/jenkins,Jimilian/jenkins,DoctorQ/jenkins,damianszczepan
ik/jenkins,amuniz/jenkins,amuniz/jenkins,csimons/jenkins,iqstack/jenkins,intelchen/jenkins,lordofthejars/jenkins,ikedam/jenkins,rlugojr/jenkins,goldchang/jenkins,christ66/jenkins,seanlin816/jenkins,aquarellian/jenkins,v1v/jenkins,samatdav/jenkins,mrooney/jenkins,github-api-test-org/jenkins,CodeShane/jenkins,recena/jenkins,khmarbaise/jenkins,iqstack/jenkins,huybrechts/hudson,Vlatombe/jenkins,oleg-nenashev/jenkins,hplatou/jenkins,dbroady1/jenkins,lordofthejars/jenkins,mattclark/jenkins,Krasnyanskiy/jenkins,FarmGeek4Life/jenkins,ChrisA89/jenkins,godfath3r/jenkins,varmenise/jenkins,gitaccountforprashant/gittest,6WIND/jenkins,pjanouse/jenkins,Krasnyanskiy/jenkins,gorcz/jenkins,jcarrothers-sap/jenkins,protazy/jenkins,fbelzunc/jenkins,hashar/jenkins,jzjzjzj/jenkins,jcsirot/jenkins,wangyikai/jenkins,aquarellian/jenkins,nandan4/Jenkins,pjanouse/jenkins,petermarcoen/jenkins,alvarolobato/jenkins,vjuranek/jenkins,ErikVerheul/jenkins,mcanthony/jenkins,ErikVerheul/jenkins,arunsingh/jenkins,stephenc/jenkins,bkmeneguello/jenkins,petermarcoen/jenkins,jzjzjzj/jenkins,vijayto/jenkins,ChrisA89/jenkins,scoheb/jenkins,my7seven/jenkins,mrobinet/jenkins,vlajos/jenkins,mrobinet/jenkins,ydubreuil/jenkins,292388900/jenkins,FTG-003/jenkins,DoctorQ/jenkins,chbiel/jenkins,ikedam/jenkins,samatdav/jenkins,albers/jenkins,aheritier/jenkins,svanoort/jenkins,github-api-test-org/jenkins,khmarbaise/jenkins,aquarellian/jenkins,wuwen5/jenkins,Krasnyanskiy/jenkins,sathiya-mit/jenkins,oleg-nenashev/jenkins,h4ck3rm1k3/jenkins,jzjzjzj/jenkins,daspilker/jenkins,everyonce/jenkins,shahharsh/jenkins,nandan4/Jenkins,samatdav/jenkins,chbiel/jenkins,ns163/jenkins,dariver/jenkins,keyurpatankar/hudson,gorcz/jenkins,duzifang/my-jenkins,sathiya-mit/jenkins,csimons/jenkins,arcivanov/jenkins,recena/jenkins,iterate/coding-dojo,AustinKwang/jenkins,huybrechts/hudson,wuwen5/jenkins,h4ck3rm1k3/jenkins,dariver/jenkins,my7seven/jenkins,CodeShane/jenkins,MadsNielsen/jtemp,iqstack/jenkins,stefanbrausch/hudson-main,gorcz/jenkins,1and1/jenkins,nandan4/Jenkins,luoqii/jenkins,guoxu0514/jenkins,jenkinsci/jenkins,SebastienGllmt/jenkins,wangyikai/jenkins,github-api-test-org/jenkins,jcarrothers-sap/jenkins,hudson/hudson-2.x,MadsNielsen/jtemp,batmat/jenkins,escoem/jenkins,tastatur/jenkins,jtnord/jenkins,huybrechts/hudson,SenolOzer/jenkins,luoqii/jenkins,csimons/jenkins,ikedam/jenkins,ajshastri/jenkins,aquarellian/jenkins,brunocvcunha/jenkins,maikeffi/hudson,fbelzunc/jenkins,ajshastri/jenkins,intelchen/jenkins,kohsuke/hudson,scoheb/jenkins,bpzhang/jenkins,ns163/jenkins,godfath3r/jenkins,ydubreuil/jenkins,liupugong/jenkins,thomassuckow/jenkins,Vlatombe/jenkins,olivergondza/jenkins,mrobinet/jenkins,rlugojr/jenkins,akshayabd/jenkins,hashar/jenkins,ajshastri/jenkins,petermarcoen/jenkins,Jochen-A-Fuerbacher/jenkins,noikiy/jenkins,Jimilian/jenkins,pselle/jenkins,6WIND/jenkins,varmenise/jenkins,NehemiahMi/jenkins,tfennelly/jenkins,tangkun75/jenkins,mdonohue/jenkins,ns163/jenkins,arunsingh/jenkins,mpeltonen/jenkins,albers/jenkins,protazy/jenkins,FarmGeek4Life/jenkins,iterate/coding-dojo,arcivanov/jenkins,mdonohue/jenkins,stephenc/jenkins,patbos/jenkins,andresrc/jenkins,jpbriend/jenkins,ndeloof/jenkins,patbos/jenkins,khmarbaise/jenkins,rashmikanta-1984/jenkins,stefanbrausch/hudson-main,1and1/jenkins,gusreiber/jenkins,stephenc/jenkins,ikedam/jenkins,verbitan/jenkins,brunocvcunha/jenkins,guoxu0514/jenkins,mcanthony/jenkins,AustinKwang/jenkins,yonglehou/jenkins,Jimilian/jenkins,deadmoose/jenkins,Wilfred/jenkins,vlajos/jenkins,synopsys-arc-oss/jenkins,Wilfred/jenkins,FTG-003/je
nkins,jhoblitt/jenkins,albers/jenkins,Krasnyanskiy/jenkins,aquarellian/jenkins,svanoort/jenkins,jhoblitt/jenkins,soenter/jenkins,vjuranek/jenkins,arunsingh/jenkins,bpzhang/jenkins,damianszczepanik/jenkins,rashmikanta-1984/jenkins,wangyikai/jenkins,pantheon-systems/jenkins,aldaris/jenkins,fbelzunc/jenkins,chbiel/jenkins,mrobinet/jenkins,hashar/jenkins,sathiya-mit/jenkins,ikedam/jenkins,mdonohue/jenkins,ChrisA89/jenkins,ikedam/jenkins,chbiel/jenkins,lordofthejars/jenkins,khmarbaise/jenkins,sathiya-mit/jenkins,paulwellnerbou/jenkins,Wilfred/jenkins,lvotypko/jenkins2,aheritier/jenkins,verbitan/jenkins,noikiy/jenkins,github-api-test-org/jenkins,paulwellnerbou/jenkins,my7seven/jenkins,paulmillar/jenkins,escoem/jenkins,Vlatombe/jenkins,ndeloof/jenkins,albers/jenkins,viqueen/jenkins,olivergondza/jenkins,viqueen/jenkins,jzjzjzj/jenkins,jcsirot/jenkins,pselle/jenkins,svanoort/jenkins,292388900/jenkins,wangyikai/jenkins,arunsingh/jenkins,ErikVerheul/jenkins,batmat/jenkins,petermarcoen/jenkins,stephenc/jenkins,KostyaSha/jenkins,vivek/hudson,jpederzolli/jenkins-1,noikiy/jenkins,godfath3r/jenkins,tangkun75/jenkins,nandan4/Jenkins,ajshastri/jenkins,keyurpatankar/hudson,huybrechts/hudson,yonglehou/jenkins,viqueen/jenkins,sathiya-mit/jenkins,jcarrothers-sap/jenkins,sathiya-mit/jenkins,escoem/jenkins,aduprat/jenkins,ydubreuil/jenkins,hemantojhaa/jenkins,Krasnyanskiy/jenkins,singh88/jenkins,arunsingh/jenkins,thomassuckow/jenkins,liorhson/jenkins,jtnord/jenkins,luoqii/jenkins,amuniz/jenkins,everyonce/jenkins,liorhson/jenkins,jtnord/jenkins,jglick/jenkins,lvotypko/jenkins3,KostyaSha/jenkins,ChrisA89/jenkins,wuwen5/jenkins,iterate/coding-dojo,damianszczepanik/jenkins,iqstack/jenkins,andresrc/jenkins,vjuranek/jenkins,jcsirot/jenkins,lvotypko/jenkins,jcsirot/jenkins,jtnord/jenkins,mrooney/jenkins,akshayabd/jenkins,lvotypko/jenkins3,lordofthejars/jenkins,liorhson/jenkins,daspilker/jenkins,shahharsh/jenkins,mcanthony/jenkins,vivek/hudson,keyurpatankar/hudson,iterate/coding-dojo,mpeltonen/jenkins,liupugong/jenkins,bpzhang/jenkins,mcanthony/jenkins,evernat/jenkins,yonglehou/jenkins,vijayto/jenkins,KostyaSha/jenkins,lordofthejars/jenkins,rlugojr/jenkins,evernat/jenkins,mrooney/jenkins,daniel-beck/jenkins,shahharsh/jenkins,DoctorQ/jenkins,tastatur/jenkins,wangyikai/jenkins,mattclark/jenkins,svanoort/jenkins,292388900/jenkins,Jochen-A-Fuerbacher/jenkins,DanielWeber/jenkins,jglick/jenkins,MichaelPranovich/jenkins_sc,goldchang/jenkins,godfath3r/jenkins,dennisjlee/jenkins,petermarcoen/jenkins,pjanouse/jenkins,morficus/jenkins,iterate/coding-dojo,synopsys-arc-oss/jenkins,varmenise/jenkins,amruthsoft9/Jenkis,recena/jenkins,morficus/jenkins,varmenise/jenkins,jcarrothers-sap/jenkins,maikeffi/hudson,escoem/jenkins,iterate/coding-dojo,oleg-nenashev/jenkins,maikeffi/hudson,SenolOzer/jenkins,aduprat/jenkins,dbroady1/jenkins,elkingtonmcb/jenkins,ns163/jenkins,rsandell/jenkins,rlugojr/jenkins,ErikVerheul/jenkins,svanoort/jenkins,AustinKwang/jenkins,seanlin816/jenkins,hemantojhaa/jenkins,daniel-beck/jenkins,singh88/jenkins,kzantow/jenkins,gusreiber/jenkins,jhoblitt/jenkins,mattclark/jenkins,MichaelPranovich/jenkins_sc,wangyikai/jenkins,dbroady1/jenkins,soenter/jenkins,mrooney/jenkins,jpbriend/jenkins,csimons/jenkins,daniel-beck/jenkins,ErikVerheul/jenkins,gitaccountforprashant/gittest,jhoblitt/jenkins,khmarbaise/jenkins,SebastienGllmt/jenkins,bkmeneguello/jenkins,Vlatombe/jenkins,noikiy/jenkins,jcarrothers-sap/jenkins,kohsuke/hudson,bkmeneguello/jenkins,alvarolobato/jenkins,MichaelPranovich/jenkins_sc,MarkEWaite/jenkins,lvotypko/jenki
ns,DanielWeber/jenkins,FTG-003/jenkins,elkingtonmcb/jenkins,wuwen5/jenkins,jpbriend/jenkins,pselle/jenkins,vivek/hudson,jcsirot/jenkins,azweb76/jenkins,bkmeneguello/jenkins,tastatur/jenkins,lindzh/jenkins,hemantojhaa/jenkins,Ykus/jenkins,gitaccountforprashant/gittest,kohsuke/hudson,lordofthejars/jenkins,jk47/jenkins,seanlin816/jenkins,SenolOzer/jenkins,DoctorQ/jenkins,mrobinet/jenkins,bpzhang/jenkins,mattclark/jenkins,rlugojr/jenkins,1and1/jenkins,arunsingh/jenkins,lindzh/jenkins,NehemiahMi/jenkins,Jochen-A-Fuerbacher/jenkins,Jimilian/jenkins,andresrc/jenkins,pjanouse/jenkins,ydubreuil/jenkins,vvv444/jenkins,brunocvcunha/jenkins,abayer/jenkins,msrb/jenkins,mpeltonen/jenkins,vijayto/jenkins,jpbriend/jenkins,aheritier/jenkins,pantheon-systems/jenkins,ndeloof/jenkins,amruthsoft9/Jenkis,dbroady1/jenkins,intelchen/jenkins,gusreiber/jenkins,viqueen/jenkins,paulwellnerbou/jenkins,jhoblitt/jenkins,everyonce/jenkins,patbos/jenkins,1and1/jenkins,abayer/jenkins,huybrechts/hudson,github-api-test-org/jenkins,lindzh/jenkins,recena/jenkins,vvv444/jenkins,kzantow/jenkins,lordofthejars/jenkins,Ykus/jenkins,pselle/jenkins,viqueen/jenkins,patbos/jenkins,jzjzjzj/jenkins,dbroady1/jenkins,rsandell/jenkins,paulmillar/jenkins,seanlin816/jenkins,mcanthony/jenkins,aquarellian/jenkins,verbitan/jenkins,liupugong/jenkins,jcarrothers-sap/jenkins,liorhson/jenkins,wuwen5/jenkins,intelchen/jenkins,kzantow/jenkins,github-api-test-org/jenkins,tastatur/jenkins,nandan4/Jenkins,amuniz/jenkins,gusreiber/jenkins,fbelzunc/jenkins,hudson/hudson-2.x,verbitan/jenkins,csimons/jenkins,oleg-nenashev/jenkins,tangkun75/jenkins,lvotypko/jenkins,my7seven/jenkins,rashmikanta-1984/jenkins,kohsuke/hudson,DanielWeber/jenkins,luoqii/jenkins,FTG-003/jenkins,tfennelly/jenkins,dennisjlee/jenkins,guoxu0514/jenkins,Jimilian/jenkins,kohsuke/hudson,stefanbrausch/hudson-main,abayer/jenkins,daspilker/jenkins,elkingtonmcb/jenkins,MadsNielsen/jtemp,ajshastri/jenkins,recena/jenkins,ns163/jenkins,arcivanov/jenkins,CodeShane/jenkins,bkmeneguello/jenkins,mdonohue/jenkins,aldaris/jenkins,olivergondza/jenkins,andresrc/jenkins,pantheon-systems/jenkins,h4ck3rm1k3/jenkins,viqueen/jenkins,6WIND/jenkins,yonglehou/jenkins,mpeltonen/jenkins,hemantojhaa/jenkins,lindzh/jenkins,jzjzjzj/jenkins,fbelzunc/jenkins,tastatur/jenkins,iqstack/jenkins,singh88/jenkins,liupugong/jenkins,paulmillar/jenkins,rsandell/jenkins,vjuranek/jenkins,NehemiahMi/jenkins,amuniz/jenkins,rashmikanta-1984/jenkins,duzifang/my-jenkins,goldchang/jenkins,tfennelly/jenkins,FarmGeek4Life/jenkins,hashar/jenkins,tfennelly/jenkins,abayer/jenkins,vvv444/jenkins,shahharsh/jenkins,abayer/jenkins,hemantojhaa/jenkins,kzantow/jenkins,h4ck3rm1k3/jenkins,hudson/hudson-2.x,damianszczepanik/jenkins,stefanbrausch/hudson-main,MarkEWaite/jenkins,hashar/jenkins,guoxu0514/jenkins,lilyJi/jenkins,lvotypko/jenkins3,FarmGeek4Life/jenkins,tangkun75/jenkins,scoheb/jenkins,jhoblitt/jenkins,guoxu0514/jenkins,pantheon-systems/jenkins,brunocvcunha/jenkins,NehemiahMi/jenkins,AustinKwang/jenkins,FTG-003/jenkins,iqstack/jenkins,goldchang/jenkins,jglick/jenkins,DanielWeber/jenkins,amruthsoft9/Jenkis,oleg-nenashev/jenkins,andresrc/jenkins,Jimilian/jenkins,protazy/jenkins,scoheb/jenkins,olivergondza/jenkins,vivek/hudson,DoctorQ/jenkins,ydubreuil/jenkins,ChrisA89/jenkins,deadmoose/jenkins,yonglehou/jenkins,goldchang/jenkins,synopsys-arc-oss/jenkins,mcanthony/jenkins,NehemiahMi/jenkins,lvotypko/jenkins2,rlugojr/jenkins,amruthsoft9/Jenkis,dariver/jenkins,synopsys-arc-oss/jenkins,vvv444/jenkins,Jochen-A-Fuerbacher/jenkins,stefanbrausch/hudson
-main,aquarellian/jenkins,vlajos/jenkins,andresrc/jenkins,azweb76/jenkins,protazy/jenkins,Krasnyanskiy/jenkins,Jochen-A-Fuerbacher/jenkins,singh88/jenkins,verbitan/jenkins,stephenc/jenkins,vivek/hudson,vijayto/jenkins,iterate/coding-dojo,SebastienGllmt/jenkins,lilyJi/jenkins,292388900/jenkins,lvotypko/jenkins,dbroady1/jenkins,oleg-nenashev/jenkins,singh88/jenkins,everyonce/jenkins,christ66/jenkins,keyurpatankar/hudson,damianszczepanik/jenkins,Wilfred/jenkins,KostyaSha/jenkins,liupugong/jenkins,lilyJi/jenkins,stefanbrausch/hudson-main,aduprat/jenkins,FarmGeek4Life/jenkins,maikeffi/hudson,lilyJi/jenkins,SenolOzer/jenkins,liorhson/jenkins,rsandell/jenkins,godfath3r/jenkins,paulmillar/jenkins,verbitan/jenkins,stephenc/jenkins,maikeffi/hudson,godfath3r/jenkins,duzifang/my-jenkins,arunsingh/jenkins,lvotypko/jenkins,aldaris/jenkins,godfath3r/jenkins,singh88/jenkins,6WIND/jenkins,msrb/jenkins,keyurpatankar/hudson,azweb76/jenkins,singh88/jenkins,deadmoose/jenkins,deadmoose/jenkins,jenkinsci/jenkins,khmarbaise/jenkins,pjanouse/jenkins,intelchen/jenkins,dennisjlee/jenkins,alvarolobato/jenkins,hplatou/jenkins,azweb76/jenkins,csimons/jenkins,akshayabd/jenkins,chbiel/jenkins,soenter/jenkins,daspilker/jenkins,goldchang/jenkins,noikiy/jenkins,pantheon-systems/jenkins,damianszczepanik/jenkins,wuwen5/jenkins,daniel-beck/jenkins,jk47/jenkins,thomassuckow/jenkins,Vlatombe/jenkins,kohsuke/hudson,jcsirot/jenkins,noikiy/jenkins,arcivanov/jenkins,andresrc/jenkins,Vlatombe/jenkins,varmenise/jenkins,luoqii/jenkins,yonglehou/jenkins,Ykus/jenkins,daniel-beck/jenkins,CodeShane/jenkins,hplatou/jenkins,tastatur/jenkins,recena/jenkins,ChrisA89/jenkins,shahharsh/jenkins,scoheb/jenkins,MichaelPranovich/jenkins_sc,dennisjlee/jenkins,mattclark/jenkins,azweb76/jenkins,liupugong/jenkins,mdonohue/jenkins,vivek/hudson,gitaccountforprashant/gittest,escoem/jenkins,lindzh/jenkins,samatdav/jenkins,samatdav/jenkins,vvv444/jenkins,jcarrothers-sap/jenkins,Ykus/jenkins,seanlin816/jenkins,amruthsoft9/Jenkis,ydubreuil/jenkins,bkmeneguello/jenkins,Jochen-A-Fuerbacher/jenkins,intelchen/jenkins,FarmGeek4Life/jenkins,h4ck3rm1k3/jenkins,morficus/jenkins,jcarrothers-sap/jenkins,daniel-beck/jenkins,vjuranek/jenkins,lvotypko/jenkins2,jenkinsci/jenkins,duzifang/my-jenkins,paulmillar/jenkins,christ66/jenkins,rashmikanta-1984/jenkins,lvotypko/jenkins,jk47/jenkins,amruthsoft9/Jenkis,svanoort/jenkins,varmenise/jenkins,arcivanov/jenkins,fbelzunc/jenkins,jglick/jenkins,akshayabd/jenkins,protazy/jenkins,christ66/jenkins,hemantojhaa/jenkins,bpzhang/jenkins,elkingtonmcb/jenkins,albers/jenkins,mrooney/jenkins,sathiya-mit/jenkins,nandan4/Jenkins,paulwellnerbou/jenkins,akshayabd/jenkins,ydubreuil/jenkins,hashar/jenkins,ns163/jenkins,Jimilian/jenkins,christ66/jenkins,liorhson/jenkins,recena/jenkins,FTG-003/jenkins,mdonohue/jenkins,lvotypko/jenkins3,vjuranek/jenkins,protazy/jenkins,MadsNielsen/jtemp,morficus/jenkins,292388900/jenkins,soenter/jenkins,thomassuckow/jenkins,albers/jenkins,6WIND/jenkins,MarkEWaite/jenkins,liorhson/jenkins,lindzh/jenkins,albers/jenkins,petermarcoen/jenkins,DoctorQ/jenkins,pantheon-systems/jenkins,vivek/hudson,mattclark/jenkins,jzjzjzj/jenkins,pantheon-systems/jenkins,synopsys-arc-oss/jenkins,duzifang/my-jenkins,hplatou/jenkins,hudson/hudson-2.x,Krasnyanskiy/jenkins,hudson/hudson-2.x,patbos/jenkins,1and1/jenkins,MarkEWaite/jenkins,alvarolobato/jenkins,shahharsh/jenkins,aldaris/jenkins,paulwellnerbou/jenkins,jglick/jenkins,SenolOzer/jenkins,azweb76/jenkins,noikiy/jenkins,aheritier/jenkins,goldchang/jenkins,maikeffi/hudson,tfennelly/jenk
ins,wuwen5/jenkins,morficus/jenkins,gorcz/jenkins,elkingtonmcb/jenkins,ndeloof/jenkins,FarmGeek4Life/jenkins,luoqii/jenkins,jpederzolli/jenkins-1,MarkEWaite/jenkins,lvotypko/jenkins2,ndeloof/jenkins,bkmeneguello/jenkins,NehemiahMi/jenkins,stephenc/jenkins,Wilfred/jenkins,dennisjlee/jenkins,keyurpatankar/hudson,escoem/jenkins,dennisjlee/jenkins,DanielWeber/jenkins,jtnord/jenkins,my7seven/jenkins,paulwellnerbou/jenkins,msrb/jenkins,maikeffi/hudson,pselle/jenkins,aduprat/jenkins,elkingtonmcb/jenkins,ErikVerheul/jenkins,samatdav/jenkins,seanlin816/jenkins,scoheb/jenkins,tastatur/jenkins,batmat/jenkins,hudson/hudson-2.x,paulmillar/jenkins,ikedam/jenkins,ndeloof/jenkins,lvotypko/jenkins2,csimons/jenkins,guoxu0514/jenkins,FTG-003/jenkins,olivergondza/jenkins,6WIND/jenkins,oleg-nenashev/jenkins,DoctorQ/jenkins,MichaelPranovich/jenkins_sc,jpbriend/jenkins,olivergondza/jenkins,gorcz/jenkins,paulmillar/jenkins,MarkEWaite/jenkins,mrooney/jenkins,gusreiber/jenkins,jglick/jenkins,maikeffi/hudson,rsandell/jenkins,hplatou/jenkins,jk47/jenkins,goldchang/jenkins,pselle/jenkins,aldaris/jenkins,gorcz/jenkins,chbiel/jenkins,SebastienGllmt/jenkins,gitaccountforprashant/gittest,ajshastri/jenkins,KostyaSha/jenkins,christ66/jenkins,akshayabd/jenkins,gusreiber/jenkins,batmat/jenkins,olivergondza/jenkins,MichaelPranovich/jenkins_sc,patbos/jenkins,tangkun75/jenkins,iqstack/jenkins,jpbriend/jenkins,stefanbrausch/hudson-main,petermarcoen/jenkins,brunocvcunha/jenkins,samatdav/jenkins,jenkinsci/jenkins,azweb76/jenkins,1and1/jenkins,huybrechts/hudson,brunocvcunha/jenkins,shahharsh/jenkins,khmarbaise/jenkins,vivek/hudson,ikedam/jenkins,rashmikanta-1984/jenkins,gitaccountforprashant/gittest,lvotypko/jenkins3,mpeltonen/jenkins,batmat/jenkins,soenter/jenkins,everyonce/jenkins,mpeltonen/jenkins,jenkinsci/jenkins,liupugong/jenkins,v1v/jenkins,lvotypko/jenkins3,hemantojhaa/jenkins,DanielWeber/jenkins,soenter/jenkins,dariver/jenkins,aduprat/jenkins
package hudson.model; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import javax.servlet.ServletException; import java.io.IOException; import hudson.search.SearchableModelObject; import hudson.search.Search; import hudson.search.SearchIndexBuilder; import hudson.search.SearchIndex; /** * {@link ModelObject} with some convenience methods. * * @author Kohsuke Kawaguchi */ public abstract class AbstractModelObject implements SearchableModelObject { /** * Displays the error in a page. */ protected final void sendError(Exception e, StaplerRequest req, StaplerResponse rsp) throws ServletException, IOException { sendError(e.getMessage(),req,rsp); } protected final void sendError(String message, StaplerRequest req, StaplerResponse rsp) throws ServletException, IOException { req.setAttribute("message",message); rsp.forward(this,"error",req); } /** * Default implementation that returns empty index. */ protected SearchIndexBuilder makeSearchIndex() { return new SearchIndexBuilder().addAllAnnotations(this); } public final SearchIndex getSearchIndex() { return makeSearchIndex().make(); } public Search getSearch() { return new Search(); } /** * Default implementation that returns the display name. */ public String getSearchName() { return getDisplayName(); } }
core/src/main/java/hudson/model/AbstractModelObject.java
package hudson.model; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import javax.servlet.ServletException; import java.io.IOException; import hudson.search.SearchableModelObject; import hudson.search.Search; import hudson.search.SearchIndexBuilder; import hudson.search.SearchIndex; /** * {@link ModelObject} with some convenience methods. * * @author Kohsuke Kawaguchi */ public abstract class AbstractModelObject implements SearchableModelObject { /** * Displays the error in a page. */ protected final void sendError(Exception e, StaplerRequest req, StaplerResponse rsp) throws ServletException, IOException { sendError(e.getMessage(),req,rsp); } protected final void sendError(String message, StaplerRequest req, StaplerResponse rsp) throws ServletException, IOException { req.setAttribute("message",message); rsp.forward(this,"error",req); } /** * Default implementation that returns empty index. */ protected SearchIndexBuilder makeSearchIndex() { return new SearchIndexBuilder().addAllAnnotations(this); } public final SearchIndex getSearchIndex() { return makeSearchIndex().make(); } public Search getSearch() { return new Search(); } /** * Default implementation that returns the display name. */ public String getSearchName() { return getDisplayName(); } }
formatting changes. git-svn-id: 28f34f9aa52bc55a5ddd5be9e183c5cccadc6ee4@4135 71c3de6d-444a-0410-be80-ed276b4c234a
core/src/main/java/hudson/model/AbstractModelObject.java
formatting changes.
Java
mit
d40f553e5b3379b804acc4201ed45f84e9f7e822
0
fclairamb/tc65lib,fclairamb/tc65lib
package org.javacint.settings; //#if sdkns == "siemens" import com.siemens.icm.io.file.FileConnection; //#elif sdkns == "cinterion" //# import com.cinterion.io.file.FileConnection; //#endif import java.io.*; import java.util.*; import javax.microedition.io.Connector; import org.javacint.common.Strings; import org.javacint.logging.Logger; /** * Settings management class. * * This class should take advantage of the PropertiesFile class. */ public class Settings { /** * Settings container. It has both the default and specific settings. */ private static Hashtable settings; /** * Settings filename */ private static String fileName = "settings.txt"; /** * Settings providers. They provide some settings with some default values * and they receive events when some settings are changed. */ private static final Vector providers = new Vector(); private static final String PATH_PREFIX = "file:///a:/"; /** * APN setting. In the AT^SJNET=... format */ public static final String SETTING_APN = "apn"; /** * Protection code */ public static final String SETTING_CODE = "code"; public static final String SETTING_MANAGERSPHONE = "phoneManager"; /** * ICCID sim card setting. This is very useful to detect iccid card change * (SIM card change detection not handled by the settings class itself). */ public static final String SETTING_ICCID = "iccid"; /** * pincode setting name. Using pincode is NOT recommended. */ public static final String SETTING_PINCODE = "pincode"; /** * jadurl setting */ public static final String SETTING_JADURL = "jadurl"; private static boolean madeSomeChanges = false; private static boolean firstStartup = false; /** * loading state */ private static boolean loading; public static synchronized void setFilename(String filename) { fileName = filename; settings = null; } /** * Define if we are loading the program. If we are loading, we can't * get/set/load/save settings. We can only add and remove settings * consumers. * * @param l Loading state */ public static void loading(boolean l) { loading = l; } /** * Get the settings filename. * * @return Filename without the "file:///a:/" path prefix */ public static String getFilename() { return fileName; } /** * If this is the first startup. * * The first statup flag is activated if we don't have any settings file. * * @return If this is the first startup */ public static boolean firstStartup() { return firstStartup; } /** * Load settings. We should replace the line by line loading code by using * the PropertiesFile class. */ public static synchronized void load() { if (Logger.BUILD_DEBUG) { Logger.log("Settings.load();"); } StringBuffer buffer = new StringBuffer(); Hashtable newSettings = getDefaultSettings(); try { FileConnection fc = (FileConnection) Connector.open(PATH_PREFIX + fileName, Connector.READ); if (!fc.exists()) { if (Logger.BUILD_WARNING) { Logger.log("Settings.load: File \"" + fileName + "\" doesn\'t exist!"); } firstStartup = true; fc = (FileConnection) Connector.open(PATH_PREFIX + fileName + ".old", Connector.READ); if (fc.exists()) { if (Logger.BUILD_WARNING) { Logger.log("Settings.load: But \"" + fileName + ".old\" exists ! 
"); } } else { return; } } InputStream is = fc.openInputStream(); while (is.available() > 0) { int c = is.read(); if (c == '\n') { loadLine(newSettings, buffer.toString()); buffer.setLength(0); } else { buffer.append((char) c); } } is.close(); fc.close(); } catch (IOException ex) { // The exception we shoud have is at first launch : // There shouldn't be any file to read from if (Logger.BUILD_CRITICAL) { Logger.log("Settings.load", ex); } } finally { settings = newSettings; } } /** * Treat each line of the file * * @param def Default settings * @param line Line to parse */ private static void loadLine(Hashtable settings, String line) { String[] spl = Strings.split('=', line); String key = spl[0]; String value = spl[1]; // If default settings hashTable contains this key // we can use this value if (settings.containsKey(key)) { settings.remove(key); settings.put(key, value); } // If not, we just forget about it (no dirty settings) } public static void onSettingsChanged(String[] names) { onSettingsChanged(names, null); } /** * Launch an event when some settings * * @param names Names of the settings */ public static void onSettingsChanged(String[] names, SettingsProvider caller) { try { synchronized (providers) { for (Enumeration en = providers.elements(); en.hasMoreElements();) { SettingsProvider cons = (SettingsProvider) en.nextElement(); if (cons == caller) { continue; } cons.settingsChanged(names); } } } catch (Exception ex) { if (Logger.BUILD_CRITICAL) { Logger.log("Settings.OnSeettingChanged", ex); } } } /** * Get default settings * * @return Default settings Hashtable */ public static Hashtable getDefaultSettings() { Hashtable defaultSettings = new Hashtable(); // The following settings are mandatory but they // are NOT handled by the Settings class. 
// Code is mandatory (SMS control protection) defaultSettings.put(SETTING_CODE, "1234"); // APN is mandatory (GPRS setup) defaultSettings.put(SETTING_APN, ""); // ICCID is mandatory (SIM card detection) defaultSettings.put(SETTING_ICCID, ""); synchronized (providers) { for (Enumeration en = providers.elements(); en.hasMoreElements();) { SettingsProvider cons = (SettingsProvider) en.nextElement(); cons.getDefaultSettings(defaultSettings); } } return defaultSettings; } /** * Add a settings provier class * * @param consumer Provider of settings and consumer of settings change */ public static void addProvider(SettingsProvider consumer) { // if (Logger.BUILD_DEBUG) { // Logger.log("Settings.addSettingsConsumer( " + consumer + " );"); // } if (!loading) { // We should never add or removed a settings provider when we have finished loading throw new RuntimeException("Settings.addSettingsConsumer: We're not loading anymore !"); } synchronized (providers) { providers.addElement(consumer); // Adding a provider voids the current state of the settings settings = null; } } /** * Remove a settings consumer class * * @param consumer Consumer of settings */ public static void removeProvider(SettingsProvider consumer) { synchronized (providers) { if (providers.contains(consumer)) { providers.removeElement(consumer); } settings = null; } } /** * Reset all settings */ public synchronized static void reset() { try { FileConnection fc = (FileConnection) Connector.open(PATH_PREFIX + fileName, Connector.READ_WRITE); if (fc.exists()) { fc.delete(); } load(); settings = null; } catch (Exception ex) { if (Logger.BUILD_CRITICAL) { Logger.log("Settings.resetErything", ex); } } } /** * Save setttings */ public static synchronized void save() { synchronized (Settings.class) { // If there's no settings, we shouldn't have to save anything if (settings == null) { return; } // If no changes were made, we shouldn't have to save anything if (!madeSomeChanges) { return; } try { Hashtable defSettings = getDefaultSettings(); String fileNameTmp = fileName + ".tmp"; String fileNameOld = fileName + ".old"; String settingFileUrl = PATH_PREFIX + fileName; String settingFileUrlTmp = PATH_PREFIX + fileNameTmp; String settingFileUrlOld = PATH_PREFIX + fileNameOld; FileConnection fc = (FileConnection) Connector.open(settingFileUrlTmp, Connector.READ_WRITE); if (fc.exists()) { fc.delete(); } fc.create(); OutputStream os = fc.openOutputStream(); Enumeration e = defSettings.keys(); while (e.hasMoreElements()) { String key = (String) e.nextElement(); String value = (String) settings.get(key); String defValue = (String) defSettings.get(key); if ( // if there is a default value defValue != null && // and // the value isn't the same as the default value defValue.compareTo(value) != 0) { String line = key + "=" + value + '\n'; // if ( Logger.BUILD_DEBUG ) { // Logger.log("Settings.save.line: " + line); // } os.write(line.getBytes()); } } os.flush(); os.close(); { // We move the current setting file to the old one FileConnection currentFile = (FileConnection) Connector.open(settingFileUrl, Connector.READ_WRITE); // if ( Logger.BUILD_DEBUG ) { // Logger.log("Settings.save: Renaming \"" + settingFileUrl + "\" to \"" + fileNameOld + "\""); // } if (currentFile.exists()) { { // We delete the old setting file // if ( Logger.BUILD_DEBUG ) { // Logger.log("Settings.save: Deleting \"" + settingFileUrlOld + "\""); // } FileConnection oldFile = (FileConnection) Connector.open(settingFileUrlOld, Connector.READ_WRITE); if (oldFile.exists()) { 
oldFile.delete(); } } currentFile.rename(fileNameOld); } } { // We move the tmp file to the current setting file // if ( Logger.BUILD_DEBUG ) { // Logger.log("Setting.save: Renaming \"" + settingFileUrlTmp + "\" to \"" + _fileName + "\""); // } fc.rename(fileName); fc.close(); } // We don't have anything to be written anymore madeSomeChanges = false; } catch (Exception ex) { if (Logger.BUILD_CRITICAL) { Logger.log("Settings.Save", ex, true); } } } } /** * Init (and ReInit) method */ private static void checkLoad() { if (settings == null) { load(); } } /** * Get a setting's value as a String * * @param key Key Name of the setting * @return String value of the setting */ public static synchronized String get(String key) { return (String) getSettings().get(key); } /** * Get all the settings * * @return All the settings */ public static synchronized Hashtable getSettings() { checkLoad(); return settings; } /** * Set a setting * * @param key Name of the setting * @param value Value of the setting */ public static void set(String key, String value) { if (Logger.BUILD_DEBUG) { Logger.log("Settings.setSetting( \"" + key + "\", \"" + value + "\" );"); } if (setWithoutEvent(key, value)) { onSettingsChanged(new String[]{key}); } } /** * Set a setting * * @param key Name of the setting * @param value Value of the setting */ public static void set(String key, int value) { set(key, Integer.toString(value)); } /** * Set a setting * * @param key Name of the setting * @param value Value of the setting */ public static void set(String key, boolean value) { set(key, value ? "1" : "0"); } /** * Set a setting without launching the onSettingsChange method * * @param key Setting to set * @param value Value of the setting * @return If setting was actually changed */ public static synchronized boolean setWithoutEvent(String key, String value) { Hashtable table = getSettings(); if (table.containsKey(key)) { String previousValue = (String) table.get(key); if (previousValue.compareTo(value) == 0) { return false; } } else { return false; } if (loading) { throw new RuntimeException("Settings.setWithoutEvent: You can't change a setting while loading !"); } table.put(key, value); madeSomeChanges = true; return true; } /** * Get a setting's value as an int * * @param key Key Name of the setting * @return Integer value of the setting * @throws java.lang.NumberFormatException When the int cannot be parsed */ public static int getInt(String key) throws NumberFormatException { String value = get(key); if (value == null) { return -1; } return Integer.parseInt(value); } /** * Get a setting's value as a boolean * * @param key Key name of the setting * @return The value of the setting (any value not understood will be * treated as false) */ public static boolean getBool(String key) { String value = get(key); if (value == null) { return false; } return value.compareTo("1") == 0; } }
tc65lib/src/org/javacint/settings/Settings.java
package org.javacint.settings; //#if sdkns == "siemens" import com.siemens.icm.io.file.FileConnection; //#elif sdkns == "cinterion" //# import com.cinterion.io.file.FileConnection; //#endif import java.io.*; import java.util.*; import javax.microedition.io.Connector; import org.javacint.common.Strings; import org.javacint.logging.Logger; /** * Settings management class. * * This class should take advantage of the PropertiesFile class. */ public class Settings { /** * Settings container. It has both the default and specific settings. */ private static Hashtable settings; /** * Settings filename */ private static String fileName = "settings.txt"; /** * Settings providers. They provide some settings with some default values * and they receive events when some settings are changed. */ private static final Vector providers = new Vector(); private static final String PATH_PREFIX = "file:///a:/"; /** * APN setting. In the AT^SJNET=... format */ public static final String SETTING_APN = "apn"; /** * Protection code */ public static final String SETTING_CODE = "code"; public static final String SETTING_MANAGERSPHONE = "phoneManager"; /** * ICCID sim card setting. This is very useful to detect iccid card change * (SIM card change detection not handled by the settings class itself). */ public static final String SETTING_ICCID = "iccid"; /** * pincode setting name. Using pincode is NOT recommended. */ public static final String SETTING_PINCODE = "pincode"; /** * jadurl setting */ public static final String SETTING_JADURL = "jadurl"; private static boolean madeSomeChanges = false; private static boolean firstStartup = false; /** * loading state */ private static boolean loading; public static synchronized void setFilename(String filename) { fileName = filename; settings = null; } /** * Define if we are loading the program. If we are loading, we can't * get/set/load/save settings. We can only add and remove settings * consumers. * * @param l Loading state */ public static void loading(boolean l) { loading = l; } /** * Get the settings filename. * * @return Filename without the "file:///a:/" path prefix */ public static String getFilename() { return fileName; } /** * If this is the first startup. * * The first statup flag is activated if we don't have any settings file. * * @return If this is the first startup */ public static boolean firstStartup() { return firstStartup; } /** * Load settings. We should replace the line by line loading code by using * the PropertiesFile class. */ public static synchronized void load() { if (Logger.BUILD_DEBUG) { Logger.log("Settings.load();"); } StringBuffer buffer = new StringBuffer(); Hashtable newSettings = getDefaultSettings(); try { FileConnection fc = (FileConnection) Connector.open(PATH_PREFIX + fileName, Connector.READ); if (!fc.exists()) { if (Logger.BUILD_WARNING) { Logger.log("Settings.load: File \"" + fileName + "\" doesn\'t exist!"); } firstStartup = true; fc = (FileConnection) Connector.open(PATH_PREFIX + fileName + ".old", Connector.READ); if (fc.exists()) { if (Logger.BUILD_WARNING) { Logger.log("Settings.load: But \"" + fileName + ".old\" exists ! 
"); } } else { return; } } InputStream is = fc.openInputStream(); while (is.available() > 0) { int c = is.read(); if (c == '\n') { loadLine(newSettings, buffer.toString()); buffer.setLength(0); } else { buffer.append((char) c); } } is.close(); fc.close(); } catch (IOException ex) { // The exception we shoud have is at first launch : // There shouldn't be any file to read from if (Logger.BUILD_CRITICAL) { Logger.log("Settings.load", ex); } } finally { settings = newSettings; } } /** * Treat each line of the file * * @param def Default settings * @param line Line to parse */ private static void loadLine(Hashtable settings, String line) { String[] spl = Strings.split('=', line); String key = spl[0]; String value = spl[1]; // If default settings hashTable contains this key // we can use this value if (settings.containsKey(key)) { settings.remove(key); settings.put(key, value); } // If not, we just forget about it (no dirty settings) } public static void onSettingsChanged(String[] names) { onSettingsChanged(names, null); } /** * Launch an event when some settings * * @param names Names of the settings */ public static void onSettingsChanged(String[] names, SettingsProvider caller) { try { synchronized (providers) { for (Enumeration en = providers.elements(); en.hasMoreElements();) { SettingsProvider cons = (SettingsProvider) en.nextElement(); if (cons == caller) { continue; } cons.settingsChanged(names); } } } catch (Exception ex) { if (Logger.BUILD_CRITICAL) { Logger.log("Settings.OnSeettingChanged", ex); } } } /** * Get default settings * * @return Default settings Hashtable */ public static Hashtable getDefaultSettings() { Hashtable defaultSettings = new Hashtable(); // The following settings are mandatory but they // are NOT handled by the Settings class. 
// Code is mandatory (SMS control protection) defaultSettings.put(SETTING_CODE, "1234"); // APN is mandatory (GPRS setup) defaultSettings.put(SETTING_APN, ""); // ICCID is mandatory (SIM card detection) defaultSettings.put(SETTING_ICCID, ""); synchronized (providers) { for (Enumeration en = providers.elements(); en.hasMoreElements();) { SettingsProvider cons = (SettingsProvider) en.nextElement(); cons.getDefaultSettings(defaultSettings); } } return defaultSettings; } /** * Add a settings provier class * * @param consumer Provider of settings and consumer of settings change */ public static void addProvider(SettingsProvider consumer) { // if (Logger.BUILD_DEBUG) { // Logger.log("Settings.addSettingsConsumer( " + consumer + " );"); // } if (!loading) { // We should never add or removed a settings provider when we have finished loading throw new RuntimeException("Settings.addSettingsConsumer: We're not loading anymore !"); } synchronized (providers) { providers.addElement(consumer); // Adding a provider voids the current state of the settings settings = null; } } /** * Remove a settings consumer class * * @param consumer Consumer of settings */ public static void removeProvider(SettingsProvider consumer) { synchronized (providers) { if (providers.contains(consumer)) { providers.removeElement(consumer); } settings = null; } } /** * Reset all settings */ public synchronized static void reset() { try { FileConnection fc = (FileConnection) Connector.open(PATH_PREFIX + fileName, Connector.READ_WRITE); if (fc.exists()) { fc.delete(); } load(); settings = null; } catch (Exception ex) { if (Logger.BUILD_CRITICAL) { Logger.log("Settings.resetErything", ex); } } } /** * Save setttings */ public static synchronized void save() { synchronized (Settings.class) { // If there's no settings, we shouldn't have to save anything if (settings == null) { return; } // If no changes were made, we shouldn't have to save anything if (!madeSomeChanges) { return; } try { Hashtable defSettings = getDefaultSettings(); String fileNameTmp = fileName + ".tmp"; String fileNameOld = fileName + ".old"; String settingFileUrl = PATH_PREFIX + fileName; String settingFileUrlTmp = PATH_PREFIX + fileNameTmp; String settingFileUrlOld = PATH_PREFIX + fileNameOld; FileConnection fc = (FileConnection) Connector.open(settingFileUrlTmp, Connector.READ_WRITE); if (fc.exists()) { fc.delete(); } fc.create(); OutputStream os = fc.openOutputStream(); Enumeration e = defSettings.keys(); while (e.hasMoreElements()) { String key = (String) e.nextElement(); String value = (String) settings.get(key); String defValue = (String) defSettings.get(key); if ( // if there is a default value defValue != null && // and // the value isn't the same as the default value defValue.compareTo(value) != 0) { String line = key + "=" + value + '\n'; // if ( Logger.BUILD_DEBUG ) { // Logger.log("Settings.save.line: " + line); // } os.write(line.getBytes()); } } os.flush(); os.close(); { // We move the current setting file to the old one FileConnection currentFile = (FileConnection) Connector.open(settingFileUrl, Connector.READ_WRITE); // if ( Logger.BUILD_DEBUG ) { // Logger.log("Settings.save: Renaming \"" + settingFileUrl + "\" to \"" + fileNameOld + "\""); // } if (currentFile.exists()) { { // We delete the old setting file // if ( Logger.BUILD_DEBUG ) { // Logger.log("Settings.save: Deleting \"" + settingFileUrlOld + "\""); // } FileConnection oldFile = (FileConnection) Connector.open(settingFileUrlOld, Connector.READ_WRITE); if (oldFile.exists()) { 
oldFile.delete(); } } currentFile.rename(fileNameOld); } } { // We move the tmp file to the current setting file // if ( Logger.BUILD_DEBUG ) { // Logger.log("Setting.save: Renaming \"" + settingFileUrlTmp + "\" to \"" + _fileName + "\""); // } fc.rename(fileName); fc.close(); } // We don't have anything to be written anymore madeSomeChanges = false; } catch (Exception ex) { if (Logger.BUILD_CRITICAL) { Logger.log("Settings.Save", ex, true); } } } } /** * Init (and ReInit) method */ private static void checkLoad() { if (settings == null) { load(); } } /** * Get a setting's value as a String * * @param key Key Name of the setting * @return String value of the setting */ public static synchronized String get(String key) { return (String) getSettings().get(key); } /** * Get all the settings * * @return All the settings */ public static synchronized Hashtable getSettings() { checkLoad(); return settings; } /** * Set a setting * * @param key Setting to set * @param value Value of the setting */ public static synchronized void set(String key, String value) { if (Logger.BUILD_DEBUG) { Logger.log("Settings.setSetting( \"" + key + "\", \"" + value + "\" );"); } if (setWithoutEvent(key, value)) { onSettingsChanged(new String[]{key}); } } public void set(String key, int value) { set(key, "" + value); } public void set(String key, boolean value) { set(key, value ? "1" : "0"); } /** * Set a setting without launching the onSettingsChange method * * @param key Setting to set * @param value Value of the setting * @return If setting was actually changed */ public static synchronized boolean setWithoutEvent(String key, String value) { Hashtable table = getSettings(); if (table.containsKey(key)) { String previousValue = (String) table.get(key); if (previousValue.compareTo(value) == 0) { return false; } } else { return false; } if (loading) { throw new RuntimeException("Settings.setWithoutEvent: You can't change a setting while loading !"); } table.put(key, value); madeSomeChanges = true; return true; } /** * Get a setting's value as an int * * @param key Key Name of the setting * @return Integer value of the setting * @throws java.lang.NumberFormatException When the int cannot be parsed */ public static int getInt(String key) throws NumberFormatException { String value = get(key); if (value == null) { return -1; } return Integer.parseInt(value); } /** * Get a setting's value as a boolean * * @param key Key name of the setting * @return The value of the setting (any value not understood will be * treated as false) */ public static boolean getBool(String key) { String value = get(key); if (value == null) { return false; } return value.compareTo("1") == 0; } }
Small fix to the overloaded Settings.set methods: they weren't declared as static.
tc65lib/src/org/javacint/settings/Settings.java
Small fix to the overloaded Settings.set methods: they weren't declared as static.
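The change adds the static modifier to the two convenience overloads of Settings.set so they can be invoked as Settings.set(key, value) without an instance, matching the rest of the class's static API; the int overload also switches from string concatenation to Integer.toString. Before and after, taken from the old and new file contents in this record:

// Old (instance methods, inconsistent with the otherwise static Settings API)
public void set(String key, int value) { set(key, "" + value); }
public void set(String key, boolean value) { set(key, value ? "1" : "0"); }

// New (static, callable as Settings.set(...))
public static void set(String key, int value) { set(key, Integer.toString(value)); }
public static void set(String key, boolean value) { set(key, value ? "1" : "0"); }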
Java
mit
8eca1eaf7ecba7131e0525b7ae43f6678653f956
0
kazocsaba/imageviewer
package hu.kazocsaba.imageviewer; import java.applet.Applet; import java.awt.Container; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Point; import java.awt.Rectangle; import java.awt.RenderingHints; import java.awt.Window; import java.awt.event.MouseEvent; import java.awt.geom.AffineTransform; import java.awt.geom.NoninvertibleTransformException; import java.awt.geom.Point2D; import java.awt.image.BufferedImage; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.swing.CellRendererPane; import javax.swing.JComponent; import javax.swing.JViewport; import javax.swing.SwingUtilities; import javax.swing.event.MouseInputListener; /** * The component that displays the image itself. * @author Kazó Csaba */ class ImageComponent extends JComponent { private ResizeStrategy resizeStrategy = ResizeStrategy.SHRINK_TO_FIT; private BufferedImage image; private boolean pixelatedZoom=false; private Object interpolationType=RenderingHints.VALUE_INTERPOLATION_BICUBIC; private double zoomFactor=1; private final List<ImageMouseMotionListener> moveListeners = new ArrayList<ImageMouseMotionListener>(4); private final List<ImageMouseClickListener> clickListeners = new ArrayList<ImageMouseClickListener>(4); private final MouseEventTranslator mouseEventTranslator = new MouseEventTranslator(); private final PaintManager paintManager = new PaintManager(); /* Handles repositioning the scroll pane when the image is resized so that the same area remains visible. */ class Rescroller { private Point preparedCenter=null; void prepare() { if (image!=null && hasSize()) { Rectangle viewRect=((JViewport)SwingUtilities.getAncestorOfClass(JViewport.class, ImageComponent.this)).getViewRect(); preparedCenter=new Point(viewRect.x+viewRect.width/2, viewRect.y+viewRect.height/2); try { getImageTransform().inverseTransform(preparedCenter, preparedCenter); } catch (NoninvertibleTransformException e) { throw new Error(e); } } } void rescroll() { if (preparedCenter!=null) { JViewport viewport = (JViewport)SwingUtilities.getAncestorOfClass(JViewport.class, ImageComponent.this); Dimension viewSize=(viewport).getExtentSize(); getImageTransform().transform(preparedCenter, preparedCenter); Rectangle view = new Rectangle(preparedCenter.x-viewSize.width/2, preparedCenter.y-viewSize.height/2, viewSize.width, viewSize.height); scrollRectToVisible(view); mouseEventTranslator.correctionalFire(); preparedCenter=null; } } } private Rescroller rescroller=new Rescroller(); /** * This set is shared by all synchronized image components and contains all * synchronized image components. Unless there is no synchronization; then it is null. 
*/ private Set<ImageComponent> trackSizeIfEmpty = null; private final PropertyChangeSupport propertyChangeSupport; private final Object eventSource; public ImageComponent(Object eventSource, PropertyChangeSupport propertyChangeSupport) { this.eventSource = eventSource; this.propertyChangeSupport=propertyChangeSupport; mouseEventTranslator.register(this); setOpaque(true); } private boolean hasSize() { return getWidth()>0 && getHeight()>0; } @Override public Dimension getMaximumSize() { return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE); } public void addImageMouseMoveListener(ImageMouseMotionListener l) { if (l!=null) moveListeners.add(l); } public void removeImageMouseMoveListener(ImageMouseMotionListener l) { if (l!=null) moveListeners.remove(l); } public void addImageMouseClickListener(ImageMouseClickListener l) { if (l!=null) clickListeners.add(l); } public void removeImageMouseClickListener(ImageMouseClickListener l) { if (l!=null) clickListeners.remove(l); } public void setImage(BufferedImage newImage) { BufferedImage oldImage = image; image = newImage; paintManager.notifyChanged(); if (oldImage != newImage && (oldImage == null || newImage == null || oldImage.getWidth() != newImage.getWidth() || oldImage.getHeight() != newImage.getHeight())) revalidate(); repaint(); propertyChangeSupport.firePropertyChange("image", oldImage, newImage); } public BufferedImage getImage() { return image; } /** * Preforms all necessary actions to ensure that the viewer is resized to its proper size. It does that by invoking * {@code validate()} on the viewer's validateRoot. It also issues a {@code repaint()}. */ private void resizeNow() { invalidate(); // find the validate root; adapted from the package-private SwingUtilities.getValidateRoot Container root = null; Container c=this; for (; c != null; c = c.getParent()) { if (!c.isDisplayable() || c instanceof CellRendererPane) { return; } if (c.isValidateRoot()) { root = c; break; } } if (root == null) return; for (; c != null; c = c.getParent()) { if (!c.isDisplayable() || !c.isVisible()) { return; } if (c instanceof Window || c instanceof Applet) { break; } } if (c==null) return; root.validate(); repaint(); } public void setResizeStrategy(ResizeStrategy resizeStrategy) { if (resizeStrategy == this.resizeStrategy) return; rescroller.prepare(); ResizeStrategy oldResizeStrategy=this.resizeStrategy; this.resizeStrategy = resizeStrategy; resizeNow(); rescroller.rescroll(); propertyChangeSupport.firePropertyChange("resizeStrategy", oldResizeStrategy, resizeStrategy); } public ResizeStrategy getResizeStrategy() { return resizeStrategy; } public void setInterpolationType(Object type) { if (interpolationType==type) return; if ( type!=RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR && type!=RenderingHints.VALUE_INTERPOLATION_BILINEAR && type!=RenderingHints.VALUE_INTERPOLATION_BICUBIC) throw new IllegalArgumentException("Invalid interpolation type; use one of the RenderingHints constants"); Object old=this.interpolationType; this.interpolationType=type; paintManager.notifyChanged(); repaint(); propertyChangeSupport.firePropertyChange("interpolationType", old, type); } public Object getInterpolationType() { return interpolationType; } public void setPixelatedZoom(boolean pixelatedZoom) { if (pixelatedZoom == this.pixelatedZoom) return; this.pixelatedZoom = pixelatedZoom; paintManager.notifyChanged(); repaint(); propertyChangeSupport.firePropertyChange("pixelatedZoom", !pixelatedZoom, pixelatedZoom); } public boolean isPixelatedZoom() { return 
pixelatedZoom; } /** Returns the zoom factor used when resize strategy is CUSTOM_ZOOM. */ public double getZoomFactor() { return zoomFactor; } /** * Sets the zoom factor to use when the resize strategy is CUSTOM_ZOOM. * <p> * Note that calling this function does not change the current resize strategy. * @throws IllegalArgumentException if {@code newZoomFactor} is not a positive number */ public void setZoomFactor(double newZoomFactor) { if (zoomFactor==newZoomFactor) return; if (newZoomFactor<=0 || Double.isInfinite(newZoomFactor) || Double.isNaN(newZoomFactor)) throw new IllegalArgumentException("Invalid zoom factor: "+newZoomFactor); if (getResizeStrategy()==ResizeStrategy.CUSTOM_ZOOM) { rescroller.prepare(); } double oldZoomFactor=zoomFactor; zoomFactor=newZoomFactor; if (getResizeStrategy()==ResizeStrategy.CUSTOM_ZOOM) { resizeNow(); rescroller.rescroll(); } propertyChangeSupport.firePropertyChange("zoomFactor", oldZoomFactor, newZoomFactor); } @Override public Dimension getPreferredSize() { if (image == null) { if (trackSizeIfEmpty!=null) for (ImageComponent c:trackSizeIfEmpty) if (c.getImage()!=null) return c.getPreferredSize(); return new Dimension(); } else if (resizeStrategy==ResizeStrategy.CUSTOM_ZOOM) { return new Dimension((int)Math.ceil(image.getWidth()*zoomFactor), (int)Math.ceil(image.getHeight()*zoomFactor)); } else return new Dimension(image.getWidth(), image.getHeight()); } /** * Adds a component to the trackSizeIfEmpty set. If this component has no image set * but one of the tracked ones does, then the size of this component will be set to * match the size of the image displayed in one of the tracked components. This * method is useful if the scroll bars of image viewers are synchronized, because * if a viewer has no image set, it can cause the scrolling of a viewer that has an * image set not to work. * @param c the component to track */ public void trackSizeIfEmpty(ImageComponent c) { if (trackSizeIfEmpty!=null) { if (c.trackSizeIfEmpty!=null) { trackSizeIfEmpty.addAll(c.trackSizeIfEmpty); c.trackSizeIfEmpty=trackSizeIfEmpty; } else { trackSizeIfEmpty.add(c); c.trackSizeIfEmpty=trackSizeIfEmpty; } } else { if (c.trackSizeIfEmpty!=null) { c.trackSizeIfEmpty.add(this); trackSizeIfEmpty=c.trackSizeIfEmpty; } else { trackSizeIfEmpty=new HashSet<ImageComponent>(4); trackSizeIfEmpty.add(this); trackSizeIfEmpty.add(c); c.trackSizeIfEmpty=trackSizeIfEmpty; } } } /** * Returns the image pixel that is under the given point. * * @param p a point in component coordinate system * @return the corresponding image pixel, or <code>null</code> if the point is outside the image */ public Point pointToPixel(Point p) { return pointToPixel(p, true); } /** * Returns the image pixel corresponding to the given point. If the <code>clipToImage</code> * parameter is <code>false</code>, then the function will return an appropriately positioned * pixel on an infinite plane, even if the point is outside the image bounds. If * <code>clipToImage</code> is <code>true</code> then the function will return <code>null</code> * for such positions, and any non-null return value will be a valid image pixel. 
* @param p a point in component coordinate system * @param clipToImage whether the function should return <code>null</code> for positions outside * the image bounds * @return the corresponding image pixel * @throws IllegalStateException if there is no image set or if the size of the viewer is 0 (for example because * it is not in a visible component) */ public Point pointToPixel(Point p, boolean clipToImage) { Point2D.Double fp=new Point2D.Double(p.x+.5, p.y+.5); try { getImageTransform().inverseTransform(fp, fp); } catch (NoninvertibleTransformException ex) { throw new Error("Image transformation not invertible"); } p.x=(int)Math.floor(fp.x); p.y=(int)Math.floor(fp.y); if (clipToImage && (p.x < 0 || p.y < 0 || p.x >= image.getWidth() || p.y >= image.getHeight())) { return null; } return p; } @Override protected void paintComponent(Graphics g) { paintManager.paintComponent(g); } /** * Returns the transformation that is applied to the image. Most commonly the transformation * is the concatenation of a uniform scale and a translation. * <p> * The <code>AffineTransform</code> * instance returned by this method should not be modified. * @return the transformation applied to the image before painting * @throws IllegalStateException if there is no image set or if the size of the viewer is 0 (for example because * it is not in a visible component) */ public AffineTransform getImageTransform() { if (getImage()==null) throw new IllegalStateException("No image"); if (!hasSize()) throw new IllegalStateException("Viewer size is zero"); double currentZoom; switch (resizeStrategy) { case NO_RESIZE: currentZoom=1; break; case SHRINK_TO_FIT: currentZoom = Math.min(getSizeRatio(), 1); break; case RESIZE_TO_FIT: currentZoom = getSizeRatio(); break; case CUSTOM_ZOOM: currentZoom = zoomFactor; break; default: throw new Error("Unhandled resize strategy"); } AffineTransform tr=new AffineTransform(); tr.setToTranslation((getWidth()-image.getWidth()*currentZoom)/2.0, (getHeight()-image.getHeight()*currentZoom)/2.0); tr.scale(currentZoom, currentZoom); return tr; } private double getSizeRatio() { return Math.min(getWidth() / (double) image.getWidth(), getHeight() / (double) image.getHeight()); } /** * Helper class that generates ImageMouseEvents by translating normal mouse events onto * the image. */ private class MouseEventTranslator implements MouseInputListener, PropertyChangeListener { /** This flag is true if the mouse cursor is inside the bounds of the image. */ private boolean on=false; /** * The last position reported. This is used to avoid multiple successive image mouse motion events * with the same position. */ private Point lastPosition=null; /** Sets up this translator. 
*/ private void register(ImageComponent ic) { ic.addMouseListener(this); ic.addMouseMotionListener(this); ic.propertyChangeSupport.addPropertyChangeListener(this); } private void handleMouseAt(Point position, MouseEvent event) { if (image==null) { if (on) { on=false; fireMouseExit(); } } else { if (position!=null) position=pointToPixel(position); if (position==null) { if (on) { on=false; fireMouseExit(); } } else { if (!on) { on=true; lastPosition=null; fireMouseEnter(position.x, position.y, event); } if (!position.equals(lastPosition)) { lastPosition=position; fireMouseAtPixel(position.x, position.y, event); } } } } @Override public void mouseClicked(MouseEvent e) { if (image == null || !on) return; Point p = pointToPixel(e.getPoint()); if (p != null) { fireMouseClickedAtPixel(p.x, p.y, e); } } @Override public void mouseEntered(MouseEvent e) { if (image != null) { Point p=pointToPixel(e.getPoint()); if (p!=null) { on=true; fireMouseEnter(p.x, p.y, e); fireMouseAtPixel(p.x, p.y, e); } } } @Override public void mouseExited(MouseEvent e) { if (on) { on = false; fireMouseExit(); } } @Override public void mouseMoved(MouseEvent e) { handleMouseAt(e.getPoint(), e); } @Override public void mouseDragged(MouseEvent e) { if (image==null) return; Point p = pointToPixel(e.getPoint(), false); fireMouseDrag(p.x, p.y, e); } @Override public void propertyChange(PropertyChangeEvent evt) { if ( "image".equals(evt.getPropertyName()) || "resizeStrategy".equals(evt.getPropertyName()) || (getResizeStrategy()==ResizeStrategy.CUSTOM_ZOOM && "zoomFactor".equals(evt.getPropertyName()))) { correctionalFire(); } } /** * Fires a motion event based on the current cursor position. Use this method if something other than mouse motion * changed where the cursor is relative to the image. */ private void correctionalFire() { /** * We use our parent, LayeredImageView, to locate the mouse. If the viewer has an overlay, then * ImageComponent.getMousePosition will return null because the mouse is over the overlay and not the image * component. 
*/ handleMouseAt(getParent().getMousePosition(true), null); } private void fireMouseAtPixel(int x, int y, MouseEvent ev) { ImageMouseEvent e = null; for (ImageMouseMotionListener imageMouseMoveListener: moveListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, x, y, ev); imageMouseMoveListener.mouseMoved(e); } } private void fireMouseClickedAtPixel(int x, int y, MouseEvent ev) { ImageMouseEvent e = null; for (ImageMouseClickListener imageMouseClickListener: clickListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, x, y, ev); imageMouseClickListener.mouseClicked(e); } } private void fireMouseEnter(int x, int y, MouseEvent ev) { ImageMouseEvent e = null; for (ImageMouseMotionListener imageMouseMoveListener: moveListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, x, y, ev); imageMouseMoveListener.mouseEntered(e); } } private void fireMouseExit() { ImageMouseEvent e = null; for (ImageMouseMotionListener imageMouseMoveListener: moveListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, -1, -1, null); imageMouseMoveListener.mouseExited(e); } } private void fireMouseDrag(int x, int y, MouseEvent ev) { ImageMouseEvent e = null; for (ImageMouseMotionListener imageMouseMoveListener: moveListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, x, y, ev); imageMouseMoveListener.mouseDragged(e); } } @Override public void mousePressed(MouseEvent e) {} @Override public void mouseReleased(MouseEvent e) {} } /** * Helper class that manages the actual painting. */ private class PaintManager { BufferedImage cachedImage=null; boolean cachedImageChanged=false; AffineTransform cachedTransform; private void doPaint(Graphics2D gg, AffineTransform imageTransform) { gg.setColor(getBackground()); gg.fillRect(0, 0, getWidth(), getHeight()); gg.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); if (pixelatedZoom && imageTransform.getScaleX()>=1) gg.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR); else gg.setRenderingHint(RenderingHints.KEY_INTERPOLATION, interpolationType); gg.drawImage(image, imageTransform, ImageComponent.this); } private void ensureCachedValid(AffineTransform imageTransform) { boolean cacheValid; // create the image if necessary; if the existing one is sufficiently large, use it if (cachedImage==null || cachedImage.getWidth()<getWidth() || cachedImage.getHeight()<getHeight()) { cachedImage=getGraphicsConfiguration().createCompatibleImage(getWidth(), getHeight()); cacheValid=false; } else { cacheValid = cachedTransform.equals(imageTransform) && !cachedImageChanged; } if (!cacheValid) { Graphics2D gg=cachedImage.createGraphics(); doPaint(gg, imageTransform); gg.dispose(); cachedImageChanged=false; cachedTransform=new AffineTransform(imageTransform); } } /** * Called when a property which affects how the component is painted changes. This invalidates the cache and causes * it to be redrawn upon the next paint request. 
*/ public void notifyChanged() { cachedImageChanged=true; } public void paintComponent(Graphics g) { if (image==null) { Graphics2D gg=(Graphics2D)g.create(); gg.setColor(getBackground()); gg.fillRect(0, 0, getWidth(), getHeight()); gg.dispose(); return; } AffineTransform imageTransform = getImageTransform(); if (imageTransform.getScaleX()<1 && interpolationType!=RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR) { /* * We're shrinking the image; instead of letting the Graphics object do it every time, we do it and cache * the result. */ ensureCachedValid(imageTransform); g.drawImage(cachedImage, 0, 0, ImageComponent.this); } else { // draw the image directly Graphics2D gg=(Graphics2D)g.create(); doPaint(gg, imageTransform); gg.dispose(); } } } }
src/main/java/hu/kazocsaba/imageviewer/ImageComponent.java
package hu.kazocsaba.imageviewer; import java.applet.Applet; import java.awt.Container; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Point; import java.awt.Rectangle; import java.awt.RenderingHints; import java.awt.Window; import java.awt.event.MouseEvent; import java.awt.geom.AffineTransform; import java.awt.geom.NoninvertibleTransformException; import java.awt.geom.Point2D; import java.awt.image.BufferedImage; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.swing.CellRendererPane; import javax.swing.JComponent; import javax.swing.JViewport; import javax.swing.SwingUtilities; import javax.swing.event.MouseInputListener; /** * The component that displays the image itself. * @author Kazó Csaba */ class ImageComponent extends JComponent { private ResizeStrategy resizeStrategy = ResizeStrategy.SHRINK_TO_FIT; private BufferedImage image; private boolean pixelatedZoom=false; private Object interpolationType=RenderingHints.VALUE_INTERPOLATION_BICUBIC; private double zoomFactor=1; private final List<ImageMouseMotionListener> moveListeners = new ArrayList<ImageMouseMotionListener>(4); private final List<ImageMouseClickListener> clickListeners = new ArrayList<ImageMouseClickListener>(4); private final MouseEventTranslator mouseEventTranslator = new MouseEventTranslator(); private final PaintManager paintManager = new PaintManager(); /* Handles repositioning the scroll pane when the image is resized so that the same area remains visible. */ class Rescroller { private Point preparedCenter=null; void prepare() { if (image!=null && hasSize()) { Rectangle viewRect=((JViewport)SwingUtilities.getAncestorOfClass(JViewport.class, ImageComponent.this)).getViewRect(); preparedCenter=new Point(viewRect.x+viewRect.width/2, viewRect.y+viewRect.height/2); try { getImageTransform().inverseTransform(preparedCenter, preparedCenter); } catch (NoninvertibleTransformException e) { throw new Error(e); } } } void rescroll() { if (preparedCenter!=null) { JViewport viewport = (JViewport)SwingUtilities.getAncestorOfClass(JViewport.class, ImageComponent.this); Dimension viewSize=(viewport).getExtentSize(); getImageTransform().transform(preparedCenter, preparedCenter); Rectangle view = new Rectangle(preparedCenter.x-viewSize.width/2, preparedCenter.y-viewSize.height/2, viewSize.width, viewSize.height); scrollRectToVisible(view); mouseEventTranslator.correctionalFire(); preparedCenter=null; } } } private Rescroller rescroller=new Rescroller(); /** * This set is shared by all synchronized image components and contains all * synchronized image components. Unless there is no synchronization; then it is null. 
*/ private Set<ImageComponent> trackSizeIfEmpty = null; private final PropertyChangeSupport propertyChangeSupport; private final Object eventSource; public ImageComponent(Object eventSource, PropertyChangeSupport propertyChangeSupport) { this.eventSource = eventSource; this.propertyChangeSupport=propertyChangeSupport; mouseEventTranslator.register(this); setOpaque(true); } private boolean hasSize() { return getWidth()>0 && getHeight()>0; } @Override public Dimension getMaximumSize() { return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE); } public void addImageMouseMoveListener(ImageMouseMotionListener l) { if (l!=null) moveListeners.add(l); } public void removeImageMouseMoveListener(ImageMouseMotionListener l) { if (l!=null) moveListeners.remove(l); } public void addImageMouseClickListener(ImageMouseClickListener l) { if (l!=null) clickListeners.add(l); } public void removeImageMouseClickListener(ImageMouseClickListener l) { if (l!=null) clickListeners.remove(l); } public void setImage(BufferedImage newImage) { BufferedImage oldImage = image; image = newImage; paintManager.notifyChanged(); if (oldImage != newImage && (oldImage == null || newImage == null || oldImage.getWidth() != newImage.getWidth() || oldImage.getHeight() != newImage.getHeight())) revalidate(); repaint(); propertyChangeSupport.firePropertyChange("image", oldImage, newImage); } public BufferedImage getImage() { return image; } /** * Preforms all necessary actions to ensure that the viewer is resized to its proper size. It does that by invoking * {@code validate()} on the viewer's validateRoot. It also issues a {@code repaint()}. */ private void resizeNow() { invalidate(); // find the validate root; adapted from the package-private SwingUtilities.getValidateRoot Container root = null; Container c=this; for (; c != null; c = c.getParent()) { if (!c.isDisplayable() || c instanceof CellRendererPane) { return; } if (c.isValidateRoot()) { root = c; break; } } if (root == null) return; for (; c != null; c = c.getParent()) { if (!c.isDisplayable() || !c.isVisible()) { return; } if (c instanceof Window || c instanceof Applet) { break; } } if (c==null) return; root.validate(); repaint(); } public void setResizeStrategy(ResizeStrategy resizeStrategy) { if (resizeStrategy == this.resizeStrategy) return; rescroller.prepare(); ResizeStrategy oldResizeStrategy=this.resizeStrategy; this.resizeStrategy = resizeStrategy; resizeNow(); rescroller.rescroll(); propertyChangeSupport.firePropertyChange("resizeStrategy", oldResizeStrategy, resizeStrategy); } public ResizeStrategy getResizeStrategy() { return resizeStrategy; } public void setInterpolationType(Object type) { if (interpolationType==type) return; if ( type!=RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR && type!=RenderingHints.VALUE_INTERPOLATION_BILINEAR && type!=RenderingHints.VALUE_INTERPOLATION_BICUBIC) throw new IllegalArgumentException("Invalid interpolation type; use one of the RenderingHints constants"); Object old=this.interpolationType; this.interpolationType=type; paintManager.notifyChanged(); repaint(); propertyChangeSupport.firePropertyChange("interpolationType", old, type); } public Object getInterpolationType() { return interpolationType; } public void setPixelatedZoom(boolean pixelatedZoom) { if (pixelatedZoom == this.pixelatedZoom) return; this.pixelatedZoom = pixelatedZoom; paintManager.notifyChanged(); repaint(); propertyChangeSupport.firePropertyChange("pixelatedZoom", !pixelatedZoom, pixelatedZoom); } public boolean isPixelatedZoom() { return 
pixelatedZoom; } /** Returns the zoom factor used when resize strategy is CUSTOM_ZOOM. */ public double getZoomFactor() { return zoomFactor; } /** * Sets the zoom factor to use when the resize strategy is CUSTOM_ZOOM. * <p> * Note that calling this function does not change the current resize strategy. * @throws IllegalArgumentException if {@code newZoomFactor} is not a positive number */ public void setZoomFactor(double newZoomFactor) { if (zoomFactor==newZoomFactor) return; if (newZoomFactor<=0 || Double.isInfinite(newZoomFactor) || Double.isNaN(newZoomFactor)) throw new IllegalArgumentException("Invalid zoom factor: "+newZoomFactor); if (getResizeStrategy()==ResizeStrategy.CUSTOM_ZOOM) { rescroller.prepare(); } double oldZoomFactor=zoomFactor; zoomFactor=newZoomFactor; if (getResizeStrategy()==ResizeStrategy.CUSTOM_ZOOM) { resizeNow(); rescroller.rescroll(); } propertyChangeSupport.firePropertyChange("zoomFactor", oldZoomFactor, newZoomFactor); } @Override public Dimension getPreferredSize() { if (image == null) { if (trackSizeIfEmpty!=null) for (ImageComponent c:trackSizeIfEmpty) if (c.getImage()!=null) return c.getPreferredSize(); return new Dimension(); } else if (resizeStrategy==ResizeStrategy.CUSTOM_ZOOM) { return new Dimension((int)Math.ceil(image.getWidth()*zoomFactor), (int)Math.ceil(image.getHeight()*zoomFactor)); } else return new Dimension(image.getWidth(), image.getHeight()); } /** * Adds a component to the trackSizeIfEmpty set. If this component has no image set * but one of the tracked ones does, then the size of this component will be set to * match the size of the image displayed in one of the tracked components. This * method is useful if the scroll bars of image viewers are synchronized, because * if a viewer has no image set, it can cause the scrolling of a viewer that has an * image set not to work. * @param c the component to track */ public void trackSizeIfEmpty(ImageComponent c) { if (trackSizeIfEmpty!=null) { if (c.trackSizeIfEmpty!=null) { trackSizeIfEmpty.addAll(c.trackSizeIfEmpty); c.trackSizeIfEmpty=trackSizeIfEmpty; } else { trackSizeIfEmpty.add(c); c.trackSizeIfEmpty=trackSizeIfEmpty; } } else { if (c.trackSizeIfEmpty!=null) { c.trackSizeIfEmpty.add(this); trackSizeIfEmpty=c.trackSizeIfEmpty; } else { trackSizeIfEmpty=new HashSet<ImageComponent>(4); trackSizeIfEmpty.add(this); trackSizeIfEmpty.add(c); c.trackSizeIfEmpty=trackSizeIfEmpty; } } } /** * Returns the image pixel that is under the given point. * * @param p a point in component coordinate system * @return the corresponding image pixel, or <code>null</code> if the point is outside the image */ public Point pointToPixel(Point p) { return pointToPixel(p, true); } /** * Returns the image pixel corresponding to the given point. If the <code>clipToImage</code> * parameter is <code>false</code>, then the function will return an appropriately positioned * pixel on an infinite plane, even if the point is outside the image bounds. If * <code>clipToImage</code> is <code>true</code> then the function will return <code>null</code> * for such positions, and any non-null return value will be a valid image pixel. 
* @param p a point in component coordinate system * @param clipToImage whether the function should return <code>null</code> for positions outside * the image bounds * @return the corresponding image pixel * @throws IllegalStateException if there is no image set or if the size of the viewer is 0 (for example because * it is not in a visible component) */ public Point pointToPixel(Point p, boolean clipToImage) { Point2D.Double fp=new Point2D.Double(p.x+.5, p.y+.5); try { getImageTransform().inverseTransform(fp, fp); } catch (NoninvertibleTransformException ex) { throw new Error("Image transformation not invertible"); } p.x=(int)Math.floor(fp.x); p.y=(int)Math.floor(fp.y); if (clipToImage && (p.x < 0 || p.y < 0 || p.x >= image.getWidth() || p.y >= image.getHeight())) { return null; } return p; } @Override protected void paintComponent(Graphics g) { paintManager.paintComponent(g); } /** * Returns the transformation that is applied to the image. Most commonly the transformation * is the concatenation of a uniform scale and a translation. * <p> * The <code>AffineTransform</code> * instance returned by this method should not be modified. * @return the transformation applied to the image before painting * @throws IllegalStateException if there is no image set or if the size of the viewer is 0 (for example because * it is not in a visible component) */ public AffineTransform getImageTransform() { if (getImage()==null) throw new IllegalStateException("No image"); if (!hasSize()) throw new IllegalStateException("Viewer size is zero"); double currentZoom; switch (resizeStrategy) { case NO_RESIZE: currentZoom=1; break; case SHRINK_TO_FIT: currentZoom = Math.min(getSizeRatio(), 1); break; case RESIZE_TO_FIT: currentZoom = getSizeRatio(); break; case CUSTOM_ZOOM: currentZoom = zoomFactor; break; default: throw new Error("Unhandled resize strategy"); } AffineTransform tr=new AffineTransform(); tr.setToTranslation((getWidth()-image.getWidth()*currentZoom)/2.0, (getHeight()-image.getHeight()*currentZoom)/2.0); tr.scale(currentZoom, currentZoom); return tr; } private double getSizeRatio() { return Math.min(getWidth() / (double) image.getWidth(), getHeight() / (double) image.getHeight()); } /** * Helper class that generates ImageMouseEvents by translating normal mouse events onto * the image. */ private class MouseEventTranslator implements MouseInputListener, PropertyChangeListener { /** This flag is true if the mouse cursor is inside the bounds of the image. */ private boolean on=false; /** * The last position reported. This is used to avoid multiple successive image mouse motion events * with the same position. */ private Point lastPosition=null; /** Sets up this translator. 
*/ private void register(ImageComponent ic) { ic.addMouseListener(this); ic.addMouseMotionListener(this); ic.propertyChangeSupport.addPropertyChangeListener(this); } private void handleMouseAt(Point position, MouseEvent event) { if (image==null) { if (on) { on=false; fireMouseExit(); } } else { if (position!=null) position=pointToPixel(position); if (position==null) { if (on) { on=false; fireMouseExit(); } } else { if (!on) { on=true; lastPosition=null; fireMouseEnter(position.x, position.y, event); } if (!position.equals(lastPosition)) { lastPosition=position; fireMouseAtPixel(position.x, position.y, event); } } } } @Override public void mouseClicked(MouseEvent e) { if (image == null || !on) return; Point p = pointToPixel(e.getPoint()); if (p != null) { fireMouseClickedAtPixel(p.x, p.y, e); } } @Override public void mouseEntered(MouseEvent e) { if (image != null) { Point p=pointToPixel(e.getPoint()); if (p!=null) { on=true; fireMouseEnter(p.x, p.y, e); fireMouseAtPixel(p.x, p.y, e); } } } @Override public void mouseExited(MouseEvent e) { if (on) { on = false; fireMouseExit(); } } @Override public void mouseMoved(MouseEvent e) { handleMouseAt(e.getPoint(), e); } @Override public void mouseDragged(MouseEvent e) { if (image==null) return; Point p = pointToPixel(e.getPoint(), false); fireMouseDrag(p.x, p.y, e); } @Override public void propertyChange(PropertyChangeEvent evt) { if ( "image".equals(evt.getPropertyName()) || "resizeStrategy".equals(evt.getPropertyName()) || (getResizeStrategy()==ResizeStrategy.CUSTOM_ZOOM && "zoomFactor".equals(evt.getPropertyName()))) { correctionalFire(); } } /** * Fires a motion event based on the current cursor position. Use this method if something other than mouse motion * changed where the cursor is relative to the image. */ private void correctionalFire() { handleMouseAt(ImageComponent.this.getMousePosition(), null); } private void fireMouseAtPixel(int x, int y, MouseEvent ev) { ImageMouseEvent e = null; for (ImageMouseMotionListener imageMouseMoveListener: moveListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, x, y, ev); imageMouseMoveListener.mouseMoved(e); } } private void fireMouseClickedAtPixel(int x, int y, MouseEvent ev) { ImageMouseEvent e = null; for (ImageMouseClickListener imageMouseClickListener: clickListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, x, y, ev); imageMouseClickListener.mouseClicked(e); } } private void fireMouseEnter(int x, int y, MouseEvent ev) { ImageMouseEvent e = null; for (ImageMouseMotionListener imageMouseMoveListener: moveListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, x, y, ev); imageMouseMoveListener.mouseEntered(e); } } private void fireMouseExit() { ImageMouseEvent e = null; for (ImageMouseMotionListener imageMouseMoveListener: moveListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, -1, -1, null); imageMouseMoveListener.mouseExited(e); } } private void fireMouseDrag(int x, int y, MouseEvent ev) { ImageMouseEvent e = null; for (ImageMouseMotionListener imageMouseMoveListener: moveListeners) { if (e == null) e = new ImageMouseEvent(eventSource, image, x, y, ev); imageMouseMoveListener.mouseDragged(e); } } @Override public void mousePressed(MouseEvent e) {} @Override public void mouseReleased(MouseEvent e) {} } /** * Helper class that manages the actual painting. 
*/ private class PaintManager { BufferedImage cachedImage=null; boolean cachedImageChanged=false; AffineTransform cachedTransform; private void doPaint(Graphics2D gg, AffineTransform imageTransform) { gg.setColor(getBackground()); gg.fillRect(0, 0, getWidth(), getHeight()); gg.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); if (pixelatedZoom && imageTransform.getScaleX()>=1) gg.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR); else gg.setRenderingHint(RenderingHints.KEY_INTERPOLATION, interpolationType); gg.drawImage(image, imageTransform, ImageComponent.this); } private void ensureCachedValid(AffineTransform imageTransform) { boolean cacheValid; // create the image if necessary; if the existing one is sufficiently large, use it if (cachedImage==null || cachedImage.getWidth()<getWidth() || cachedImage.getHeight()<getHeight()) { cachedImage=getGraphicsConfiguration().createCompatibleImage(getWidth(), getHeight()); cacheValid=false; } else { cacheValid = cachedTransform.equals(imageTransform) && !cachedImageChanged; } if (!cacheValid) { Graphics2D gg=cachedImage.createGraphics(); doPaint(gg, imageTransform); gg.dispose(); cachedImageChanged=false; cachedTransform=new AffineTransform(imageTransform); } } /** * Called when a property which affects how the component is painted changes. This invalidates the cache and causes * it to be redrawn upon the next paint request. */ public void notifyChanged() { cachedImageChanged=true; } public void paintComponent(Graphics g) { if (image==null) { Graphics2D gg=(Graphics2D)g.create(); gg.setColor(getBackground()); gg.fillRect(0, 0, getWidth(), getHeight()); gg.dispose(); return; } AffineTransform imageTransform = getImageTransform(); if (imageTransform.getScaleX()<1 && interpolationType!=RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR) { /* * We're shrinking the image; instead of letting the Graphics object do it every time, we do it and cache * the result. */ ensureCachedValid(imageTransform); g.drawImage(cachedImage, 0, 0, ImageComponent.this); } else { // draw the image directly Graphics2D gg=(Graphics2D)g.create(); doPaint(gg, imageTransform); gg.dispose(); } } } }
Fix synthetic mouse events when image has overlay. Due to the presence of the overlay, the code used to think that the mouse exited the component because getMousePosition returned null for the image component. (The overlay basically covered up the image.) Now we use the parent container (which contains the image and all the overlays) to find the mouse cursor.
src/main/java/hu/kazocsaba/imageviewer/ImageComponent.java
Fix synthetic mouse events when image has overlay.
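The essence of this fix is one line in MouseEventTranslator.correctionalFire(), compared between the old and new contents above: the image component's own getMousePosition() returns null whenever an overlay component sits on top of the image, which made the translator fire a spurious mouse-exit event. The new version asks the parent container, which holds both the image and its overlays, and passes true so the search includes child components:

// Old: null when an overlay covers the image component, so a spurious exit was fired
handleMouseAt(ImageComponent.this.getMousePosition(), null);

// New: the parent (LayeredImageView) contains the image and all overlays,
// so the cursor is still located even when it hovers over an overlay
handleMouseAt(getParent().getMousePosition(true), null);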
Java
mit
43f1adc012ea5d0324dde11bbf002faa9cc65b83
0
conveyal/r5,conveyal/r5,conveyal/r5,conveyal/r5,conveyal/r5
package org.opentripplanner.analyst.broker; import com.fasterxml.jackson.databind.ObjectMapper; import gnu.trove.map.TIntIntMap; import gnu.trove.map.TIntObjectMap; import gnu.trove.map.hash.TIntIntHashMap; import gnu.trove.map.hash.TIntObjectHashMap; import org.glassfish.grizzly.http.server.Response; import org.glassfish.grizzly.http.util.HttpStatus; import org.opentripplanner.analyst.cluster.AnalystClusterRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Deque; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Queue; /** * This class watches for incoming requests for work tasks, and attempts to match them to enqueued tasks. * It draws tasks fairly from all users, and fairly from all jobs within each user, while attempting to respect the * cache affinity of each worker (give it tasks on the same graph it has been working on recently). * * When no work is available, the polling functions return immediately. Workers are expected to sleep and re-poll * after a few tens of seconds. * * TODO if there is a backlog of work (the usual case when jobs are lined up) workers will constantly change graphs * We need a queue of deferred work: (job, timestamp) when a job would have fairly had its work consumed if a worker was available. * Anything that survives at the head of that queue for more than e.g. one minute gets forced on a non-affinity worker. * Any new workers without an affinity preferentially pull work off the deferred queue. * Polling worker connections scan the deferred queue before ever going to the main circular queue. * When the deferred queue exceeds a certain size, that's when we must start more workers. * * We should distinguish between two cases: * 1. we were waiting for work and woke up because work became available. * 2. we were waiting for a consumer and woke up when one arrived. * * The first case implies that many workers should migrate toward the new work. * * Two key ideas are: * 1. Least recently serviced queue of jobs * 2. Affinity Homeostasis * * If we can constantly keep track of the ideal proportion of workers by graph (based on active queues), * and the true proportion of consumers by graph (based on incoming requests) then we can decide when a worker's graph * affinity should be ignored. * * It may also be helpful to mark jobs every time they are skipped in the LRU queue. Each time a job is serviced, * it is taken out of the queue and put at its end. Jobs that have not been serviced float to the top. */ public class Broker implements Runnable { // TODO catalog of recently seen consumers by affinity with IP: response.getRequest().getRemoteAddr(); private static final Logger LOG = LoggerFactory.getLogger(Broker.class); private CircularList<User> users = new CircularList<>(); private int nUndeliveredTasks = 0; private int nWaitingConsumers = 0; // including some that might be closed private int nextTaskId = 0; private ObjectMapper mapper = new ObjectMapper(); /** The messages that have already been delivered to a worker. */ TIntObjectMap<AnalystClusterRequest> deliveredTasks = new TIntObjectHashMap<>(); /** The time at which each task was delivered to a worker, to allow re-delivery. */ TIntIntMap deliveryTimes = new TIntIntHashMap(); /** Requests that are not part of a job and can "cut in line" in front of jobs for immediate execution. 
*/ private Queue<AnalystClusterRequest> priorityTasks = new ArrayDeque<>(); /** Priority requests that have already been farmed out to workers, and are awaiting a response. */ private TIntObjectMap<Response> priorityResponses = new TIntObjectHashMap<>(); /** Outstanding requests from workers for tasks, grouped by worker graph affinity. */ Map<String, Deque<Response>> connectionsForGraph = new HashMap<>(); // Queue of tasks to complete Delete, Enqueue etc. to avoid synchronizing all the functions ? /** * Enqueue a task for execution ASAP, planning to return the response over the same HTTP connection. * Low-reliability, no re-delivery. */ public synchronized void enqueuePriorityTask (QueuePath queuePath, AnalystClusterRequest task, Response response) { task.taskId = nextTaskId++; priorityTasks.add(task); priorityResponses.put(task.taskId, response); } /** Enqueue some tasks for asynchronous execution possibly much later. Results will be saved to S3. */ public synchronized void enqueueTasks (QueuePath queuePath, Collection<AnalystClusterRequest> tasks) { LOG.debug("Queue {}", queuePath); // Assumes tasks are pre-validated and are all on the same user/job User user = findUser(queuePath.userId, true); Job job = user.findJob(queuePath.jobId, true); for (AnalystClusterRequest task : tasks) { task.taskId = nextTaskId++; job.addTask(task); nUndeliveredTasks += 1; LOG.debug("Enqueued task id {} in job {}", task.taskId, job.jobId); } // Wake up the delivery thread if it's waiting on input. // This wakes whatever thread called wait() while holding the monitor for this Broker object. notify(); } /** Long poll operations are enqueued here. */ public synchronized void registerSuspendedResponse(String graphId, Response response) { // The workers are not allowed to request a specific job or task, just a specific graph and queue type. Deque<Response> deque = connectionsForGraph.get(graphId); if (deque == null) { deque = new ArrayDeque<>(); connectionsForGraph.put(graphId, deque); } deque.addLast(response); nWaitingConsumers += 1; // Wake up the delivery thread if it's waiting on consumers. // This is whatever thread called wait() while holding the monitor for this QBroker object. notify(); } /** When we notice that a long poll connection has closed, we remove it here. */ public synchronized boolean removeSuspendedResponse(String graphId, Response response) { Deque<Response> deque = connectionsForGraph.get(graphId); if (deque == null) { return false; } if (deque.remove(response)) { nWaitingConsumers -= 1; LOG.debug("Removed closed connection from queue."); logQueueStatus(); return true; } return false; } private void logQueueStatus() { LOG.info("Status {} undelivered, {} consumers waiting.", nUndeliveredTasks, nWaitingConsumers); } /** * Pull the next job queue with undelivered work fairly from users and jobs. * Pass some of that work to a worker, blocking if necessary until there are workers available. */ public synchronized void deliverTasksForOneJob () throws InterruptedException { // Wait until there are some undelivered tasks. while (nUndeliveredTasks == 0) { LOG.debug("Task delivery thread is going to sleep, there are no tasks waiting for delivery."); logQueueStatus(); wait(); } LOG.debug("Task delivery thread is awake and there are some undelivered tasks."); logQueueStatus(); // Circular lists retain iteration state via their head pointers. 
Job job = null; while (job == null) { User user = users.advance(); if (user == null) { LOG.error("There should always be at least one user here, because there is an undelivered task."); } job = user.jobs.advanceToElement(e -> e.visibleTasks.size() > 0); } // We have found job with some undelivered tasks. Give them to a consumer, // waiting until one is available even if this means ignoring graph affinity. LOG.debug("Task delivery thread has found undelivered tasks in job {}.", job.jobId); while (true) { while (nWaitingConsumers == 0) { LOG.debug("Task delivery thread is going to sleep, there are no consumers waiting."); // Thread will be notified when there are new incoming consumer connections. wait(); } LOG.debug("Task delivery thread is awake, and some consumers are waiting."); logQueueStatus(); // Here, we know there are some consumer connections waiting, but we don't know if they're still open. // First try to get a consumer with affinity for this graph LOG.debug("Looking for an eligible consumer, respecting graph affinity."); Deque<Response> deque = connectionsForGraph.get(job.graphId); while (deque != null && !deque.isEmpty()) { Response response = deque.pop(); nWaitingConsumers -= 1; if (deliver(job, response)) { return; } } // Then try to get a consumer from the graph with the most workers LOG.debug("No consumers with the right affinity. Looking for any consumer."); List<Deque<Response>> deques = new ArrayList<>(connectionsForGraph.values()); deques.sort((d1, d2) -> Integer.compare(d2.size(), d1.size())); for (Deque<Response> d : deques) { while (!d.isEmpty()) { Response response = d.pop(); nWaitingConsumers -= 1; if (deliver(job, response)) { return; } } } // No workers were available to accept the tasks. The thread should wait on the next iteration. LOG.debug("No consumer was available. They all must have closed their connections."); if (nWaitingConsumers != 0) { throw new AssertionError("There should be no waiting consumers here, something is wrong."); } } } /** * Attempt to hand some tasks from the given job to a waiting consumer connection. * The write will fail if the consumer has closed the connection but it hasn't been removed from the connection * queue yet because the Broker methods are synchronized (the removal action is waiting to get the monitor). * @return whether the handoff succeeded. */ public synchronized boolean deliver (Job job, Response response) { // Check up-front whether the connection is still open. if (!response.getRequest().getRequest().getConnection().isOpen()) { LOG.debug("Consumer connection was closed. It will be removed."); return false; } // Get up to N tasks from the visibleTasks deque List<AnalystClusterRequest> tasks = new ArrayList<>(); while (tasks.size() < 4 && !job.visibleTasks.isEmpty()) { tasks.add(job.visibleTasks.poll()); } // Attempt to deliver the tasks to the given consumer. try { response.setStatus(HttpStatus.OK_200); OutputStream out = response.getOutputStream(); mapper.writeValue(out, tasks); response.resume(); } catch (IOException e) { // The connection was probably closed by the consumer, but treat it as a server error. LOG.debug("Consumer connection caused IO error, it will be removed."); response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR_500); response.resume(); // Delivery failed, put tasks back on (the end of) the queue. 
job.visibleTasks.addAll(tasks); return false; } // Delivery succeeded, move tasks from undelivered to delivered status LOG.debug("Delivery of {} tasks succeeded.", tasks.size()); nUndeliveredTasks -= tasks.size(); job.markTasksDelivered(tasks); return true; } /** * Take a task out of the job, marking it as completed. The body of this DELETE request... * @return whether the task was found and removed. */ public synchronized boolean deleteTask (QueuePath queuePath) { User user = findUser(queuePath.userId, false); if (user == null) { return false; } Job job = user.findJob(queuePath.jobId, false); if (job == null) { return false; } // There could be thousands of invisible (delivered) tasks, so we use a hash map. // We only allow removal of invisible tasks for now. // Return whether removal call discovered an existing task. return job.invisibleTasks.remove(queuePath.taskId) != null; } // Todo: occasionally purge closed connections from connectionsForGraph @Override public void run() { while (true) { try { deliverTasksForOneJob(); } catch (InterruptedException e) { LOG.warn("Task pump thread was interrupted."); return; } } } /** Search through the users to find one with the given ID, without advancing the head of the circular list. */ public User findUser (String userId, boolean create) { for (User user : users) { if (user.userId.equals(userId)) { return user; } } if (create) { User user = new User(userId); users.insertAtTail(user); return user; } return null; } }
src/main/java/org/opentripplanner/analyst/broker/Broker.java
package org.opentripplanner.analyst.broker; import com.fasterxml.jackson.databind.ObjectMapper; import org.glassfish.grizzly.http.server.Response; import org.glassfish.grizzly.http.util.HttpStatus; import org.opentripplanner.analyst.cluster.AnalystClusterRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Deque; import java.util.HashMap; import java.util.List; import java.util.Map; /** * This class watches for incoming requests for work tasks, and attempts to match them to enqueued tasks. * It draws tasks fairly from all users, and fairly from all jobs within each user, while attempting to respect the * cache affinity of each worker (give it tasks on the same graph it has been working on recently). * * When no work is available, the polling functions return immediately. Workers are expected to sleep and re-poll * after a few tens of seconds. * * TODO if there is a backlog of work (the usual case when jobs are lined up) workers will constantly change graphs * We need a queue of deferred work: (job, timestamp) when a job would have fairly had its work consumed if a worker was available. * Anything that survives at the head of that queue for more than e.g. one minute gets forced on a non-affinity worker. * Any new workers without an affinity preferentially pull work off the deferred queue. * Polling worker connections scan the deferred queue before ever going to the main circular queue. * When the deferred queue exceeds a certain size, that's when we must start more workers. * * We should distinguish between two cases: * 1. we were waiting for work and woke up because work became available. * 2. we were waiting for a consumer and woke up when one arrived. * * The first case implies that many workers should migrate toward the new work. * * Two key ideas are: * 1. Least recently serviced queue of jobs * 2. Affinity Homeostasis * * If we can constantly keep track of the ideal proportion of workers by graph (based on active queues), * and the true proportion of consumers by graph (based on incoming requests) then we can decide when a worker's graph * affinity should be ignored. * * It may also be helpful to mark jobs every time they are skipped in the LRU queue. Each time a job is serviced, * it is taken out of the queue and put at its end. Jobs that have not been serviced float to the top. */ public class Broker implements Runnable { // TODO catalog of recently seen consumers by affinity with IP: response.getRequest().getRemoteAddr(); private static final Logger LOG = LoggerFactory.getLogger(Broker.class); private CircularList<User> users = new CircularList<>(); private int nUndeliveredTasks = 0; private int nWaitingConsumers = 0; // including some that might be closed private int nextTaskId = 0; private ObjectMapper mapper = new ObjectMapper(); /** Outstanding requests from workers for tasks, grouped by worker graph affinity. */ Map<String, Deque<Response>> connectionsForGraph = new HashMap<>(); // Queue of tasks to complete Delete, Enqueue etc. 
to avoid synchronizing all the functions public synchronized void enqueueTasks (QueuePath queuePath, Collection<AnalystClusterRequest> tasks) { LOG.debug("Queue {}", queuePath); // Assumes tasks are pre-validated and are all on the same user/job User user = findUser(queuePath.userId, true); Job job = user.findJob(queuePath.jobId, true); for (AnalystClusterRequest task : tasks) { task.taskId = nextTaskId++; job.addTask(task); nUndeliveredTasks += 1; LOG.debug("Enqueued task id {} in job {}", task.taskId, job.jobId); } // Wake up the delivery thread if it's waiting on input. // This wakes whatever thread called wait() while holding the monitor for this Broker object. notify(); } /** Long poll operations are enqueued here. */ public synchronized void registerSuspendedResponse(String graphId, Response response) { // The workers are not allowed to request a specific job or task, just a specific graph and queue type. Deque<Response> deque = connectionsForGraph.get(graphId); if (deque == null) { deque = new ArrayDeque<>(); connectionsForGraph.put(graphId, deque); } deque.addLast(response); nWaitingConsumers += 1; // Wake up the delivery thread if it's waiting on consumers. // This is whatever thread called wait() while holding the monitor for this QBroker object. notify(); } /** When we notice that a long poll connection has closed, we remove it here. */ public synchronized boolean removeSuspendedResponse(String graphId, Response response) { Deque<Response> deque = connectionsForGraph.get(graphId); if (deque == null) { return false; } if (deque.remove(response)) { nWaitingConsumers -= 1; LOG.debug("Removed closed connection from queue."); logQueueStatus(); return true; } return false; } private void logQueueStatus() { LOG.info("Status {} undelivered, {} consumers waiting.", nUndeliveredTasks, nWaitingConsumers); } /** * Pull the next job queue with undelivered work fairly from users and jobs. * Pass some of that work to a worker, blocking if necessary until there are workers available. */ public synchronized void deliverTasksForOneJob () throws InterruptedException { // Wait until there are some undelivered tasks. while (nUndeliveredTasks == 0) { LOG.debug("Task delivery thread is going to sleep, there are no tasks waiting for delivery."); logQueueStatus(); wait(); } LOG.debug("Task delivery thread is awake and there are some undelivered tasks."); logQueueStatus(); // Circular lists retain iteration state via their head pointers. Job job = null; while (job == null) { User user = users.advance(); if (user == null) { LOG.error("There should always be at least one user here, because there is an undelivered task."); } job = user.jobs.advanceToElement(e -> e.visibleTasks.size() > 0); } // We have found job with some undelivered tasks. Give them to a consumer, // waiting until one is available even if this means ignoring graph affinity. LOG.debug("Task delivery thread has found undelivered tasks in job {}.", job.jobId); while (true) { while (nWaitingConsumers == 0) { LOG.debug("Task delivery thread is going to sleep, there are no consumers waiting."); // Thread will be notified when there are new incoming consumer connections. wait(); } LOG.debug("Task delivery thread is awake, and some consumers are waiting."); logQueueStatus(); // Here, we know there are some consumer connections waiting, but we don't know if they're still open. 
// First try to get a consumer with affinity for this graph LOG.debug("Looking for an eligible consumer, respecting graph affinity."); Deque<Response> deque = connectionsForGraph.get(job.graphId); while (deque != null && !deque.isEmpty()) { Response response = deque.pop(); nWaitingConsumers -= 1; if (deliver(job, response)) { return; } } // Then try to get a consumer from the graph with the most workers LOG.debug("No consumers with the right affinity. Looking for any consumer."); List<Deque<Response>> deques = new ArrayList<>(connectionsForGraph.values()); deques.sort((d1, d2) -> Integer.compare(d2.size(), d1.size())); for (Deque<Response> d : deques) { while (!d.isEmpty()) { Response response = d.pop(); nWaitingConsumers -= 1; if (deliver(job, response)) { return; } } } // No workers were available to accept the tasks. The thread should wait on the next iteration. LOG.debug("No consumer was available. They all must have closed their connections."); if (nWaitingConsumers != 0) { throw new AssertionError("There should be no waiting consumers here, something is wrong."); } } } /** * Attempt to hand some tasks from the given job to a waiting consumer connection. * The write will fail if the consumer has closed the connection but it hasn't been removed from the connection * queue yet because the Broker methods are synchronized (the removal action is waiting to get the monitor). * @return whether the handoff succeeded. */ public synchronized boolean deliver (Job job, Response response) { // Check up-front whether the connection is still open. if (!response.getRequest().getRequest().getConnection().isOpen()) { LOG.debug("Consumer connection was closed. It will be removed."); return false; } // Get up to N tasks from the visibleTasks deque List<AnalystClusterRequest> tasks = new ArrayList<>(); while (tasks.size() < 4 && !job.visibleTasks.isEmpty()) { tasks.add(job.visibleTasks.poll()); } // Attempt to deliver the tasks to the given consumer. try { response.setStatus(HttpStatus.OK_200); OutputStream out = response.getOutputStream(); mapper.writeValue(out, tasks); response.resume(); } catch (IOException e) { // The connection was probably closed by the consumer, but treat it as a server error. LOG.debug("Consumer connection caused IO error, it will be removed."); response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR_500); response.resume(); // Delivery failed, put tasks back on (the end of) the queue. job.visibleTasks.addAll(tasks); return false; } // Delivery succeeded, move tasks from undelivered to delivered status LOG.debug("Delivery of {} tasks succeeded.", tasks.size()); nUndeliveredTasks -= tasks.size(); job.markTasksDelivered(tasks); return true; } /** * Take a task out of the job, marking it as completed. The body of this DELETE request... * @return whether the task was found and removed. */ public synchronized boolean deleteTask (QueuePath queuePath) { User user = findUser(queuePath.userId, false); if (user == null) { return false; } Job job = user.findJob(queuePath.jobId, false); if (job == null) { return false; } // There could be thousands of invisible (delivered) tasks, so we use a hash map. // We only allow removal of invisible tasks for now. // Return whether removal call discovered an existing task. 
return job.invisibleTasks.remove(queuePath.taskId) != null; } // Todo: occasionally purge closed connections from connectionsForGraph @Override public void run() { while (true) { try { deliverTasksForOneJob(); } catch (InterruptedException e) { LOG.warn("Task pump thread was interrupted."); return; } } } /** Search through the users to find one with the given ID, without advancing the head of the circular list. */ public User findUser (String userId, boolean create) { for (User user : users) { if (user.userId.equals(userId)) { return user; } } if (create) { User user = new User(userId); users.insertAtTail(user); return user; } return null; } }
stubs for high-priority tasks (single-point)
src/main/java/org/opentripplanner/analyst/broker/Broker.java
stubs for high-priority tasks (single-point)
Java
mit
5d1d9ca7d2fd68b0c09cbffef19413a0c99b182c
0
FRCTeamPhoenix/Tester-Playground
package org.usfirst.frc.team2342.robot; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class Robot extends IterativeRobot { RobotDrive myRobot = new RobotDrive(0, 3); Joystick stick = new Joystick(0); Joystick stick2 = new Joystick(1); Timer timer = new Timer(); /** * This function is run when the robot is first started up and should be * used for any initialization code. */ @Override public void robotInit() { } /** * This function is run once each time the robot enters autonomous mode */ @Override public void autonomousInit() { timer.reset(); timer.start(); } /** * This function is called periodically during autonomous */ @Override public void autonomousPeriodic() { // Drive for 2 seconds if (timer.get() < 2.0) { myRobot.drive(-0.5, 0.0); // drive forwards half speed } else { myRobot.drive(0.0, 0.0); // stop robot } } /** * This function is called once each time the robot enters tele-operated * mode */ @Override public void teleopInit() { //Memes for your entertainment SmartDashboard.putString("DB/String 0", "D4NK M3M35 ACTIVATED"); SmartDashboard.putString("DB/String 1", "P3P3 ACTIVATED"); SmartDashboard.putString("DB/String 2", "GN0M3 CH1LD ACTIVATED"); SmartDashboard.putString("DB/String 3", "D0G3 ACTIVATED"); SmartDashboard.putString("DB/String 4", "5HR3K ACTIVATED"); SmartDashboard.putString("DB/String 5", "D4T B01 ACTIVATED"); SmartDashboard.putString("DB/String 6", "P1CKL3 R1CK ACTIVATED"); SmartDashboard.putString("DB/String 7", "MY N4M3 1S J3FF ACTIVATED"); SmartDashboard.putString("DB/String 8", "1llUM1N4T1 ACTIVATED"); SmartDashboard.putString("DB/String 9", "SN00P D0G ACTIVATED"); } /** * This function is called periodically during operator control */ @Override public void teleopPeriodic() { myRobot.tankDrive(-stick.getY() * 0.3, -stick2.getY() * 0.3); } /** * This function is called periodically during test mode */ @Override public void testPeriodic() { LiveWindow.run(); } }
src/org/usfirst/frc/team2342/robot/Robot.java
package org.usfirst.frc.team2342.robot; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class Robot extends IterativeRobot { RobotDrive myRobot = new RobotDrive(0, 1); Joystick stick = new Joystick(0); Joystick stick2 = new Joystick(1); Timer timer = new Timer(); /** * This function is run when the robot is first started up and should be * used for any initialization code. */ @Override public void robotInit() { } /** * This function is run once each time the robot enters autonomous mode */ @Override public void autonomousInit() { timer.reset(); timer.start(); } /** * This function is called periodically during autonomous */ @Override public void autonomousPeriodic() { // Drive for 2 seconds if (timer.get() < 2.0) { myRobot.drive(-0.5, 0.0); // drive forwards half speed } else { myRobot.drive(0.0, 0.0); // stop robot } } /** * This function is called once each time the robot enters tele-operated * mode */ @Override public void teleopInit() { //Memes for your entertainment SmartDashboard.putString("DB/String 0", "D4NK M3M35 ACTIVATED"); SmartDashboard.putString("DB/String 1", "P3P3 ACTIVATED"); SmartDashboard.putString("DB/String 2", "GN0M3 CH1LD ACTIVATED"); SmartDashboard.putString("DB/String 3", "D0G3 ACTIVATED"); SmartDashboard.putString("DB/String 4", "5HR3K ACTIVATED"); SmartDashboard.putString("DB/String 5", "D4T B01 ACTIVATED"); SmartDashboard.putString("DB/String 6", "P1CKL3 R1CK ACTIVATED"); SmartDashboard.putString("DB/String 7", "MY N4M3 1S J3FF ACTIVATED"); SmartDashboard.putString("DB/String 8", "1llUM1N4T1 ACTIVATED"); SmartDashboard.putString("DB/String 9", "SN00P D0G ACTIVATED"); } /** * This function is called periodically during operator control */ @Override public void teleopPeriodic() { myRobot.tankDrive(stick, stick2); } /** * This function is called periodically during test mode */ @Override public void testPeriodic() { LiveWindow.run(); } }
Fixed stuff
src/org/usfirst/frc/team2342/robot/Robot.java
Fixed stuff
Java
mit
5f0cee88a2adfc0aba371bfb1de45ebe35f60123
0
cs2103aug2014-w13-2j/main,cs2103aug2014-w13-2j/main
package edu.dynamic.dynamiz.UI; import java.awt.*; import java.awt.event.*; import javax.swing.*; import edu.dynamic.dynamiz.UI.DisplayerInterface; import edu.dynamic.dynamiz.controller.*; import edu.dynamic.dynamiz.structure.Feedback; /** * * @author XYLau * */ public class UI extends JPanel implements ActionListener { protected JTextField inputScreen; protected JTextArea displayScreen; private final static String newline = "\n"; public static Displayer disp = new Displayer(); public static Controller cont = new Controller(); public UI() { super(new GridBagLayout()); displayScreen = new JTextArea(40, 100); displayScreen.setEditable(false); JScrollPane scrollPane = new JScrollPane(displayScreen); inputScreen = new JTextField(20); inputScreen.addActionListener(this); // Add Components to this panel. GridBagConstraints c = new GridBagConstraints(); c.gridwidth = GridBagConstraints.REMAINDER; c.fill = GridBagConstraints.BOTH; c.weightx = 1.0; c.weighty = 1.0; add(scrollPane, c); c.fill = GridBagConstraints.HORIZONTAL; add(inputScreen, c); // Welcome message displayln(disp.displayWelcomeMessage()); displayln(disp.displayPrompt(1)); } public void run() { javax.swing.SwingUtilities.invokeLater(new Runnable() { public void run() { Screen(); } }); } /** * Event Handler for each event, where event refers to the entry of a single * command into the Screen interface */ public void actionPerformed(ActionEvent evt) { String text = inputScreen.getText(); /* * To be added once controller is completed (ZX) Feedback feedback = * controller.executeCommand(text); */ display(disp.displayPrompt()); displayln(text); // TODO: Awaiting Nhan's exit feedback // if (feedback.getCommandType().equalsIgnoreCase("exit")) { if (text.equalsIgnoreCase("exit")) { System.exit(0); } Feedback feedback = cont.executeCommand(text); display(disp.displayFeedback(feedback)); // Additional Feature: Retained Last-Entered Command inputScreen.selectAll(); // Make sure the new text is visible, even if there // was a selection in the text area. displayScreen.setCaretPosition(displayScreen.getDocument().getLength()); } /** * Displays a string onto the Screen with newline * @param text */ public void displayln(String text) { displayScreen.append(text + newline); } /** * Displays a string onto the Screen without a newline * * @param text */ public void display(String text) { displayScreen.append(text); } /** * Create the GUI and show it. For thread safety, this method should be * invoked from the event dispatch thread. */ public static void Screen() { // Create and set up the window. JFrame frame = new JFrame("Dynamiz"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Add contents to the window. frame.add(new UI()); displayScreen(frame); } /** * Displays the screen * * @param frame */ private static void displayScreen(JFrame frame) { frame.pack(); frame.setVisible(true); } public static void main(String[] args) { // Schedule a job for the event dispatch thread: // creating and showing this application's GUI. javax.swing.SwingUtilities.invokeLater(new Runnable() { public void run() { Screen(); } }); } }
src/edu/dynamic/dynamiz/UI/UI.java
package edu.dynamic.dynamiz.UI; import java.awt.*; import java.awt.event.*; import javax.swing.*; import edu.dynamic.dynamiz.UI.DisplayerInterface; import edu.dynamic.dynamiz.controller.*; import edu.dynamic.dynamiz.structure.Feedback; /** * * @author XYLau * */ public class UI extends JPanel implements ActionListener { protected JTextField inputScreen; protected JTextArea displayScreen; private final static String newline = "\n"; public static Displayer disp = new Displayer(); public static Controller cont = new Controller(); public UI() { super(new GridBagLayout()); displayScreen = new JTextArea(40, 100); displayScreen.setEditable(false); JScrollPane scrollPane = new JScrollPane(displayScreen); inputScreen = new JTextField(20); inputScreen.addActionListener(this); // Add Components to this panel. GridBagConstraints c = new GridBagConstraints(); c.gridwidth = GridBagConstraints.REMAINDER; c.fill = GridBagConstraints.BOTH; c.weightx = 1.0; c.weighty = 1.0; add(scrollPane, c); c.fill = GridBagConstraints.HORIZONTAL; add(inputScreen, c); // Welcome message displayln(disp.displayWelcomeMessage()); displayln(disp.displayPrompt(1)); } public void run() { javax.swing.SwingUtilities.invokeLater(new Runnable() { public void run() { Screen(); } }); } /** * Event Handler for each event, where event refers to the entry of a single * command into the Screen interface */ public void actionPerformed(ActionEvent evt) { String text = inputScreen.getText(); /* * To be added once controller is completed (ZX) Feedback feedback = * controller.executeCommand(text); */ display(disp.displayPrompt()); displayln(text); Feedback feedback = cont.executeCommand(text); display(disp.displayFeedback(feedback)); if (feedback.getCommandType().equalsIgnoreCase("exit")) { // close frame } // Additional Feature: Retained Last-Entered Command inputScreen.selectAll(); // Make sure the new text is visible, even if there // was a selection in the text area. displayScreen.setCaretPosition(displayScreen.getDocument().getLength()); } /** * Displays a string onto the Screen with newline * @param text */ public void displayln(String text) { displayScreen.append(text + newline); } /** * Displays a string onto the Screen without a newline * * @param text */ public void display(String text) { displayScreen.append(text); } /** * Create the GUI and show it. For thread safety, this method should be * invoked from the event dispatch thread. */ public static void Screen() { // Create and set up the window. JFrame frame = new JFrame("Dynamiz"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Add contents to the window. frame.add(new UI()); displayScreen(frame); } /** * Displays the screen * * @param frame */ private static void displayScreen(JFrame frame) { frame.pack(); frame.setVisible(true); } public static void main(String[] args) { // Schedule a job for the event dispatch thread: // creating and showing this application's GUI. javax.swing.SwingUtilities.invokeLater(new Runnable() { public void run() { Screen(); } }); } }
Establish exit function
src/edu/dynamic/dynamiz/UI/UI.java
Establish exit function
Java
mit
cd7cb7328bdb52e7aae38fbc9fda7665cb5b23af
0
sharpstewie/ICS4UFinal
/* GUESS THAT CHAMPION * * ---------------------------------------------------------------------------- * "Guess That Champion!" isn't endorsed by Riot Games and doesn't reflect * the views or opinions of Riot Games or anyone * officially involved in producing or managing League of Legends. * League of Legends and Riot Games are trademarks or * registered trademarks of Riot Games, Inc. League of Legends © Riot Games, Inc. * * ---------------------------------------------------------------------------- * * FEATURES * - Fully functional gameplay w/images of champion ability/passive as hint * - Select which categories to be tested on in menu screen * - Displays score in GUI * - Plays sound to let user know if their guess was correct * - Scoring becoomes disabled when timer reaches zero * - Player has a limited number of incorrect answers (lives) before their scoring doesn't work * - After playing on round, game loops to the category select screen * - Pressing the exit button closes the program (everywhere except during disclaimer) * * NEW FEATURES * - Time limit can now be changed to 30, 60, 90, 120, or 150 seconds * * PLANNED FEATURES * - Select which *champion* categories you'll be tested on (only Marksmen, only Fighters, etc.) * - System to keep track of high scores * * CODE ADJUSTMENTS * - Some commenting * * KNOWN BUGS * - Takes extremely long time to contact RiotAPI for full champion list (on slow connection) * */ import java.awt.Color; import java.awt.EventQueue; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import javax.imageio.ImageIO; import javax.sound.sampled.AudioInputStream; import javax.sound.sampled.AudioSystem; import javax.sound.sampled.Clip; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JLayeredPane; import javax.swing.JPanel; import javax.swing.Timer; import javax.swing.border.BevelBorder; import javax.swing.border.SoftBevelBorder; import com.robrua.orianna.type.core.staticdata.Champion; public class guiGuess_v1_3 { // Instance variables // JFrame variables static JLayeredPane layeredPane; static JPanel mainPane; static JPanel timerBG; static JFrame frame; static GridBagLayout gridbag; static GridBagConstraints c; static JLabel lblBG; // Champion variables static ArrayList<Integer> used = new ArrayList<Integer>(); static ArrayList<Integer> usedFills = new ArrayList<Integer>(); static List<Champion> champions; static Champion champ; static Font text; static BufferedImage champAbi; static BufferedImage champPics[] = new BufferedImage[4]; static JButton champButts[] = new JButton[4]; // Hint variables static JLabel champAbility; static boolean passive; static boolean regular; static boolean ultimate; // Counters/temporary variables static String pass; static int answer; static int i; // Keep track of score static JLabel scoreLabel; static JLabel pointsLabel; static int score = 0; static int total = 0; static int points = 0; // Keep track of lives static JLabel lblLife1; static JLabel lblLife2; static JLabel lblLife3; static int lives = 3; // Keep track of time static StopWatch watch = new StopWatch(); static 
Timer timer; static JLabel timeLabel = new JLabel(); static long gameStart; static long roundStart; static long roundEnd; static long roundTime; static int cap; static long time = cap; /* Default: Passives [X] * Regular abilities [X] * Ultimate ability [X] */ public guiGuess_v1_3(List<Champion> champs, int limit) throws IOException{ passive = true; regular = true; ultimate = true; champions = champs; cap = limit; play(); } /* * Use parameters to select which types of icons to display */ public guiGuess_v1_3(List<Champion> champs, int limit, boolean doPassives, boolean doRegulars, boolean doUltimates) throws IOException{ passive = doPassives; regular = doRegulars; ultimate = doUltimates; champions = champs; cap = limit; play(); } /* * Display first set of icons */ protected static void play() throws IOException{ // Create JFrame frame = new JFrame("Guess That Champion!"); frame.setResizable(false); frame.setBounds(100, 100, 800, 600); frame.getContentPane().setLayout(null); frame.setLocationRelativeTo(null); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Initialize layered frame system; crucial to getting background image to work layeredPane = new JLayeredPane(); layeredPane.setBounds(0, 0, 800, 578); frame.getContentPane().add(layeredPane); // Background image setup/assignment BufferedImage BG = ImageIO.read(new File("lib//akaliBG.jpg")); lblBG = new JLabel(new ImageIcon(BG)); layeredPane.setLayer(lblBG, 0); lblBG.setBounds(0, 0, 800, 578); layeredPane.add(lblBG); // Add pane for buttons/content mainPane = new JPanel(); mainPane.setForeground(new Color(255, 255, 255)); layeredPane.setLayer(mainPane, 2); mainPane.setBounds(0, 0, 800, 578); layeredPane.add(mainPane); mainPane.setBackground(null); mainPane.setOpaque(false); mainPane.setLayout(null); // Choose title of application scoreLabel = new JLabel("Score: " + score + " / " + total); pointsLabel = new JLabel("Points: " + points); // Select champion, choose hint to be displayed champ = newChamp(); getAbilityType(); // Load and display image to be displayed as hint try{ champAbi = ImageIO.read(new File("lib/images/abilities/" + champ.getName() + "_" + pass + ".png")); }catch(IOException e){ System.out.println("lib/images/abilities/" + champ.getName() + "_" + pass + ".png"); } champAbility = new JLabel(new ImageIcon(champAbi)); // Load and display correct champion image, and 3 other champions answer = (int) (4 * Math.random()); for(int i = 0; i < champPics.length; i++){ if(i==answer) champPics[i] = ImageIO.read(new File("lib/images/champs/" + champ.getName() + ".png")); else champPics[i] = ImageIO.read(new File(newChampFill())); champButts[i] = new JButton(new ImageIcon(champPics[i])); } // Initialize buttons, score label and timers. 
champAbility.setBounds(368, 102, 64, 64); champButts[0].setBounds(270, 190, 120, 120); champButts[1].setBounds(408, 190, 120, 120); champButts[2].setBounds(270, 320, 120, 120); champButts[3].setBounds(408, 320, 120, 120); // Initialize score label pointsLabel.setFont(new Font("Bangla MN", Font.BOLD, 13)); pointsLabel.setForeground(new Color(255, 255, 255)); pointsLabel.setBounds(351, 470, 113, 25); // Initialize timers timeLabel.setText(Long.toString(time)); timeLabel.setFont(new Font("Lucida Grande", Font.PLAIN, 21)); timeLabel.setForeground(new Color(255, 255, 255)); timeLabel.setBounds(384, 512, 50, 37); // Initialize life bars BufferedImage life = ImageIO.read(new File("lib//images//lives.png")); lblLife1 = new JLabel(new ImageIcon(life)); lblLife1.setBounds(650, 200, 64, 64); lblLife2 = new JLabel(new ImageIcon(life)); lblLife2.setBounds(650, 274, 64, 64); lblLife3 = new JLabel(new ImageIcon(life)); lblLife3.setBounds(650, 348, 64, 64); // Add elements to pane mainPane.add(champAbility); mainPane.add(champButts[0]); mainPane.add(champButts[1]); mainPane.add(champButts[2]); mainPane.add(champButts[3]); mainPane.add(pointsLabel); mainPane.add(timeLabel); mainPane.add(lblLife1); mainPane.add(lblLife2); mainPane.add(lblLife3); // Background for timer/score tracker timerBG = new JPanel(); timerBG.setBorder(new SoftBevelBorder(BevelBorder.LOWERED, null, null, null, null)); timerBG.setBackground(new Color(0, 0, 0)); layeredPane.setLayer(timerBG, 1); timerBG.setBounds(300, 460, 200, 100); layeredPane.add(timerBG); // Add listeners to buttons champButts[0].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { handleScore(0); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[1].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { handleScore(1); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[2].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { handleScore(2); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[3].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { handleScore(3); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); // Create game timer int delay = 1000; //milliseconds ActionListener taskPerformer = new ActionListener() { public void 
actionPerformed(ActionEvent evt) { gameStart = watch.getElapsedTimeSecs(); if(gameStart < cap) timeLabel.setText(Long.toString(cap-gameStart)); else timeLabel.setText("0"); timeLabel.setVisible(true); } }; // Start timers new Timer(delay, taskPerformer).start(); watch.start(); gameStart = watch.getElapsedTimeSecs(); roundStart = watch.getElapsedTimeSecs(); // Refresh frame with new elements frame.setVisible(true); } /* * Display next set of icons */ public static void nextRound() throws IOException{ // Remove elements from pane mainPane.remove(champAbility); mainPane.remove(champButts[0]); mainPane.remove(champButts[1]); mainPane.remove(champButts[2]); mainPane.remove(champButts[3]); mainPane.remove(pointsLabel); mainPane.remove(timeLabel); mainPane.remove(lblLife1); mainPane.remove(lblLife2); mainPane.remove(lblLife3); layeredPane.remove(timerBG); // Select new champion, choose hint to be displayed champ = newChamp(); answer = (int) (4 * Math.random()); getAbilityType(); // Load and display hint image try{ champAbi = ImageIO.read(new File("lib/images/abilities/" + champ.getName() + "_" + pass + ".png")); }catch (IOException e){System.out.println("Can't read: lib/images/abilities/" + champ.getName() + "_" + pass + ".png");} frame.getContentPane().remove(champAbility); champAbility = new JLabel(new ImageIcon(champAbi)); // Load and display correct champion image, and 3 other champion images for(int i = 0; i < champPics.length; i++){ if(i==answer){ try{ champPics[i] = ImageIO.read(new File("lib/images/champs/" + champ.getName() + ".png")); }catch(IOException e) {System.out.println("lib/images/champs/" + champ.getName() + ".png");} }else{ try{ champPics[i] = ImageIO.read(new File(newChampFill())); }catch(IOException e){ System.out.println();} } } for(int i = 0; i < champButts.length; i++){ frame.getContentPane().remove(champButts[i]); champButts[i] = new JButton(new ImageIcon(champPics[i])); champButts[i].setVisible(true); } // Display score and time scoreLabel = new JLabel("Score: " + score + " / " + total); pointsLabel = new JLabel("Points: " + points); scoreLabel.setVisible(true); timeLabel.setVisible(true); pointsLabel.setVisible(true); // Initialize buttons, score label and timers. 
champAbility.setBounds(368, 102, 64, 64); champButts[0].setBounds(270, 190, 120, 120); champButts[1].setBounds(408, 190, 120, 120); champButts[2].setBounds(270, 320, 120, 120); champButts[3].setBounds(408, 320, 120, 120); // Initialize score label pointsLabel.setFont(new Font("Bangla MN", Font.BOLD, 13)); pointsLabel.setForeground(new Color(255, 255, 255)); pointsLabel.setBounds(351, 470, 113, 25); // Initialize timers timeLabel.setFont(new Font("Lucida Grande", Font.PLAIN, 21)); timeLabel.setForeground(new Color(255, 255, 255)); timeLabel.setBounds(384, 512, 50, 37); // Add elements to pane mainPane.add(champAbility); mainPane.add(champButts[0]); mainPane.add(champButts[1]); mainPane.add(champButts[2]); mainPane.add(champButts[3]); mainPane.add(pointsLabel); mainPane.add(timeLabel); layeredPane.add(timerBG); // Manage lives count if(lives>2) mainPane.add(lblLife1); if(lives>1) mainPane.add(lblLife2); if(lives>0) mainPane.add(lblLife3); // Add listeners to buttons champButts[0].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)){ handleScore(0); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } }else{ frame.setVisible(false); newGame(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[1].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)){ handleScore(0); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; nextRound(); }else{ frame.setVisible(false); newGame(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[2].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)){ handleScore(2); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; nextRound(); }else{ frame.setVisible(false); newGame(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[3].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)){ handleScore(3); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; nextRound(); }else{ frame.setVisible(false); newGame(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); // Refresh frame mainPane.setVisible(false); layeredPane.setVisible(false); mainPane.setVisible(true); layeredPane.setVisible(true); } /* * Starts a fresh new game */ public static void newGame(){ frame.setVisible(false); EventQueue.invokeLater(new Runnable() { public void run() { try { // Reset stats lives = 3; mainFrame_v4 window = new mainFrame_v4(points); points = 0; window.frame.setVisible(true); } catch (Exception e) { e.printStackTrace(); } } }); } /* * Find new champ, mark as used */ public static Champion newChamp(){ // Pull random 
champion from list int index = (int)(champions.size() * Math.random()); // Make sure champion hasn't already been used as an answer while(used.contains(index)) index = (int)(champions.size() * Math.random()); // Save that champion Champion c = champions.get(index); // Add champion to used array used.add(index); return c; } /* * Find new champ to fill in empty slot, don't mark as used yet */ public static String newChampFill(){ // Pull random champion from list int index = (int)(champions.size() * Math.random()); // Make sure champion hasn't already been used as an answer while(used.contains(index)||usedFills.contains(index)) index = (int)(champions.size() * Math.random()); // Save that champion Champion c = champions.get(index); usedFills.add(index); //Return the appropriate file name return "lib/images/champs/" + c.getName() + ".png"; } /* * Generate string for an ability type to display */ public static void getAbilityType(){ String returnThis = ""; if(passive){ if(regular){ if(ultimate){ // All enabled int rn = (int) (5 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "W"; else if(rn==2) returnThis = "E"; else if(rn==3) returnThis = "R"; else returnThis = "Passive"; }else{ // No ultimates int rn = (int) (4 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "W"; else if(rn==2) returnThis = "E"; else returnThis = "Passive"; } }else{ if(ultimate){ // No regular abilities int rn = (int) (3 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "R"; else returnThis = "Passive"; }else{ // Only passives returnThis = "Passive"; } } }else{ if(regular){ if(ultimate){ // No passive int rn = (int) (4 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "W"; else if(rn==2) returnThis = "E"; else returnThis = "R"; } else{ // Only regular abilities int rn = (int) (3 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "W"; else returnThis = "E"; } }else{ if(ultimate){ // Only ultimates returnThis = "R"; } } } pass = returnThis; } /* * Play a sound */ public static void playSound(String soundFile){ try { // Create AudioStream from sound file AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(new File(soundFile).getAbsoluteFile()); // Create clip from AudioStream and play clip Clip clip = AudioSystem.getClip(); clip.open(audioInputStream); clip.start(); } catch(Exception ex) { System.out.println("Sound error on playing file: " + soundFile); ex.printStackTrace(); } } /* * Increment score when needed, and total guesses always */ public static void handleScore(int spot) throws IOException{ if((total < champions.size() - 3) && (gameStart < cap)){ int inc = (int) (400 / Math.pow(2, roundTime)); int dec = (int) (300 / Math.pow(1.5, roundTime)); // Play appropriate sound, change score if(answer==spot){ playSound("lib/sounds/correct.wav"); score++; points += inc; }else{ playSound("lib/sounds/incorrect.wav"); lives--; points -= dec; } total++; // System.out.println("P: " + points); } // Refresh score / points scoreLabel.setVisible(true); pointsLabel.setVisible(true); } }
src/guiGuess_v1_3.java
/* GUESS THAT CHAMPION * * ---------------------------------------------------------------------------- * "Guess That Champion!" isn't endorsed by Riot Games and doesn't reflect * the views or opinions of Riot Games or anyone * officially involved in producing or managing League of Legends. * League of Legends and Riot Games are trademarks or * registered trademarks of Riot Games, Inc. League of Legends © Riot Games, Inc. * * ---------------------------------------------------------------------------- * * FEATURES * - Fully functional gameplay w/images of champion ability/passive as hint * - Select which categories to be tested on in menu screen * - Displays score in GUI * - Plays sound to let user know if their guess was correct * - Scoring becoomes disabled when timer reaches zero * - Player has a limited number of incorrect answers (lives) before their scoring doesn't work * - After playing on round, game loops to the category select screen * - Pressing the exit button closes the program (everywhere except during disclaimer) * * NEW FEATURES * - Time limit can now be changed to 30, 60, 90, 120, or 150 seconds * * PLANNED FEATURES * - Select which *champion* categories you'll be tested on (only Marksmen, only Fighters, etc.) * - System to keep track of high scores * * CODE ADJUSTMENTS * - Some commenting * * KNOWN BUGS * - Takes extremely long time to contact RiotAPI for full champion list (on slow connection) * */ import java.awt.Color; import java.awt.EventQueue; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import javax.imageio.ImageIO; import javax.sound.sampled.AudioInputStream; import javax.sound.sampled.AudioSystem; import javax.sound.sampled.Clip; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JLayeredPane; import javax.swing.JPanel; import javax.swing.Timer; import javax.swing.border.BevelBorder; import javax.swing.border.SoftBevelBorder; import com.robrua.orianna.type.core.staticdata.Champion; public class guiGuess_v1_3 { // Instance variables // JFrame variables static JLayeredPane layeredPane; static JPanel mainPane; static JPanel timerBG; static JFrame frame; static GridBagLayout gridbag; static GridBagConstraints c; static JLabel lblBG; // Champion variables static ArrayList<Integer> used = new ArrayList<Integer>(); static ArrayList<Integer> usedFills = new ArrayList<Integer>(); static List<Champion> champions; static Champion champ; static Font text; static BufferedImage champAbi; static BufferedImage champPics[] = new BufferedImage[4]; static JButton champButts[] = new JButton[4]; // Hint variables static JLabel champAbility; static boolean passive; static boolean regular; static boolean ultimate; // Counters/temporary variables static String pass; static int answer; static int i; // Keep track of score static JLabel scoreLabel; static JLabel pointsLabel; static int score = 0; static int total = 0; static int points = 0; // Keep track of lives static JLabel lblLife1; static JLabel lblLife2; static JLabel lblLife3; static int lives = 3; // Keep track of time static StopWatch watch = new StopWatch(); static 
Timer timer; static JLabel timeLabel = new JLabel(); static long gameStart; static long roundStart; static long roundEnd; static long roundTime; static int cap; static long time = cap; /* Default: Passives [X] * Regular abilities [X] * Ultimate ability [X] */ public guiGuess_v1_3(List<Champion> champs, int limit) throws IOException{ passive = true; regular = true; ultimate = true; champions = champs; cap = limit; play(); } /* * Use parameters to select which types of icons to display */ public guiGuess_v1_3(List<Champion> champs, int limit, boolean doPassives, boolean doRegulars, boolean doUltimates) throws IOException{ passive = doPassives; regular = doRegulars; ultimate = doUltimates; champions = champs; cap = limit; play(); } /* * Display first set of icons */ protected static void play() throws IOException{ // Create JFrame frame = new JFrame("Guess That Champion!"); frame.setResizable(false); frame.setBounds(100, 100, 800, 600); frame.getContentPane().setLayout(null); frame.setLocationRelativeTo(null); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Initialize layered frame system; crucial to getting background image to work layeredPane = new JLayeredPane(); layeredPane.setBounds(0, 0, 800, 578); frame.getContentPane().add(layeredPane); // Background image setup/assignment BufferedImage BG = ImageIO.read(new File("lib//akaliBG.jpg")); lblBG = new JLabel(new ImageIcon(BG)); layeredPane.setLayer(lblBG, 0); lblBG.setBounds(0, 0, 800, 578); layeredPane.add(lblBG); // Add pane for buttons/content mainPane = new JPanel(); mainPane.setForeground(new Color(255, 255, 255)); layeredPane.setLayer(mainPane, 2); mainPane.setBounds(0, 0, 800, 578); layeredPane.add(mainPane); mainPane.setBackground(null); mainPane.setOpaque(false); mainPane.setLayout(null); // Choose title of application scoreLabel = new JLabel("Score: " + score + " / " + total); pointsLabel = new JLabel("Points: " + points); // Select champion, choose hint to be displayed champ = newChamp(); getAbilityType(); // Load and display image to be displayed as hint try{ champAbi = ImageIO.read(new File("lib/images/abilities/" + champ.getName() + "_" + pass + ".png")); }catch(IOException e){ System.out.println("lib/images/abilities/" + champ.getName() + "_" + pass + ".png"); } champAbility = new JLabel(new ImageIcon(champAbi)); // Load and display correct champion image, and 3 other champions answer = (int) (4 * Math.random()); for(int i = 0; i < champPics.length; i++){ if(i==answer) champPics[i] = ImageIO.read(new File("lib/images/champs/" + champ.getName() + ".png")); else champPics[i] = ImageIO.read(new File(newChampFill())); champButts[i] = new JButton(new ImageIcon(champPics[i])); } // Initialize buttons, score label and timers. 
champAbility.setBounds(368, 102, 64, 64); champButts[0].setBounds(270, 190, 120, 120); champButts[1].setBounds(408, 190, 120, 120); champButts[2].setBounds(270, 320, 120, 120); champButts[3].setBounds(408, 320, 120, 120); // Initialize score label pointsLabel.setFont(new Font("Bangla MN", Font.BOLD, 13)); pointsLabel.setForeground(new Color(255, 255, 255)); pointsLabel.setBounds(351, 470, 113, 25); // Initialize timers timeLabel.setText(Long.toString(time)); timeLabel.setFont(new Font("Lucida Grande", Font.PLAIN, 21)); timeLabel.setForeground(new Color(255, 255, 255)); timeLabel.setBounds(384, 512, 50, 37); // Initialize life bars BufferedImage life = ImageIO.read(new File("lib//images//lives.png")); lblLife1 = new JLabel(new ImageIcon(life)); lblLife1.setBounds(650, 200, 64, 64); lblLife2 = new JLabel(new ImageIcon(life)); lblLife2.setBounds(650, 274, 64, 64); lblLife3 = new JLabel(new ImageIcon(life)); lblLife3.setBounds(650, 348, 64, 64); // Add elements to pane mainPane.add(champAbility); mainPane.add(champButts[0]); mainPane.add(champButts[1]); mainPane.add(champButts[2]); mainPane.add(champButts[3]); mainPane.add(pointsLabel); mainPane.add(timeLabel); mainPane.add(lblLife1); mainPane.add(lblLife2); mainPane.add(lblLife3); // Background for timer/score tracker timerBG = new JPanel(); timerBG.setBorder(new SoftBevelBorder(BevelBorder.LOWERED, null, null, null, null)); timerBG.setBackground(new Color(0, 0, 0)); layeredPane.setLayer(timerBG, 1); timerBG.setBounds(300, 460, 200, 100); layeredPane.add(timerBG); // Add listeners to buttons champButts[0].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { handleScore(0); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[1].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { handleScore(1); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[2].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { handleScore(2); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[3].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { handleScore(3); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); // Create game timer int delay = 1000; //milliseconds ActionListener 
taskPerformer = new ActionListener() { public void actionPerformed(ActionEvent evt) { gameStart = watch.getElapsedTimeSecs(); if(gameStart < cap) timeLabel.setText(Long.toString(cap-gameStart)); else timeLabel.setText("0"); timeLabel.setVisible(true); } }; // Start timers new Timer(delay, taskPerformer).start(); watch.start(); gameStart = watch.getElapsedTimeSecs(); roundStart = watch.getElapsedTimeSecs(); // Refresh frame with new elements frame.setVisible(true); } /* * Display next set of icons */ public static void nextRound() throws IOException{ // Remove elements from pane mainPane.remove(champAbility); mainPane.remove(champButts[0]); mainPane.remove(champButts[1]); mainPane.remove(champButts[2]); mainPane.remove(champButts[3]); mainPane.remove(pointsLabel); mainPane.remove(timeLabel); mainPane.remove(lblLife1); mainPane.remove(lblLife2); mainPane.remove(lblLife3); layeredPane.remove(timerBG); // Select new champion, choose hint to be displayed champ = newChamp(); answer = (int) (4 * Math.random()); getAbilityType(); // Load and display hint image try{ champAbi = ImageIO.read(new File("lib/images/abilities/" + champ.getName() + "_" + pass + ".png")); }catch (IOException e){System.out.println("Can't read: lib/images/abilities/" + champ.getName() + "_" + pass + ".png");} frame.getContentPane().remove(champAbility); champAbility = new JLabel(new ImageIcon(champAbi)); // Load and display correct champion image, and 3 other champion images for(int i = 0; i < champPics.length; i++){ if(i==answer){ try{ champPics[i] = ImageIO.read(new File("lib/images/champs/" + champ.getName() + ".png")); }catch(IOException e) {System.out.println("lib/images/champs/" + champ.getName() + ".png");} }else{ try{ champPics[i] = ImageIO.read(new File(newChampFill())); }catch(IOException e){ System.out.println();} } } for(int i = 0; i < champButts.length; i++){ frame.getContentPane().remove(champButts[i]); champButts[i] = new JButton(new ImageIcon(champPics[i])); champButts[i].setVisible(true); } // Display score and time scoreLabel = new JLabel("Score: " + score + " / " + total); pointsLabel = new JLabel("Points: " + points); scoreLabel.setVisible(true); timeLabel.setVisible(true); pointsLabel.setVisible(true); // Initialize buttons, score label and timers. 
champAbility.setBounds(368, 102, 64, 64); champButts[0].setBounds(270, 190, 120, 120); champButts[1].setBounds(408, 190, 120, 120); champButts[2].setBounds(270, 320, 120, 120); champButts[3].setBounds(408, 320, 120, 120); // Initialize score label pointsLabel.setFont(new Font("Bangla MN", Font.BOLD, 13)); pointsLabel.setForeground(new Color(255, 255, 255)); pointsLabel.setBounds(351, 470, 113, 25); // Initialize timers timeLabel.setFont(new Font("Lucida Grande", Font.PLAIN, 21)); timeLabel.setForeground(new Color(255, 255, 255)); timeLabel.setBounds(384, 512, 50, 37); // Add elements to pane mainPane.add(champAbility); mainPane.add(champButts[0]); mainPane.add(champButts[1]); mainPane.add(champButts[2]); mainPane.add(champButts[3]); mainPane.add(pointsLabel); mainPane.add(timeLabel); layeredPane.add(timerBG); // Manage lives count if(lives>2) mainPane.add(lblLife1); if(lives>1) mainPane.add(lblLife2); if(lives>0) mainPane.add(lblLife3); // Add listeners to buttons champButts[0].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)){ handleScore(0); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } }else{ frame.setVisible(false); newGame(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[1].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)){ handleScore(0); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } }else{ frame.setVisible(false); newGame(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[2].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)){ handleScore(2); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } }else{ frame.setVisible(false); newGame(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); champButts[3].addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent me) { try { if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)){ handleScore(3); // Scoring // End round timer roundEnd = watch.getElapsedTimeSecs(); roundTime = roundEnd-roundStart; // Start new round roundStart = roundEnd; if((total < champions.size() - 3) && (gameStart < cap) && (lives > 0)) nextRound(); else{ frame.dispose(); new mainFrame_v4(0); } }else{ frame.setVisible(false); newGame(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); // Refresh frame mainPane.setVisible(false); layeredPane.setVisible(false); mainPane.setVisible(true); layeredPane.setVisible(true); } /* * Starts a fresh new game */ public static void 
newGame(){ frame.setVisible(false); EventQueue.invokeLater(new Runnable() { public void run() { try { // Reset stats lives = 3; mainFrame_v4 window = new mainFrame_v4(points); points = 0; window.frame.setVisible(true); } catch (Exception e) { e.printStackTrace(); } } }); } /* * Find new champ, mark as used */ public static Champion newChamp(){ // Pull random champion from list int index = (int)(champions.size() * Math.random()); // Make sure champion hasn't already been used as an answer while(used.contains(index)) index = (int)(champions.size() * Math.random()); // Save that champion Champion c = champions.get(index); // Add champion to used array used.add(index); return c; } /* * Find new champ to fill in empty slot, don't mark as used yet */ public static String newChampFill(){ // Pull random champion from list int index = (int)(champions.size() * Math.random()); // Make sure champion hasn't already been used as an answer while(used.contains(index)||usedFills.contains(index)) index = (int)(champions.size() * Math.random()); // Save that champion Champion c = champions.get(index); usedFills.add(index); //Return the appropriate file name return "lib/images/champs/" + c.getName() + ".png"; } /* * Generate string for an ability type to display */ public static void getAbilityType(){ String returnThis = ""; if(passive){ if(regular){ if(ultimate){ // All enabled int rn = (int) (5 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "W"; else if(rn==2) returnThis = "E"; else if(rn==3) returnThis = "R"; else returnThis = "Passive"; }else{ // No ultimates int rn = (int) (4 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "W"; else if(rn==2) returnThis = "E"; else returnThis = "Passive"; } }else{ if(ultimate){ // No regular abilities int rn = (int) (3 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "R"; else returnThis = "Passive"; }else{ // Only passives returnThis = "Passive"; } } }else{ if(regular){ if(ultimate){ // No passive int rn = (int) (4 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "W"; else if(rn==2) returnThis = "E"; else returnThis = "R"; } else{ // Only regular abilities int rn = (int) (3 * Math.random()); if(rn==0) returnThis = "Q"; else if(rn==1) returnThis = "W"; else returnThis = "E"; } }else{ if(ultimate){ // Only ultimates returnThis = "R"; } } } pass = returnThis; } /* * Play a sound */ public static void playSound(String soundFile){ try { // Create AudioStream from sound file AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(new File(soundFile).getAbsoluteFile()); // Create clip from AudioStream and play clip Clip clip = AudioSystem.getClip(); clip.open(audioInputStream); clip.start(); } catch(Exception ex) { System.out.println("Sound error on playing file: " + soundFile); ex.printStackTrace(); } } /* * Increment score when needed, and total guesses always */ public static void handleScore(int spot) throws IOException{ if((total < champions.size() - 3) && (gameStart < cap)){ int inc = (int) (400 / Math.pow(2, roundTime)); int dec = (int) (300 / Math.pow(1.5, roundTime)); // Play appropriate sound, change score if(answer==spot){ playSound("lib/sounds/correct.wav"); score++; points += inc; }else{ playSound("lib/sounds/incorrect.wav"); lives--; points -= dec; } total++; // System.out.println("Lives: " + lives); } // Refresh score / points scoreLabel.setVisible(true); pointsLabel.setVisible(true); } }
Revert "Round ends immediately after running out of lives" This reverts commit c3f8bfa9a102899804a48e8d59047210928384db.
src/guiGuess_v1_3.java
Revert "Round ends immediately after running out of lives"
Java
mit
447a7cdfc5c68bade57dff630e13b9e20f99a69b
0
livoras/mirot
package server; import java.io.IOException; import java.net.ServerSocket; import java.net.Socket; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.json.*; import Common.src.Configuration; import Common.src.Logger; import Common.src.User; public class Server { public static ServerSocket server = null; public static Map<String, User> users = new HashMap<String, User>(); public static Map<String, ArrayList<User>> rooms = new HashMap<String, ArrayList<User>>(); public static void main(String[] args) throws JSONException, IOException { initServerSocekt(); waitForConnection(); } public static void initServerSocekt() throws IOException { server = new ServerSocket(Configuration.PORT); } public static void waitForConnection() throws IOException { Logger.log("Waiting for connection..."); while(true) { Socket client = server.accept(); new Thread(new SocketThread(client)).start(); } } public static void removeUser(String name) throws JSONException { users.remove(name); Logger.log(name + " logout, current total online users count is " + users.size()); Sender.sendUsersList(); } public static void leaveAllRooms(String name) { User toRemoveUser = users.get(name); Set<Entry<String, ArrayList<User>>> allRooms = rooms.entrySet(); for(Entry<String, ArrayList<User>> entry: allRooms) { ArrayList<User> users = entry.getValue(); String roomName = entry.getKey(); if (users.remove(toRemoveUser)) { Logger.log("Remove user " + toRemoveUser.name + " from a room: " + roomName); // Here should delete the room while there is nobody in the room. // if (users.size() == 0) { // rooms.remove(roomName); // } } } } }
src/server/Server.java
package server; import java.io.IOException; import java.net.ServerSocket; import java.net.Socket; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.json.*; import Common.src.Configuration; import Common.src.Logger; import Common.src.User; public class Server { public static ServerSocket server = null; public static Map<String, User> users = new HashMap<String, User>(); public static Map<String, ArrayList<User>> rooms = new HashMap<String, ArrayList<User>>(); public static void main(String[] args) throws JSONException, IOException { initServerSocekt(); waitForConnection(); } public static void initServerSocekt() throws IOException { server = new ServerSocket(Configuration.PORT); } public static void waitForConnection() throws IOException { Logger.log("Waiting for connection..."); while(true) { Socket client = server.accept(); new Thread(new SocketThread(client)).start(); } } public static void removeUser(String name) throws JSONException { users.remove(name); Logger.log(name + " logout, current total online users count is " + users.size()); Sender.sendUsersList(); } public static void leaveAllRooms(String name) { User toRemoveUser = users.get(name); Set<Entry<String, ArrayList<User>>> allRooms = rooms.entrySet(); for(Entry<String, ArrayList<User>> entry: allRooms) { ArrayList<User> users = entry.getValue(); String roomName = entry.getKey(); if (users.remove(toRemoveUser)) { Logger.log("Remove user " + toRemoveUser.name + " from a room: " + roomName); } } } }
add ui
src/server/Server.java
add ui
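Note on the record above (not part of the commit): leaveAllRooms carries a commented-out reminder to delete a room once nobody is left in it. Calling rooms.remove(roomName) inside the for-each over rooms.entrySet() would typically fail with a ConcurrentModificationException, which is likely why it stayed commented out. The sketch below shows one safe way to do that cleanup with an explicit Iterator; it uses String placeholders instead of the project's User and Logger classes so it is self-contained, and all names here are illustrative.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;

public class RoomCleanupSketch {
    // Mirrors Server.rooms, but with String users so the sketch compiles on its own.
    static Map<String, ArrayList<String>> rooms = new HashMap<>();

    static void leaveAllRooms(String user) {
        Iterator<Entry<String, ArrayList<String>>> it = rooms.entrySet().iterator();
        while (it.hasNext()) {
            Entry<String, ArrayList<String>> entry = it.next();
            // Remove the user from this room; if the room is now empty, drop the room too.
            if (entry.getValue().remove(user) && entry.getValue().isEmpty()) {
                it.remove(); // removal via the iterator avoids ConcurrentModificationException
            }
        }
    }

    public static void main(String[] args) {
        rooms.put("lobby", new ArrayList<>(Arrays.asList("alice")));
        rooms.put("dev", new ArrayList<>(Arrays.asList("alice", "bob")));
        leaveAllRooms("alice");
        System.out.println(rooms); // {dev=[bob]} -- "lobby" was deleted because it emptied out
    }
}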
Java
mit
110aaf540364c7ee5330eee59432f6d933524bb4
0
AlexxEG/G2_Skin_Tweaks
package com.gmail.alexellingsen.g2skintweaks; import it.gmariotti.android.colorpicker.calendarstock.ColorPickerDialog; import it.gmariotti.android.colorpicker.calendarstock.ColorPickerSwatch.OnColorSelectedListener; import android.app.Activity; import android.app.AlertDialog; import android.app.Fragment; import android.content.Context; import android.content.DialogInterface; import android.graphics.Color; import android.os.Bundle; import android.text.Html; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.SeekBar; import android.widget.SeekBar.OnSeekBarChangeListener; import android.widget.TextView; import android.widget.Toast; public class MainActivity extends Activity { private PlaceholderFragment fragment = null; private static SettingsHelper settings = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); settings = new SettingsHelper(this); if (savedInstanceState == null) { fragment = new PlaceholderFragment(); getFragmentManager().beginTransaction().add(R.id.container, fragment).commit(); } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_reset_default) { fragment.askResetToDefault(); return true; } else if (id == R.id.action_enable_debugging) { item.setChecked(!item.isChecked()); settings.putBoolean(Prefs.ENABLE_DEBUGGING, item.isChecked()); return true; } return super.onOptionsItemSelected(item); } public static class PlaceholderFragment extends Fragment { private View rootView = null; public PlaceholderFragment() { } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { rootView = inflater.inflate(R.layout.fragment_main, container, false); setupReplaceSwitch(); setupMessengerCustomization(); setupMessengerFontSize(); setupTurnOnScreenNewSMS(); return rootView; } private int[] getColorChoice(Context context) { int[] mColorChoices = null; String[] color_array = context.getResources().getStringArray(R.array.default_color_choice_values); if (color_array != null && color_array.length > 0) { mColorChoices = new int[color_array.length]; for (int i = 0; i < color_array.length; i++) { mColorChoices[i] = Color.parseColor(color_array[i]); } } return mColorChoices; } public void askResetToDefault() { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setTitle(getString(R.string.are_you_sure)); builder.setMessage(getString(R.string.confirm_reset_message)); builder.setPositiveButton(getString(R.string.yes), new AlertDialog.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { resetToDefault(); } }); builder.setNegativeButton(getString(R.string.no), new AlertDialog.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // Do nothing } }); builder.show(); } public void resetToDefault() { ((CheckBox) rootView.findViewById(R.id.chb_replace_switch)).setChecked(false); ((CheckBox) 
rootView.findViewById(R.id.chb_square_bubble)).setChecked(false); ((Button) rootView.findViewById(R.id.btn_square_left_color)).setBackgroundColor(Color.WHITE); ((Button) rootView.findViewById(R.id.btn_square_right_color)).setBackgroundColor(Color.WHITE); ((CheckBox) rootView.findViewById(R.id.chb_sms_text_color)).setChecked(false); ((Button) rootView.findViewById(R.id.btn_sms_text_color_left)).setBackgroundColor(Color.BLACK); ((Button) rootView.findViewById(R.id.btn_sms_text_color_right)).setBackgroundColor(Color.BLACK); ((CheckBox) rootView.findViewById(R.id.chb_messenger_font_size)).setChecked(false); ((CheckBox) rootView.findViewById(R.id.chb_turn_on_screen)).setChecked(true); // Listeners will update most preferences settings.putInt(Prefs.SQUARE_COLOR_LEFT, Color.WHITE); settings.putInt(Prefs.SQUARE_COLOR_RIGHT, Color.WHITE); settings.putInt(Prefs.SMS_TEXT_COLOR_LEFT, Color.BLACK); settings.putInt(Prefs.SMS_TEXT_COLOR_RIGHT, Color.BLACK); settings.putInt(Prefs.SMS_BODY_SIZE, 18); settings.putInt(Prefs.SMS_DATE_SIZE, 18); updateFontSizeButton(18, 18); String text = getString(R.string.reboot_notice); Toast.makeText(getActivity(), text, Toast.LENGTH_LONG).show(); } private void setupMessengerFontSize() { boolean ENABLE_MESSENGER_FONT_SIZE = settings.getBoolean(Prefs.ENABLE_SMS_FONT_SIZE, false); final Button btnMessengerSetFontSize = (Button) rootView.findViewById(R.id.btn_messenger_set_font_size); int body_size = settings.getInt(Prefs.SMS_BODY_SIZE, 18); int date_size = settings.getInt(Prefs.SMS_DATE_SIZE, 18); updateFontSizeButton(body_size, date_size); btnMessengerSetFontSize.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showMessengerFontSizePicker(); } }); CheckBox chbMessengerFontSize = (CheckBox) rootView.findViewById(R.id.chb_messenger_font_size); chbMessengerFontSize.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { btnMessengerSetFontSize.setEnabled(isChecked); settings.putBoolean(Prefs.ENABLE_SMS_FONT_SIZE, isChecked); } }); chbMessengerFontSize.setChecked(ENABLE_MESSENGER_FONT_SIZE); } private void setupReplaceSwitch() { boolean ENABLE_REPLACE_SWITCH = settings.getBoolean(Prefs.ENABLE_REPLACE_SWICTH, false); CheckBox chbReplaceSwitch = (CheckBox) rootView.findViewById(R.id.chb_replace_switch); chbReplaceSwitch.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.ENABLE_REPLACE_SWICTH, isChecked); } }); chbReplaceSwitch.setChecked(ENABLE_REPLACE_SWITCH); } private void setupMessengerCustomization() { boolean ENABLE_SQUARE_BUBBLE = settings.getBoolean(Prefs.ENABLE_SQUARE_BUBBLE, false); boolean ENABLE_SMS_TEXT_COLOR = settings.getBoolean(Prefs.ENABLE_SMS_TEXT_COLOR, false); final Button btnSquareLeftColor = (Button) rootView.findViewById(R.id.btn_square_left_color); final Button btnSquareRightColor = (Button) rootView.findViewById(R.id.btn_square_right_color); final Button btnSmsTextColorLeft = (Button) rootView.findViewById(R.id.btn_sms_text_color_left); final Button btnSmsTextColorRight = (Button) rootView.findViewById(R.id.btn_sms_text_color_right); btnSquareLeftColor.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showSquareColorPicker(v, true); } }); btnSquareRightColor.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showSquareColorPicker(v, false); } }); 
btnSquareLeftColor.setEnabled(ENABLE_SQUARE_BUBBLE); btnSquareRightColor.setEnabled(ENABLE_SQUARE_BUBBLE); btnSquareLeftColor.setBackgroundColor(settings.getInt(Prefs.SQUARE_COLOR_LEFT, Color.WHITE)); btnSquareRightColor.setBackgroundColor(settings.getInt(Prefs.SQUARE_COLOR_RIGHT, Color.WHITE)); btnSmsTextColorLeft.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showSmsTextColorPicker(v, true); } }); btnSmsTextColorLeft.setBackgroundColor(settings.getInt(Prefs.SMS_TEXT_COLOR_LEFT, Color.BLACK)); btnSmsTextColorRight.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showSmsTextColorPicker(v, false); } }); btnSmsTextColorRight.setBackgroundColor(settings.getInt(Prefs.SMS_TEXT_COLOR_RIGHT, Color.BLACK)); CheckBox chbSquareBubble = (CheckBox) rootView.findViewById(R.id.chb_square_bubble); chbSquareBubble.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.ENABLE_SQUARE_BUBBLE, isChecked); btnSquareLeftColor.setEnabled(isChecked); btnSquareRightColor.setEnabled(isChecked); } }); chbSquareBubble.setChecked(ENABLE_SQUARE_BUBBLE); CheckBox chbSmsTextColor = (CheckBox) rootView.findViewById(R.id.chb_sms_text_color); chbSmsTextColor.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.ENABLE_SMS_TEXT_COLOR, isChecked); btnSmsTextColorLeft.setEnabled(isChecked); btnSmsTextColorRight.setEnabled(isChecked); } }); chbSmsTextColor.setChecked(ENABLE_SMS_TEXT_COLOR); } private void setupTurnOnScreenNewSMS() { boolean enableTurnOnScreenNewSms = settings.getBoolean(Prefs.TURN_ON_SCREEN_NEW_SMS, true); boolean enablePowerLed = settings.getBoolean(Prefs.ENABLE_POWER_LED, true); final CheckBox chbTurnOnScreenNewSMS = (CheckBox) rootView.findViewById(R.id.chb_turn_on_screen); final CheckBox chbEnablePowerLed = (CheckBox) rootView.findViewById(R.id.chb_enable_power_led); final Button btnRequestRoot = (Button) rootView.findViewById(R.id.btn_request_root); chbTurnOnScreenNewSMS.setChecked(enableTurnOnScreenNewSms); chbTurnOnScreenNewSMS.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.TURN_ON_SCREEN_NEW_SMS, isChecked); chbEnablePowerLed.setEnabled(!isChecked); btnRequestRoot.setEnabled(!isChecked); } }); chbEnablePowerLed.setChecked(enablePowerLed); chbEnablePowerLed.setEnabled(!enableTurnOnScreenNewSms); chbEnablePowerLed.setText(Html.fromHtml(getString(R.string.chb_flash_power_led))); chbEnablePowerLed.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.ENABLE_POWER_LED, isChecked); } }); btnRequestRoot.setEnabled(!enableTurnOnScreenNewSms); btnRequestRoot.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { RootFunctions.requestRoot(); } }); } private void showSmsTextColorPicker(final View v, boolean left) { int[] mColor = getColorChoice(getActivity()); final String key = left ? 
Prefs.SMS_TEXT_COLOR_LEFT : Prefs.SMS_TEXT_COLOR_RIGHT; int mSelectedColor = settings.getInt(key, Color.BLACK); ColorPickerDialog colorCalendar = ColorPickerDialog.newInstance( R.string.color_picker_default_title, mColor, mSelectedColor, 4, ColorPickerDialog.SIZE_SMALL); colorCalendar.setOnColorSelectedListener(new OnColorSelectedListener() { @Override public void onColorSelected(int color) { settings.putInt(key, color); v.setBackgroundColor(color); } }); colorCalendar.show(getFragmentManager(), "cal"); } private void showSquareColorPicker(final View v, boolean left) { int[] mColor = getColorChoice(getActivity()); final String key = left ? Prefs.SQUARE_COLOR_LEFT : Prefs.SQUARE_COLOR_RIGHT; int mSelectedColor = settings.getInt(key, Color.WHITE); ColorPickerDialog colorCalendar = ColorPickerDialog.newInstance( R.string.color_picker_default_title, mColor, mSelectedColor, 4, ColorPickerDialog.SIZE_SMALL); colorCalendar.setOnColorSelectedListener(new OnColorSelectedListener() { @Override public void onColorSelected(int color) { settings.putInt(key, color); v.setBackgroundColor(color); } }); colorCalendar.show(getFragmentManager(), "cal"); } private void showMessengerFontSizePicker() { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); LayoutInflater inflater = LayoutInflater.from(getActivity()); final View inflator = inflater.inflate(R.layout.text_size_picker, null); final TextView txt_sms_body_size = (TextView) inflator.findViewById(R.id.txt_sms_body_size); final TextView txt_sms_date_size = (TextView) inflator.findViewById(R.id.txt_sms_date_size); final SeekBar sms_body_size = (SeekBar) inflator.findViewById(R.id.sms_body_size); final SeekBar sms_date_size = (SeekBar) inflator.findViewById(R.id.sms_date_size); txt_sms_body_size.setText(settings.getInt(Prefs.SMS_BODY_SIZE, 18) + ""); txt_sms_date_size.setText(settings.getInt(Prefs.SMS_DATE_SIZE, 18) + ""); sms_body_size.setProgress(settings.getInt(Prefs.SMS_BODY_SIZE, 18) - 12); sms_date_size.setProgress(settings.getInt(Prefs.SMS_DATE_SIZE, 18) - 12); sms_body_size.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onStopTrackingTouch(SeekBar seekBar) { } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { int sp = progress + 12; txt_sms_body_size.setText(sp + ""); } }); sms_date_size.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onStopTrackingTouch(SeekBar seekBar) { } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { int sp = progress + 12; txt_sms_date_size.setText(sp + ""); } }); builder.setView(inflator) .setPositiveButton(getString(R.string.save), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { int spBody = sms_body_size.getProgress() + 12; int spDate = sms_date_size.getProgress() + 12; settings.putInt(Prefs.SMS_BODY_SIZE, spBody); settings.putInt(Prefs.SMS_DATE_SIZE, spDate); updateFontSizeButton(spBody, spDate); } }) .setNegativeButton(getString(R.string.cancel), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); AlertDialog alertDialog = builder.create(); alertDialog.show(); } private void updateFontSizeButton(int body, int date) { String text = getString(R.string.button_set_font_size, body, date); Button button = (Button) 
rootView.findViewById(R.id.btn_messenger_set_font_size); button.setText(Html.fromHtml(text)); } } }
src/com/gmail/alexellingsen/g2skintweaks/MainActivity.java
package com.gmail.alexellingsen.g2skintweaks; import it.gmariotti.android.colorpicker.calendarstock.ColorPickerDialog; import it.gmariotti.android.colorpicker.calendarstock.ColorPickerSwatch.OnColorSelectedListener; import android.app.Activity; import android.app.AlertDialog; import android.app.Fragment; import android.content.Context; import android.content.DialogInterface; import android.graphics.Color; import android.os.Bundle; import android.text.Html; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.SeekBar; import android.widget.SeekBar.OnSeekBarChangeListener; import android.widget.TextView; import android.widget.Toast; public class MainActivity extends Activity { private PlaceholderFragment fragment = null; private static SettingsHelper settings = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); settings = new SettingsHelper(this); if (savedInstanceState == null) { fragment = new PlaceholderFragment(); getFragmentManager().beginTransaction().add(R.id.container, fragment).commit(); } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_reset_default) { fragment.askResetToDefault(); return true; } else if (id == R.id.action_enable_debugging) { item.setChecked(!item.isChecked()); settings.putBoolean(Prefs.ENABLE_DEBUGGING, item.isChecked()); return true; } return super.onOptionsItemSelected(item); } public static class PlaceholderFragment extends Fragment { private View rootView = null; public PlaceholderFragment() { } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { rootView = inflater.inflate(R.layout.fragment_main, container, false); setupReplaceSwitch(); setupMessengerCustomization(); setupMessengerFontSize(); setupTurnOnScreenNewSMS(); return rootView; } private int[] getColorChoice(Context context) { int[] mColorChoices = null; String[] color_array = context.getResources().getStringArray(R.array.default_color_choice_values); if (color_array != null && color_array.length > 0) { mColorChoices = new int[color_array.length]; for (int i = 0; i < color_array.length; i++) { mColorChoices[i] = Color.parseColor(color_array[i]); } } return mColorChoices; } public void askResetToDefault() { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setTitle(getString(R.string.are_you_sure)); builder.setMessage(getString(R.string.confirm_reset_message)); builder.setPositiveButton(getString(R.string.yes), new AlertDialog.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { resetToDefault(); } }); builder.setNegativeButton(getString(R.string.no), new AlertDialog.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // Do nothing } }); builder.show(); } public void resetToDefault() { ((CheckBox) rootView.findViewById(R.id.chb_replace_switch)).setChecked(false); ((CheckBox) 
rootView.findViewById(R.id.chb_square_bubble)).setChecked(false); ((Button) rootView.findViewById(R.id.btn_square_left_color)).setBackgroundColor(Color.WHITE); ((Button) rootView.findViewById(R.id.btn_square_right_color)).setBackgroundColor(Color.WHITE); ((CheckBox) rootView.findViewById(R.id.chb_sms_text_color)).setChecked(false); ((Button) rootView.findViewById(R.id.btn_sms_text_color_left)).setBackgroundColor(Color.BLACK); ((Button) rootView.findViewById(R.id.btn_sms_text_color_right)).setBackgroundColor(Color.BLACK); ((CheckBox) rootView.findViewById(R.id.chb_messenger_font_size)).setChecked(false); ((CheckBox) rootView.findViewById(R.id.chb_turn_on_screen)).setChecked(true); // Listeners will update most preferences settings.putInt(Prefs.SQUARE_COLOR_LEFT, Color.WHITE); settings.putInt(Prefs.SQUARE_COLOR_RIGHT, Color.WHITE); settings.putInt(Prefs.SMS_TEXT_COLOR_LEFT, Color.BLACK); settings.putInt(Prefs.SMS_TEXT_COLOR_RIGHT, Color.BLACK); settings.putInt(Prefs.SMS_BODY_SIZE, 18); settings.putInt(Prefs.SMS_DATE_SIZE, 18); updateFontSizeButton(18, 18); String text = getString(R.string.reboot_notice); Toast.makeText(getActivity(), text, Toast.LENGTH_LONG).show(); } private void setupMessengerFontSize() { boolean ENABLE_MESSENGER_FONT_SIZE = settings.getBoolean(Prefs.ENABLE_SMS_FONT_SIZE, false); final Button btnMessengerSetFontSize = (Button) rootView.findViewById(R.id.btn_messenger_set_font_size); int body_size = settings.getInt(Prefs.SMS_BODY_SIZE, 18); int date_size = settings.getInt(Prefs.SMS_DATE_SIZE, 18); updateFontSizeButton(body_size, date_size); btnMessengerSetFontSize.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showMessengerFontSizePicker(); } }); CheckBox chbMessengerFontSize = (CheckBox) rootView.findViewById(R.id.chb_messenger_font_size); chbMessengerFontSize.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { btnMessengerSetFontSize.setEnabled(isChecked); settings.putBoolean(Prefs.ENABLE_SMS_FONT_SIZE, isChecked); } }); chbMessengerFontSize.setChecked(ENABLE_MESSENGER_FONT_SIZE); } private void setupReplaceSwitch() { boolean ENABLE_REPLACE_SWITCH = settings.getBoolean(Prefs.ENABLE_REPLACE_SWICTH, false); CheckBox chbReplaceSwitch = (CheckBox) rootView.findViewById(R.id.chb_replace_switch); chbReplaceSwitch.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.ENABLE_REPLACE_SWICTH, isChecked); } }); chbReplaceSwitch.setChecked(ENABLE_REPLACE_SWITCH); } private void setupMessengerCustomization() { boolean ENABLE_SQUARE_BUBBLE = settings.getBoolean(Prefs.ENABLE_SQUARE_BUBBLE, false); boolean ENABLE_SMS_TEXT_COLOR = settings.getBoolean(Prefs.ENABLE_SMS_TEXT_COLOR, false); final Button btnSquareLeftColor = (Button) rootView.findViewById(R.id.btn_square_left_color); final Button btnSquareRightColor = (Button) rootView.findViewById(R.id.btn_square_right_color); final Button btnSmsTextColorLeft = (Button) rootView.findViewById(R.id.btn_sms_text_color_left); final Button btnSmsTextColorRight = (Button) rootView.findViewById(R.id.btn_sms_text_color_right); btnSquareLeftColor.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showSquareColorPicker(v, true); } }); btnSquareRightColor.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showSquareColorPicker(v, false); } }); 
btnSquareLeftColor.setEnabled(ENABLE_SQUARE_BUBBLE); btnSquareRightColor.setEnabled(ENABLE_SQUARE_BUBBLE); btnSquareLeftColor.setBackgroundColor(settings.getInt(Prefs.SQUARE_COLOR_LEFT, Color.WHITE)); btnSquareRightColor.setBackgroundColor(settings.getInt(Prefs.SQUARE_COLOR_RIGHT, Color.WHITE)); btnSmsTextColorLeft.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showSmsTextColorPicker(v, true); } }); btnSmsTextColorLeft.setBackgroundColor(settings.getInt(Prefs.SMS_TEXT_COLOR_LEFT, Color.BLACK)); btnSmsTextColorRight.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { showSmsTextColorPicker(v, false); } }); btnSmsTextColorRight.setBackgroundColor(settings.getInt(Prefs.SMS_TEXT_COLOR_RIGHT, Color.BLACK)); CheckBox chbSquareBubble = (CheckBox) rootView.findViewById(R.id.chb_square_bubble); chbSquareBubble.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.ENABLE_SQUARE_BUBBLE, isChecked); btnSquareLeftColor.setEnabled(isChecked); btnSquareRightColor.setEnabled(isChecked); } }); chbSquareBubble.setChecked(ENABLE_SQUARE_BUBBLE); CheckBox chbSmsTextColor = (CheckBox) rootView.findViewById(R.id.chb_sms_text_color); chbSmsTextColor.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.ENABLE_SMS_TEXT_COLOR, isChecked); btnSmsTextColorLeft.setEnabled(isChecked); btnSmsTextColorRight.setEnabled(isChecked); } }); chbSmsTextColor.setChecked(ENABLE_SMS_TEXT_COLOR); } private void setupTurnOnScreenNewSMS() { boolean enableTurnOnScreenNewSms = settings.getBoolean(Prefs.TURN_ON_SCREEN_NEW_SMS, true); boolean enablePowerLed = settings.getBoolean(Prefs.ENABLE_POWER_LED, true); final CheckBox chbTurnOnScreenNewSMS = (CheckBox) rootView.findViewById(R.id.chb_turn_on_screen); final CheckBox chbEnablePowerLed = (CheckBox) rootView.findViewById(R.id.chb_enable_power_led); final Button btnRequestRoot = (Button) rootView.findViewById(R.id.btn_request_root); chbTurnOnScreenNewSMS.setChecked(enableTurnOnScreenNewSms); chbTurnOnScreenNewSMS.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.TURN_ON_SCREEN_NEW_SMS, isChecked); chbEnablePowerLed.setEnabled(!isChecked); btnRequestRoot.setEnabled(!isChecked); } }); chbEnablePowerLed.setChecked(enablePowerLed); chbEnablePowerLed.setEnabled(!enableTurnOnScreenNewSms); chbEnablePowerLed.setText(Html.fromHtml(getString(R.string.chb_flash_power_led))); chbEnablePowerLed.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { settings.putBoolean(Prefs.ENABLE_POWER_LED, isChecked); } }); btnRequestRoot.setEnabled(!enableTurnOnScreenNewSms); btnRequestRoot.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { RootFunctions.requestRoot(); } }); } private void showSmsTextColorPicker(final View v, final boolean left) { int[] mColor = getColorChoice(getActivity()); int mSelectedColor = settings.getInt(left ? 
Prefs.SMS_TEXT_COLOR_LEFT : Prefs.SMS_TEXT_COLOR_RIGHT, Color.BLACK); ColorPickerDialog colorCalendar = ColorPickerDialog.newInstance( R.string.color_picker_default_title, mColor, mSelectedColor, 4, ColorPickerDialog.SIZE_SMALL); colorCalendar.setOnColorSelectedListener(new OnColorSelectedListener() { @Override public void onColorSelected(int color) { settings.putInt(left ? Prefs.SMS_TEXT_COLOR_LEFT : Prefs.SMS_TEXT_COLOR_RIGHT, color); v.setBackgroundColor(color); } }); colorCalendar.show(getFragmentManager(), "cal"); } private void showSquareColorPicker(final View v, boolean left) { int[] mColor = getColorChoice(getActivity()); final String key = left ? Prefs.SQUARE_COLOR_LEFT : Prefs.SQUARE_COLOR_RIGHT; int mSelectedColor = settings.getInt(key, Color.WHITE); ColorPickerDialog colorCalendar = ColorPickerDialog.newInstance( R.string.color_picker_default_title, mColor, mSelectedColor, 4, ColorPickerDialog.SIZE_SMALL); colorCalendar.setOnColorSelectedListener(new OnColorSelectedListener() { @Override public void onColorSelected(int color) { settings.putInt(key, color); v.setBackgroundColor(color); } }); colorCalendar.show(getFragmentManager(), "cal"); } private void showMessengerFontSizePicker() { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); LayoutInflater inflater = LayoutInflater.from(getActivity()); final View inflator = inflater.inflate(R.layout.text_size_picker, null); final TextView txt_sms_body_size = (TextView) inflator.findViewById(R.id.txt_sms_body_size); final TextView txt_sms_date_size = (TextView) inflator.findViewById(R.id.txt_sms_date_size); final SeekBar sms_body_size = (SeekBar) inflator.findViewById(R.id.sms_body_size); final SeekBar sms_date_size = (SeekBar) inflator.findViewById(R.id.sms_date_size); txt_sms_body_size.setText(settings.getInt(Prefs.SMS_BODY_SIZE, 18) + ""); txt_sms_date_size.setText(settings.getInt(Prefs.SMS_DATE_SIZE, 18) + ""); sms_body_size.setProgress(settings.getInt(Prefs.SMS_BODY_SIZE, 18) - 12); sms_date_size.setProgress(settings.getInt(Prefs.SMS_DATE_SIZE, 18) - 12); sms_body_size.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onStopTrackingTouch(SeekBar seekBar) { } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { int sp = progress + 12; txt_sms_body_size.setText(sp + ""); } }); sms_date_size.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onStopTrackingTouch(SeekBar seekBar) { } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { int sp = progress + 12; txt_sms_date_size.setText(sp + ""); } }); builder.setView(inflator) .setPositiveButton(getString(R.string.save), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { int spBody = sms_body_size.getProgress() + 12; int spDate = sms_date_size.getProgress() + 12; settings.putInt(Prefs.SMS_BODY_SIZE, spBody); settings.putInt(Prefs.SMS_DATE_SIZE, spDate); updateFontSizeButton(spBody, spDate); } }) .setNegativeButton(getString(R.string.cancel), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); AlertDialog alertDialog = builder.create(); alertDialog.show(); } private void updateFontSizeButton(int body, int date) { String text = getString(R.string.button_set_font_size, body, date); Button button 
= (Button) rootView.findViewById(R.id.btn_messenger_set_font_size); button.setText(Html.fromHtml(text)); } } }
Code style
src/com/gmail/alexellingsen/g2skintweaks/MainActivity.java
Code style
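The diff in the record above is a small readability refactor: the repeated "left ? PREF_A : PREF_B" ternary in showSmsTextColorPicker is hoisted into a final local that the anonymous color listener then captures. The snippet below is a generic sketch of that pattern with made-up names, not the app's actual classes or preference keys.

public class TernaryHoistSketch {
    static final String KEY_LEFT = "sms_text_color_left";
    static final String KEY_RIGHT = "sms_text_color_right";

    interface ColorSelectedListener { void onColorSelected(int color); }

    static ColorSelectedListener makeListener(boolean left) {
        // Resolve the key once instead of repeating the ternary at every use site;
        // the final local is captured by the anonymous listener below.
        final String key = left ? KEY_LEFT : KEY_RIGHT;
        return new ColorSelectedListener() {
            @Override
            public void onColorSelected(int color) {
                System.out.println("store color " + color + " under preference " + key);
            }
        };
    }

    public static void main(String[] args) {
        makeListener(true).onColorSelected(0xFF0000);
        makeListener(false).onColorSelected(0x0000FF);
    }
}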
Java
mit
5735d9fc7051f14c739fe9f1df0323f171970556
0
frib-high-level-controls/dukescript-plotlyjs,daykin/dukescript-plotlyjs,frib-high-level-controls/dukescript-plotlyjs,daykin/dukescript-plotlyjs,daykin/dukescript-plotlyjs,frib-high-level-controls/dukescript-plotlyjs
package net.java.html.plotlyjs; /* * #%L * This software is Copyright by the Board of Trustees of Michigan State University. * Contact Information: * Facility for Rare Isotope Beams * Michigan State University * East Lansing, MI 48824-1321 * http://frib.msu.edu * %% * Copyright (C) 2016 Board of Trustees of Michigan State University * %% * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * #L% */ import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; import net.java.html.js.JavaScriptBody; import net.java.html.js.JavaScriptResource; @JsonInclude(JsonInclude.Include.NON_NULL) @JavaScriptResource("plotly.min.js") @SuppressWarnings("unused") public final class Plotly <T extends Trace>{ static private ObjectMapper mapper = new ObjectMapper(); static private JavaType type; static private String id; private Data<T> data; private Layout layout; private Plotly(String id, Data<T> data, Layout layout){ Plotly.id = id; this.data = data; this.layout = layout; } public static Plotly<?> newPlot(String id, Data<?> data, Layout layout) throws PlotlyException { try { Plotly.mapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY); String strdata = Plotly.mapper.writeValueAsString(data.getTraces()); String strlayout = Plotly.mapper.writeValueAsString(layout); System.out.println(strlayout); jsNewPlot(id,strdata,strlayout); return new Plotly<>(id, data, layout); } catch (JsonProcessingException e) { throw new PlotlyException(e); } } /**Restyle the trace array * @param update a <code>Data</code> object containing the restyle parameters * @param indices the indices in the trace array to apply the new style * @throws PlotlyException */ public void restyle(Data<?> update, int... indices)throws PlotlyException{ try{ jsRestyle(id,Plotly.mapper.writeValueAsString(update),indices); } catch(JsonProcessingException e){ throw new PlotlyException(e); } } /**Update just the chart layout more nicely than redraw. @param layout a <code>Layout</code> object containing the layout parameters */ public void relayout(Layout layout) throws JsonProcessingException{ jsRelayout(id,Plotly.mapper.writeValueAsString(layout)); } /**Add trace(s) to the chart. 
@param traces an Array of <code>Trace</code>s containing the trace parameters *@throws PlotlyException */ public void addTraces(Trace... traces) throws PlotlyException{ try{ jsAddTraces(id,Plotly.mapper.writeValueAsString(traces)); } catch(JsonProcessingException e){ throw new PlotlyException(e); } } /**Delete n traces. @param traces integer indices traces to delete. */ public void deleteTraces(int... traces){ jsDeleteTraces(id, traces); } /**Move indices to the end of the trace array. Affects the layering and legend of the plot. @param traces the indices to bump to the end. */ public void moveTraces(int... traces){ jsMoveTraces(id,traces); } /**Move traces in an array to different specified indices, respectively. @param from Array to pull from @param to Array of the respective positions */ public void moveTraces(int[] from, int[] to){ jsMoveTraces(id, from, to); } public void redraw(){ jsRedraw(id); } @JavaScriptBody(args={"elementId","update","indices"}, body = "" + "if(indices){" + "Plotly.restyle(document.getElementById(elementId), JSON.parse(update), indices);" + "}" + "else{" + "(Plotly.restyle(document.getElementById(elementId), JSON.parse(update)));" + "}") private static native void jsRestyle(String elementId, String update, int... indices); @JavaScriptBody(args={"elementId","update"}, body = "" + "Plotly.relayout(document.getElementById(elementId), JSON.parse(update));") private static native void jsRelayout(String elementId, String update); @JavaScriptBody(args={"elementId","rawTracesJson"}, body = "" + "Plotly.addTraces(document.getElementById(elementId),JSON.parse(rawTracesJson));") private static native void jsAddTraces(String elementId, String rawTracesJson); @JavaScriptBody(args={"elementId","traces"}, body = "" + "Plotly.deleteTraces(document.getElementById(elementId), traces);") private static native void jsDeleteTraces(String elementId, int... traces); @JavaScriptBody(args = {"elementId", "indices"}, body = "" + "Plotly.moveTraces(document.getElementById(elementId), indices);") private static native void jsMoveTraces(String elementId, int... indices); @JavaScriptBody(args = {"elementId", "from", "to"}, body = "" + "Plotly.moveTraces(document.getElementById(elementId),from, to);") private static native void jsMoveTraces(String elementId, int[] from, int[] to); /**Redraw the chart element. @param elementId the associated DOM element */ @JavaScriptBody(args = {"elementId"}, body = "Plotly.redraw(document.getElementById(elementId));") private static native void jsRedraw(String elementId); /**Create a new plot. *@param strElementId the associated DOM element to contain the plot *@param strdata A JSON-formatted string containing the trace parameters. *@param strlayout A JSON-formatted string containing the layout parameters. *@return an Object representing the plot's DOM. */ @JavaScriptBody(args = { "strElementId", "strdata", "strlayout" }, body = "var data = JSON.parse(strdata);\n" + "var layout = JSON.parse(strlayout);\n" + "var elementId = document.getElementById(strElementId);\n" + "Plotly.newPlot(elementId, data, layout);\n" + "return document.getElementById(strElementId);" ) native static Object jsNewPlot(String strElementId, String strdata, String strlayout); }
plotlyjs-api/src/main/java/net/java/html/plotlyjs/Plotly.java
package net.java.html.plotlyjs; /* * #%L * This software is Copyright by the Board of Trustees of Michigan State University. * Contact Information: * Facility for Rare Isotope Beams * Michigan State University * East Lansing, MI 48824-1321 * http://frib.msu.edu * %% * Copyright (C) 2016 Board of Trustees of Michigan State University * %% * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * #L% */ import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; import net.java.html.js.JavaScriptBody; import net.java.html.js.JavaScriptResource; @JsonInclude(JsonInclude.Include.NON_NULL) @JavaScriptResource("plotly.min.js") @SuppressWarnings("unused") public final class Plotly <T extends Trace>{ static private ObjectMapper mapper = new ObjectMapper(); static private JavaType type; static private String id; private Data<T> data; private Layout layout; private Plotly(String id, Data<T> data, Layout layout){ Plotly.id = id; this.data = data; this.layout = layout; } public static Plotly<?> newPlot(String id, Data<?> data, Layout layout) throws PlotlyException { try { Plotly.mapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY); String strdata = Plotly.mapper.writeValueAsString(data.getTraces()); String strlayout = Plotly.mapper.writeValueAsString(layout); System.out.println(strlayout); jsNewPlot(id,strdata,strlayout); return new Plotly<>(id, data, layout); } catch (JsonProcessingException e) { throw new PlotlyException(e); } } /**Restyle the trace array @param updateObj a <code>Data</code> object containing the restyle parameters @param indices the indices in the trace array to apply the new style */ public void restyle(Data<?> updateObj, int... indices) throws JsonProcessingException{ jsRestyle(id,Plotly.mapper.writeValueAsString(updateObj),indices); } /**Update just the chart layout more nicely than redraw. @param layoutPojo a <code>Layout</code> object containing the layout parameters */ public void relayout(Layout layoutPojo) throws JsonProcessingException{ jsRelayout(id,Plotly.mapper.writeValueAsString(layoutPojo)); } /**Add trace(s) to the chart. 
@param traces an Array of <code>Trace</code>s containing the trace parameters */ public void addTraces(Trace[] traces) throws JsonProcessingException{ jsAddTraces(id,Plotly.mapper.writeValueAsString(traces)); } /**Delete n traces. @param traces integer indices traces to delete. */ public void deleteTraces(int... traces){ jsDeleteTraces(id, traces); } /**Move indices to the end of the trace array. Affects the layering and legend of the plot. @param traces the indices to bump to the end. */ public void moveTraces(int... traces){ jsMoveTraces(id,traces); } /**Move traces in an array to different specified indices, respectively. @param from Array to pull from @param to Array of the respective positions */ public void moveTraces(int[] from, int[] to){ jsMoveTraces(id, from, to); } public void redraw(){ jsRedraw(id); } @JavaScriptBody(args={"elementId","update","indices"}, body = "" + "if(indices){" + "Plotly.restyle(document.getElementById(elementId), JSON.parse(update), indices);" + "}" + "else{" + "(Plotly.restyle(document.getElementById(elementId), JSON.parse(update)));" + "}") private static native void jsRestyle(String elementId, String update, int... indices); @JavaScriptBody(args={"elementId","update"}, body = "" + "Plotly.relayout(document.getElementById(elementId), JSON.parse(update));") private static native void jsRelayout(String elementId, String update); @JavaScriptBody(args={"elementId","rawTracesJson"}, body = "" + "Plotly.addTraces(document.getElementById(elementId),JSON.parse(rawTracesJson));") private static native void jsAddTraces(String elementId, String rawTracesJson); @JavaScriptBody(args={"elementId","traces"}, body = "" + "Plotly.deleteTraces(document.getElementById(elementId), traces);") private static native void jsDeleteTraces(String elementId, int... traces); @JavaScriptBody(args = {"elementId", "indices"}, body = "" + "Plotly.moveTraces(document.getElementById(elementId), indices);") private static native void jsMoveTraces(String elementId, int... indices); @JavaScriptBody(args = {"elementId", "from", "to"}, body = "" + "Plotly.moveTraces(document.getElementById(elementId),from, to);") private static native void jsMoveTraces(String elementId, int[] from, int[] to); /**Redraw the chart element. @param elementId the associated DOM element */ @JavaScriptBody(args = {"elementId"}, body = "Plotly.redraw(document.getElementById(elementId));") private static native void jsRedraw(String elementId); /**Create a new plot. *@param strElementId the associated DOM element to contain the plot *@param strdata A JSON-formatted string containing the trace parameters. *@param strlayout A JSON-formatted string containing the layout parameters. *@return an Object representing the plot's DOM. */ @JavaScriptBody(args = { "strElementId", "strdata", "strlayout" }, body = "var data = JSON.parse(strdata);\n" + "var layout = JSON.parse(strlayout);\n" + "var elementId = document.getElementById(strElementId);\n" + "Plotly.newPlot(elementId, data, layout);\n" + "return document.getElementById(strElementId);" ) native static Object jsNewPlot(String strElementId, String strdata, String strlayout); }
Modified Plotly.addTraces to take varargs (Trace... traces) instead of a Trace[] array. Renamed relayout's parameter from layoutPojo to layout and restyle's parameter from updateObj to update; restyle now throws PlotlyException instead of JsonProcessingException.
plotlyjs-api/src/main/java/net/java/html/plotlyjs/Plotly.java
Modified Plotly.addTraces to take varargs (Trace... traces) instead of a Trace[] array. Renamed relayout's parameter from layoutPojo to layout and restyle's parameter from updateObj to update; restyle now throws PlotlyException instead of JsonProcessingException.
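The signature change described above (Trace[] to Trace...) only affects call sites, and the library's trace classes are not fully shown in this record, so the sketch below demonstrates the varargs pattern with plain placeholder methods rather than the real plotlyjs API.

public class VarargsSketch {
    // Old style: the caller has to build an array explicitly.
    static void addTracesArray(String[] traces) {
        System.out.println("array form received " + traces.length + " traces");
    }

    // New style after this commit: varargs accept a flat argument list.
    static void addTracesVarargs(String... traces) {
        System.out.println("varargs form received " + traces.length + " traces");
    }

    public static void main(String[] args) {
        addTracesArray(new String[] { "scatter", "bar" });
        addTracesVarargs("scatter", "bar");                  // flat list, no array needed
        addTracesVarargs(new String[] { "scatter", "bar" }); // an existing array still works
    }
}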
Java
epl-1.0
99af47da33d8661129c2069d5793f85e33aa8f17
0
theanuradha/debrief,debrief/debrief,debrief/debrief,theanuradha/debrief,theanuradha/debrief,debrief/debrief,theanuradha/debrief,theanuradha/debrief,debrief/debrief,theanuradha/debrief,theanuradha/debrief,debrief/debrief,debrief/debrief
/* * Debrief - the Open Source Maritime Analysis Application * http://debrief.info * * (C) 2000-2014, PlanetMayo Ltd * * This library is free software; you can redistribute it and/or * modify it under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html) * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package MWC.TacticalData; import java.awt.Color; import java.beans.IntrospectionException; import java.beans.PropertyDescriptor; import java.io.Serializable; import java.util.Date; import MWC.GUI.Editable; import MWC.GUI.ExcludeFromRightClickEdit; import MWC.GUI.FireExtended; import MWC.GUI.FireReformatted; import MWC.GUI.Plottable; import MWC.GenericData.HiResDate; import MWC.Utilities.TextFormatting.DebriefFormatDateTime; public final class NarrativeEntry implements MWC.GUI.Plottable, Serializable, ExcludeFromRightClickEdit { public static final String DTG = "DTG"; public static final Color DEFAULT_COLOR = new Color(178,0,0); // /////////////////////////////////////////// // member variables // /////////////////////////////////////////// private String _track; private HiResDate _DTG; private String _entry; private String _type; private boolean _visible = true; String _DTGString = null; /** cache the hashcode, it's an expensive operation * */ private Integer _hashCode = null; private transient NarrativeEntryInfo _myInfo; private Color _color = DEFAULT_COLOR; /** * */ private static final long serialVersionUID = 1L; // /////////////////////////////////////////// // constructor // /////////////////////////////////////////// /** * new constructor - for narrative entries which include the type of entry * (typically for SMNT narratives) * * @param track * name of the track this applies to * @param type * what sort of entry this is (or null) * @param DTG * when the entry was recorded * @param entry * the content of the entry */ public NarrativeEntry(final String track, final String type, final HiResDate DTG, final String entry) { _track = track; _DTG = DTG; _entry = entry; _type = type; } /** * old constructor - for when narratives didn't include the type attribute * * @param track * name of the track this applies to * @param DTG * when the entry was recorded * @param entry * the content of the entry */ public NarrativeEntry(final String track, final HiResDate DTG, final String entry) { this(track, null, DTG, entry); } // /////////////////////////////////////////// // accessor methods // /////////////////////////////////////////// public final String getTrackName() { return _track; } public final String getSource() { return _track; } @FireReformatted public final void setSource(final String track) { _track = track; // and clear the hash code clearHash(); } public final String getEntry() { return _entry; } @FireReformatted public void setEntry(final String val) { _entry = val; // and clear the hash code clearHash(); } public final HiResDate getDTG() { return _DTG; } @FireExtended public void setDTG(final HiResDate date) { _DTG = date; // and clear the hash code clearHash(); } public final String getType() { return _type; } @FireReformatted public void setType(final String type) { _type = type; // and clear the hash code clearHash(); } public final String getDTGString() { if (_DTGString == null) _DTGString = DebriefFormatDateTime.toStringHiRes(_DTG); return _DTGString; } public void setColor(Color color) { _color = color; 
} public Color getColor() { return _color; } // primarily by name, secondarily by value; null-safe; case-insensitive public int compareTo(final String myStr, final String other) { int result = nullSafeStringComparator(myStr, other); if (result != 0) { return result; } return nullSafeStringComparator(myStr, other); } public static int nullSafeStringComparator(final String one, final String two) { if (one == null ^ two == null) { return (one == null) ? -1 : 1; } if (one == null && two == null) { return 0; } return one.compareToIgnoreCase(two); } /** * member function to meet requirements of comparable interface * */ public final int compareTo(final Plottable o) { final NarrativeEntry other = (NarrativeEntry) o; int result = _DTG.compareTo(other._DTG); if (result == 0) { result = compareTo(getTrackName(), other.getTrackName()); } if (result == 0) { result = compareTo(getType(), other.getType()); } if (result == 0) { result = compareTo(getEntry(), other.getEntry()); } return result; } // /////////////////////////////////////////// // member methods to meet requirements of Plottable interface // /////////////////////////////////////////// /** * paint this object to the specified canvas */ public final void paint(final MWC.GUI.CanvasType dest) { } /** * find the data area occupied by this item */ public final MWC.GenericData.WorldArea getBounds() { return null; } /** * it this item currently visible? */ public final boolean getVisible() { return _visible; } /** * set the visibility (although we ignore this) */ @FireReformatted public final void setVisible(final boolean val) { _visible = val; } /** * how far away are we from this point? or return null if it can't be * calculated */ public final double rangeFrom(final MWC.GenericData.WorldLocation other) { return -1; } /** * get the editor for this item * * @return the BeanInfo data for this editable object */ public final MWC.GUI.Editable.EditorType getInfo() { if (_myInfo == null) _myInfo = new NarrativeEntryInfo(this, this.toString()); return _myInfo; } /** * whether there is any edit information for this item this is a convenience * function to save creating the EditorType data first * * @return yes/no */ public final boolean hasEditor() { return true; } /** * get the name of this entry, using the formatted DTG */ public final String getName() { return DebriefFormatDateTime.toStringHiRes(_DTG); } public final String toString() { return getName(); } // public boolean equals(Object obj) { // if (obj == null) // return false; // if (obj == this) // return true; // if (!(obj instanceof NarrativeEntry)) // return false; // // NarrativeEntry other = (NarrativeEntry) obj; // return (other.getSource().equals(getSource())) && // (other.getType().equals(getType())) && // (other.getDTG().equals(getDTG())) && // (other.getEntry().equals(getEntry())); // // } protected void clearHash() { _hashCode = null; } @Override public int hashCode() { if(_hashCode == null) { final int prime = 31; int result = 1; result = prime * result + ((_DTG == null) ? 0 : _DTG.hashCode()); result = prime * result + ((_entry == null) ? 0 : _entry.hashCode()); result = prime * result + ((_track == null) ? 0 : _track.hashCode()); result = prime * result + ((_type == null) ? 
0 : _type.hashCode()); _hashCode = result; } return _hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; NarrativeEntry other = (NarrativeEntry) obj; if (_DTG == null) { if (other._DTG != null) return false; } else if (!_DTG.equals(other._DTG)) return false; if (_entry == null) { if (other._entry != null) return false; } else if (!_entry.equals(other._entry)) return false; if (_track == null) { if (other._track != null) return false; } else if (!_track.equals(other._track)) return false; if (_type == null) { if (other._type != null) return false; } else if (!_type.equals(other._type)) return false; return true; } // //////////////////////////////////////////////////// // bean info for this class // /////////////////////////////////////////////////// public final class NarrativeEntryInfo extends Editable.EditorType { public NarrativeEntryInfo(final NarrativeEntry data, final String theName) { super(data, theName, data.toString()); } public final PropertyDescriptor[] getPropertyDescriptors() { try { final PropertyDescriptor[] myRes = { prop("Type", "the type of entry", FORMAT), prop("Source", "the source for this entry", FORMAT), prop(DTG, "the time this entry was recorded", FORMAT), prop("Color", "the color for this narrative entry", FORMAT), prop("Entry", "the content of this entry", FORMAT), }; return myRes; } catch (final IntrospectionException e) { e.printStackTrace(); return super.getPropertyDescriptors(); } } } // //////////////////////////////////////////////////////////////////////////////////////////////// // testing for this class // //////////////////////////////////////////////////////////////////////////////////////////////// static public final class testMe extends junit.framework.TestCase { static public final String TEST_ALL_TEST_TYPE = "UNIT"; public testMe(final String val) { super(val); } public final void testMyParams() { final HiResDate hd = new HiResDate(new Date()); final NarrativeEntry ne = new NarrativeEntry("aaa", "bbb", hd, "vvvv"); editableTesterSupport.testParams(ne, this); } } }
org.mwc.cmap.legacy/src/MWC/TacticalData/NarrativeEntry.java
/* * Debrief - the Open Source Maritime Analysis Application * http://debrief.info * * (C) 2000-2014, PlanetMayo Ltd * * This library is free software; you can redistribute it and/or * modify it under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html) * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package MWC.TacticalData; import java.awt.Color; import java.beans.IntrospectionException; import java.beans.PropertyDescriptor; import java.io.Serializable; import java.util.Date; import MWC.GUI.Editable; import MWC.GUI.ExcludeFromRightClickEdit; import MWC.GUI.FireExtended; import MWC.GUI.FireReformatted; import MWC.GUI.Plottable; import MWC.GenericData.HiResDate; import MWC.Utilities.TextFormatting.DebriefFormatDateTime; public final class NarrativeEntry implements MWC.GUI.Plottable, Serializable, ExcludeFromRightClickEdit { public static final String DTG = "DTG"; public static final Color DEFAULT_COLOR = new Color(178,0,0); // /////////////////////////////////////////// // member variables // /////////////////////////////////////////// private String _track; private HiResDate _DTG; private String _entry; private String _type; private boolean _visible = true; String _DTGString = null; private transient NarrativeEntryInfo _myInfo; private Color _color = DEFAULT_COLOR; /** * */ private static final long serialVersionUID = 1L; // /////////////////////////////////////////// // constructor // /////////////////////////////////////////// /** * new constructor - for narrative entries which include the type of entry * (typically for SMNT narratives) * * @param track * name of the track this applies to * @param type * what sort of entry this is (or null) * @param DTG * when the entry was recorded * @param entry * the content of the entry */ public NarrativeEntry(final String track, final String type, final HiResDate DTG, final String entry) { _track = track; _DTG = DTG; _entry = entry; _type = type; } /** * old constructor - for when narratives didn't include the type attribute * * @param track * name of the track this applies to * @param DTG * when the entry was recorded * @param entry * the content of the entry */ public NarrativeEntry(final String track, final HiResDate DTG, final String entry) { this(track, null, DTG, entry); } // /////////////////////////////////////////// // accessor methods // /////////////////////////////////////////// public final String getTrackName() { return _track; } public final String getSource() { return _track; } @FireReformatted public final void setSource(final String track) { _track = track; } public final String getEntry() { return _entry; } @FireReformatted public void setEntry(final String val) { _entry = val; } public final HiResDate getDTG() { return _DTG; } @FireExtended public void setDTG(final HiResDate date) { _DTG = date; } public final String getType() { return _type; } @FireReformatted public void setType(final String type) { _type = type; } public final String getDTGString() { if (_DTGString == null) _DTGString = DebriefFormatDateTime.toStringHiRes(_DTG); return _DTGString; } public void setColor(Color color) { _color = color; } public Color getColor() { return _color; } // primarily by name, secondarily by value; null-safe; case-insensitive public int compareTo(final String myStr, final String other) { int result = nullSafeStringComparator(myStr, other); if (result != 0) { 
return result; } return nullSafeStringComparator(myStr, other); } public static int nullSafeStringComparator(final String one, final String two) { if (one == null ^ two == null) { return (one == null) ? -1 : 1; } if (one == null && two == null) { return 0; } return one.compareToIgnoreCase(two); } /** * member function to meet requirements of comparable interface * */ public final int compareTo(final Plottable o) { final NarrativeEntry other = (NarrativeEntry) o; int result = _DTG.compareTo(other._DTG); if (result == 0) { result = compareTo(getTrackName(), other.getTrackName()); } if (result == 0) { result = compareTo(getType(), other.getType()); } if (result == 0) { result = compareTo(getEntry(), other.getEntry()); } return result; } // /////////////////////////////////////////// // member methods to meet requirements of Plottable interface // /////////////////////////////////////////// /** * paint this object to the specified canvas */ public final void paint(final MWC.GUI.CanvasType dest) { } /** * find the data area occupied by this item */ public final MWC.GenericData.WorldArea getBounds() { return null; } /** * it this item currently visible? */ public final boolean getVisible() { return _visible; } /** * set the visibility (although we ignore this) */ @FireReformatted public final void setVisible(final boolean val) { _visible = val; } /** * how far away are we from this point? or return null if it can't be * calculated */ public final double rangeFrom(final MWC.GenericData.WorldLocation other) { return -1; } /** * get the editor for this item * * @return the BeanInfo data for this editable object */ public final MWC.GUI.Editable.EditorType getInfo() { if (_myInfo == null) _myInfo = new NarrativeEntryInfo(this, this.toString()); return _myInfo; } /** * whether there is any edit information for this item this is a convenience * function to save creating the EditorType data first * * @return yes/no */ public final boolean hasEditor() { return true; } /** * get the name of this entry, using the formatted DTG */ public final String getName() { return DebriefFormatDateTime.toStringHiRes(_DTG); } public final String toString() { return getName(); } // public boolean equals(Object obj) { // if (obj == null) // return false; // if (obj == this) // return true; // if (!(obj instanceof NarrativeEntry)) // return false; // // NarrativeEntry other = (NarrativeEntry) obj; // return (other.getSource().equals(getSource())) && // (other.getType().equals(getType())) && // (other.getDTG().equals(getDTG())) && // (other.getEntry().equals(getEntry())); // // } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((_DTG == null) ? 0 : _DTG.hashCode()); result = prime * result + ((_entry == null) ? 0 : _entry.hashCode()); result = prime * result + ((_track == null) ? 0 : _track.hashCode()); result = prime * result + ((_type == null) ? 
0 : _type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; NarrativeEntry other = (NarrativeEntry) obj; if (_DTG == null) { if (other._DTG != null) return false; } else if (!_DTG.equals(other._DTG)) return false; if (_entry == null) { if (other._entry != null) return false; } else if (!_entry.equals(other._entry)) return false; if (_track == null) { if (other._track != null) return false; } else if (!_track.equals(other._track)) return false; if (_type == null) { if (other._type != null) return false; } else if (!_type.equals(other._type)) return false; return true; } // //////////////////////////////////////////////////// // bean info for this class // /////////////////////////////////////////////////// public final class NarrativeEntryInfo extends Editable.EditorType { public NarrativeEntryInfo(final NarrativeEntry data, final String theName) { super(data, theName, data.toString()); } public final PropertyDescriptor[] getPropertyDescriptors() { try { final PropertyDescriptor[] myRes = { prop("Type", "the type of entry", FORMAT), prop("Source", "the source for this entry", FORMAT), prop(DTG, "the time this entry was recorded", FORMAT), prop("Color", "the color for this narrative entry", FORMAT), prop("Entry", "the content of this entry", FORMAT), }; return myRes; } catch (final IntrospectionException e) { e.printStackTrace(); return super.getPropertyDescriptors(); } } } // //////////////////////////////////////////////////////////////////////////////////////////////// // testing for this class // //////////////////////////////////////////////////////////////////////////////////////////////// static public final class testMe extends junit.framework.TestCase { static public final String TEST_ALL_TEST_TYPE = "UNIT"; public testMe(final String val) { super(val); } public final void testMyParams() { final HiResDate hd = new HiResDate(new Date()); final NarrativeEntry ne = new NarrativeEntry("aaa", "bbb", hd, "vvvv"); editableTesterSupport.testParams(ne, this); } } }
cache the hash code
org.mwc.cmap.legacy/src/MWC/TacticalData/NarrativeEntry.java
cache the hash code
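The record above pairs the old and new versions of NarrativeEntry for the commit "cache the hash code": the new_contents adds a memoised `_hashCode` field, computes it lazily in `hashCode()`, and calls `clearHash()` from every setter that touches a hashed field. Below is a minimal standalone sketch of that pattern, reduced to two fields; the class name `CachedHashEntry` is hypothetical and only illustrates the shape of the change.

```java
// Sketch of the lazy hash-code caching pattern from the new_contents above
// (hypothetical class; field names mirror NarrativeEntry).
public class CachedHashEntry {
    private String _track;
    private String _entry;

    // null means "not computed yet" -- hashCode() is treated as expensive
    private Integer _hashCode = null;

    public CachedHashEntry(final String track, final String entry) {
        _track = track;
        _entry = entry;
    }

    public void setEntry(final String entry) {
        _entry = entry;
        clearHash(); // every mutation of a hashed field must drop the cache
    }

    protected void clearHash() {
        _hashCode = null;
    }

    @Override
    public int hashCode() {
        if (_hashCode == null) {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((_track == null) ? 0 : _track.hashCode());
            result = prime * result + ((_entry == null) ? 0 : _entry.hashCode());
            _hashCode = result;
        }
        return _hashCode;
    }
}
```

The cost of the pattern is that every setter has to remember the `clearHash()` call, which is why the commit adds it to `setSource`, `setEntry`, `setDTG` and `setType` alike.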
Java
mpl-2.0
15ba09443ed6d5cac57be133bbb65face17f2f17
0
ajschult/etomica,etomica/etomica,etomica/etomica,etomica/etomica,ajschult/etomica,ajschult/etomica
package etomica.graphics; import java.awt.BorderLayout; import java.awt.Component; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JCheckBox; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.border.TitledBorder; import javax.swing.event.TableModelEvent; import javax.swing.event.TableModelListener; import etomica.action.Action; import etomica.data.Data; import etomica.data.DataPump; import etomica.data.DataSourceFunction; import etomica.data.DataSourcePoints; import etomica.modifier.ModifierGeneral; import etomica.units.Length; import etomica.units.Null; import etomica.util.Function; public class DevicePlotPoints { public static final int X_DIM = 0; public static final int Y_DIM = 1; public static final int MIN_X = 0; public static final int MAX_X = 1; public static final int MIN_Y = 2; public static final int MAX_Y = 3; private final int TOTAL_X_SAMPLES = 20; private final double[] scaleInit = new double[] {-50.0, 50.0, -250.0, 250.0}; private double[] scaleMins = new double[] {-50.0, 0.0, -250.0, 0.0}; private double[] scaleMaxs = new double[] {0.0, 50.0, 0.0, 250.0}; private JPanel controlPanel; private JPanel parameterPanel; private JPanel resizePanel; private JPanel scalePanel; private DeviceTableModelGeneric tableModel; private DisplayPlot plot; private DeviceTable table; private DeviceSlider[] plotSizeSliders = new DeviceSlider[4]; private JCheckBox buttonAuto; private DeviceSlider funcSlider[]; private Action updateAction; private GridBagConstraints vertGBC; private int numFunctions; private Function[] functions; private DataSourceFunction[] dataSourceFuncs; private DataPump funcPumps[]; private ModifierGeneral[] mods; private String[] funcParmLabels; public DevicePlotPoints(Function[] fncts, String[] funcNames) { this(null, fncts, funcNames, true); } public DevicePlotPoints(String[] sliderLabels, Function[] fncts, String[] funcNames, boolean verticalParameters) { numFunctions = fncts.length; funcParmLabels = sliderLabels; int numSliders = 0; if(sliderLabels != null) { numSliders = sliderLabels.length; } // GBC that is reused throughout vertGBC = new GridBagConstraints(); vertGBC.gridx = 0; vertGBC.gridy = GridBagConstraints.RELATIVE; vertGBC.insets = new java.awt.Insets(3,1,3,1); // Graphics panels controlPanel = new JPanel(); controlPanel.setLayout(new GridBagLayout()); resizePanel = new JPanel(); resizePanel.setLayout(new BorderLayout()); // function parameter adjustment sliders JPanel sliderPanel[] = new JPanel[numSliders]; funcSlider = new DeviceSlider[numSliders]; mods = new ModifierGeneral[numSliders]; for(int slide = 0; slide < numSliders; slide++) { sliderPanel[slide] = new JPanel(); sliderPanel[slide].setBorder(new TitledBorder(null, sliderLabels[slide], TitledBorder.CENTER, TitledBorder.TOP)); funcSlider[slide] = new DeviceSlider(null); funcSlider[slide].setPrecision(1); funcSlider[slide].setMinimum(0); funcSlider[slide].setMaximum(10); funcSlider[slide].setNMajor(5); funcSlider[slide].setValue(5); funcSlider[slide].setShowValues(true); funcSlider[slide].setEditValues(false); mods[slide] = new ModifierGeneral(funcSlider[slide], "value"); funcSlider[slide].setModifier(mods[slide]); sliderPanel[slide].add(funcSlider[slide].graphic()); } // // Data point table // tableModel = new DeviceTableModelGeneric(null, new String[] {"X", "Y"}); table = new DeviceTable(tableModel); table.setPreferredSize(200, 200); table.setSize(200, 200); 
table.setTitle("Data Points"); tableModel.addTableModelListener(new TableChangeListener()); // Deleting a point DeviceButton delButton = new DeviceButton(null); delButton.setLabel("Delete Point(s)"); ((JPanel)table.graphic()).add(delButton.graphic(), vertGBC); Action deletePointAction = new Action() { public void actionPerformed() { int[] selRows = table.getSelectedRows(); while(selRows.length > 0) { if(selRows[0] == (tableModel.getRowCount()-1)) { break; } else { tableModel.deleteRow(selRows[0]); selRows = table.getSelectedRows(); } } plot.getPlot().repaint(); } }; delButton.setAction(deletePointAction); // Add table to the "control panel" controlPanel.add(table.graphic(), vertGBC); // Add parameter adjust scrollbars to a scrolled window which // is added to the "control panel" JPanel sPanel = new JPanel(); sPanel.setLayout(new GridBagLayout()); GridBagConstraints horizGBC = new GridBagConstraints(); horizGBC.gridx = GridBagConstraints.RELATIVE; horizGBC.gridy = 0; for(int slide = 0; slide < numSliders; slide++) { sPanel.add(sliderPanel[slide], verticalParameters ? vertGBC : horizGBC); } parameterPanel = new JPanel(); if (verticalParameters) { JScrollPane scrollPane = new JScrollPane(sPanel); scrollPane.setPreferredSize(new java.awt.Dimension(250, 320)); parameterPanel.add(scrollPane); controlPanel.add(parameterPanel, vertGBC); } else { parameterPanel.setLayout(new GridBagLayout()); parameterPanel.add(sPanel); } parameterPanel.setBorder(new TitledBorder(null, "Function Parameter Adjustment", TitledBorder.CENTER, TitledBorder.TOP)); // // Plot axis adjustment sliders // scalePanel = new JPanel(); scalePanel.setLayout(new GridBagLayout()); scalePanel.setBorder(new TitledBorder(null, "Plot Scaling", TitledBorder.CENTER, TitledBorder.TOP)); JPanel scaleTypePanel = new JPanel(); scaleTypePanel.setBorder(new TitledBorder(null, "Scaling Type", TitledBorder.CENTER, TitledBorder.TOP)); buttonAuto = new JCheckBox("Auto Scale Y Axis"); buttonAuto.setSelected(false); scaleTypePanel.add(buttonAuto); JPanel[] scalePanels = new JPanel[4]; String[] scaleTitles = new String[] {"Minimum", "Maximum", "Minimum", "Maximum"}; ModifierGeneral[] scaleMods = new ModifierGeneral[4]; for(int slide = MIN_X; slide <= MAX_Y; slide++) { scalePanels[slide] = new JPanel(); scalePanels[slide].setBorder(new TitledBorder(null, scaleTitles[slide], TitledBorder.CENTER, TitledBorder.TOP)); plotSizeSliders[slide] = new DeviceSlider(null); plotSizeSliders[slide].setPrecision(0); plotSizeSliders[slide].setMinimum(scaleMins[slide]); plotSizeSliders[slide].setMaximum(scaleMaxs[slide]); plotSizeSliders[slide].setNMajor(5); plotSizeSliders[slide].setValue(scaleInit[slide]); scaleMods[slide] = new ModifierGeneral(plotSizeSliders[slide], "value"); plotSizeSliders[slide].setModifier(scaleMods[slide]); scalePanels[slide].add(plotSizeSliders[slide].graphic()); } JPanel scaleXAxisPanel = new JPanel(); scaleXAxisPanel.setBorder(new TitledBorder(null, "X Axis Scaling", TitledBorder.CENTER, TitledBorder.TOP)); scaleXAxisPanel.add(scalePanels[MIN_X]); scaleXAxisPanel.add(scalePanels[MAX_X]); JPanel scaleYAxisPanel = new JPanel(); scaleYAxisPanel.setBorder(new TitledBorder(null, "Y Axis Scaling", TitledBorder.CENTER, TitledBorder.TOP)); scaleYAxisPanel.add(scalePanels[MIN_Y]); scaleYAxisPanel.add(scalePanels[MAX_Y]); scalePanel.add(scaleTypePanel, vertGBC); scalePanel.add(scaleXAxisPanel, vertGBC); scalePanel.add(scaleYAxisPanel, vertGBC); resizePanel.add(scalePanel, BorderLayout.SOUTH); // The Plot plot = new DisplayPlot(); 
plot.getPlot().setTitle("Function Display"); plot.setSize(650, 400); // Initialize functions displayed on plot functions = new Function[numFunctions]; dataSourceFuncs = new DataSourceFunction[numFunctions]; funcPumps = new DataPump[numFunctions]; for(int f = 0; f < numFunctions; f++) { functions[f] = fncts[f]; dataSourceFuncs[f] = new DataSourceFunction(funcNames[f],Null.DIMENSION,functions[f], TOTAL_X_SAMPLES,"x",Length.DIMENSION); funcPumps[f] = new DataPump(dataSourceFuncs[f], plot.getDataSet().makeDataSink()); } // point display on the plot final DataSourcePoints dspts = new DataSourcePoints("Independent Points", Null.DIMENSION); final DataPump ptPump = new DataPump(dspts, plot.getDataSet().makeDataSink()); plot.getPlot().setMarksStyle("dots", numFunctions); plot.getPlot().setConnected(false, numFunctions); // // Update action to pipe any data changes and redraw plot // updateAction = new Action() { public void actionPerformed() { // sniff out min and max y values from the functions. double maxY = Double.MIN_VALUE; double minY = Double.MAX_VALUE; for(int f = 0; f < numFunctions; f++) { dataSourceFuncs[f].getXSource().setXMax(plotSizeSliders[MAX_X].getValue()); dataSourceFuncs[f].getXSource().setXMin(plotSizeSliders[MIN_X].getValue()); dataSourceFuncs[f].update(); Data yFunc = dataSourceFuncs[f].getData(); for (int i=0; i<yFunc.getLength(); i++) { if (yFunc.getValue(i) > maxY) { maxY = yFunc.getValue(i); } if (yFunc.getValue(i) < minY) { minY = yFunc.getValue(i); } } funcPumps[f].actionPerformed(); } dspts.update(getPoints(X_DIM), getPoints(Y_DIM)); double[] yPoints = getPoints(Y_DIM); for (int i = 0; i<yPoints.length; i++) { if (yPoints[i] > maxY) { maxY = yPoints[i]; } if (yPoints[i] < minY) { minY = yPoints[i]; } } // we could also sniff min and max y here ptPump.actionPerformed(); // don't auto-scale in X. this means that entered data outside // the range won't show up. plot.getPlot().setXRange(plotSizeSliders[MIN_X].getValue(), plotSizeSliders[MAX_X].getValue()); if(buttonAuto.isSelected() == false) { plot.getPlot().setYRange(plotSizeSliders[MIN_Y].getValue(), plotSizeSliders[MAX_Y].getValue()); } else { plot.getPlot().setYRange(minY, maxY); } } }; for(int slide = 0; slide < numSliders; slide++) { funcSlider[slide].setPostAction(updateAction); } plotSizeSliders[MIN_X].setPostAction(updateAction); plotSizeSliders[MAX_X].setPostAction(updateAction); plotSizeSliders[MIN_Y].setPostAction(updateAction); plotSizeSliders[MAX_Y].setPostAction(updateAction); // // Update on scale type selection // buttonAuto.addActionListener(new ToggleButtonListener()); plot.getDataSet().setUpdatingOnAnyChange(true); // Update axis min/max values updateAction.actionPerformed(); } /** * Returns the top level panel that the function plot components sit on. */ public DisplayPlot getDisplayPlot() { return plot; } /** * Returns the top level panel that the function plot components sit on. */ public Component graphic() { return plot.graphic(); } /** * Returns the top level panel that the points table and (if parameters are * vertically-oriented) function parameter slider components sit on. */ public Component controlGraphic() { return controlPanel; } /** * Returns the panel that contains the parameter sliders. You probably * don't want this unless you have horizontal layout for the parameters. */ public Component parameterGraphic() { return parameterPanel; } /** * Returns the top level panel that the plot scale resizing components sit on. 
*/ public Component resizeGraphic() { return resizePanel; } /** * Instructs the plot to redisplay any plotted functions with the most * recent data. Any points contained in the data point input table will be * displayed (if the data point input table is displayed). The plot axis * limits are updated. */ public void refresh() { updateAction.actionPerformed(); } /** * Returns the plot scale slider for the given parameter (values can be * DevicePlotPoints.MIN_X, MAX_X, MIN_Y or MAX_Y. */ public DeviceSlider getPlotSizeSlider(int minMaxXY) { return plotSizeSliders[minMaxXY]; } /** * Sets the minimum and maximum X-axis limits on the function display * sliders. * @param min Minimum X-axis value * @param max Maximum X-axis value * @param middle Value at which to split the min/max X-axis limit sliders */ public void setXScale(double min, double max, double middle) { if(min < max && middle > min && middle < max) { scaleMins[MIN_X] = min; scaleMins[MAX_X] = middle; scaleMaxs[MIN_X] = middle; scaleMaxs[MAX_X] = max; plotSizeSliders[MIN_X].setMinimum(scaleMins[MIN_X]); plotSizeSliders[MAX_X].setMinimum(scaleMins[MAX_X]); plotSizeSliders[MIN_X].setMaximum(scaleMaxs[MIN_X]); plotSizeSliders[MAX_X].setMaximum(scaleMaxs[MAX_X]); } } /** * Sets the minimum and maximum Y-axis limits on the function display * sliders. * @param min Minimum Y-axis value * @param max Maximum Y-axis value * @param middle Value at which to split the min/max Y-axis limit sliders */ public void setYScale(double min, double max, double middle) { if(min < max && middle > min && middle < max) { scaleMins[MIN_Y] = min; scaleMins[MAX_Y] = middle; scaleMaxs[MIN_Y] = middle; scaleMaxs[MAX_Y] = max; plotSizeSliders[MIN_Y].setMinimum(scaleMins[MIN_Y]); plotSizeSliders[MAX_Y].setMinimum(scaleMins[MAX_Y]); plotSizeSliders[MIN_Y].setMaximum(scaleMaxs[MIN_Y]); plotSizeSliders[MAX_Y].setMaximum(scaleMaxs[MAX_Y]); } } /** * Sets the visibility of the plot axis limit sliders. Default is true(show). * @param show show/unshow boolean */ public void showScale(boolean show) { if(show == false) { resizePanel.remove(scalePanel); } else { resizePanel.add(scalePanel, BorderLayout.SOUTH); } } /** * Sets the visibility of the data point input table. Default is true(show). * @param show show/unshow boolean */ public void showPointInput(boolean show) { if(show == false) { controlPanel.remove(table.graphic()); } else { controlPanel.add(table.graphic(), vertGBC); } } /** * Returns the value of the function parameter slider. * @param desc label of parameter setting slider. The label was passed * into the ctor. * @return slider setting of the parameter value. */ public double getParameterValue(String desc) { double parmValue = 0.0; for(int i = 0; i < funcParmLabels.length; i++) { if(desc.compareTo(funcParmLabels[i]) == 0) { parmValue = mods[i].getValue(); break; } } return parmValue; } /** * Sets the minimum and maximum allowable values for a function parameter. * @param desc label of parameter setting slider. The label was passed * into the ctor. * @param min Minimum allowable value for the function parameter. * @param max Maxminimum allowable value for the function parameter. 
*/ public void setParameterLimits(String desc, double min, double max) { for(int i = 0; i < funcParmLabels.length; i++) { if(desc.compareTo(funcParmLabels[i]) == 0) { funcSlider[i].setMinimum(min); funcSlider[i].setMaximum(max); if(funcSlider[i].getValue() < min) { funcSlider[i].setValue(min); } else if(funcSlider[i].getValue() > max) { funcSlider[i].setValue(max); } break; } } } public DeviceTableModelGeneric getTableModel() { return tableModel; } private double[] getPoints(int column) { double[] points = null; int nonBlankRowCount = 0; int numRows = tableModel.getRowCount(); for(int row = 0; row < numRows; row++) { if(((String)tableModel.getValueAt(row, column)).compareTo("") != 0) { nonBlankRowCount++; } } points = new double[nonBlankRowCount]; for(int row = 0; row < numRows; row++) { if(((String)tableModel.getValueAt(row, column)).compareTo("") != 0) { points[row] = Double.valueOf(((String)tableModel.getValueAt(row, column))).doubleValue(); } } return points; } /** * Returns the slider for the given parameter. */ public DeviceSlider getSlider(String desc) { for(int i = 0; i < funcParmLabels.length; i++) { if(desc.compareTo(funcParmLabels[i]) == 0) { return funcSlider[i]; } } return null; } public void setAutoScale(boolean isAutoScale) { buttonAuto.setSelected(isAutoScale); new ToggleButtonListener().actionPerformed(null); updateAction.actionPerformed(); } private class ToggleButtonListener implements ActionListener { public void actionPerformed(ActionEvent e) { if(buttonAuto.isSelected()) { plotSizeSliders[MIN_X].getSlider().setEnabled(true); plotSizeSliders[MAX_X].getSlider().setEnabled(true); plotSizeSliders[MIN_Y].getSlider().setEnabled(false); plotSizeSliders[MAX_Y].getSlider().setEnabled(false); } else { plotSizeSliders[MIN_X].getSlider().setEnabled(true); plotSizeSliders[MAX_X].getSlider().setEnabled(true); plotSizeSliders[MIN_Y].getSlider().setEnabled(true); plotSizeSliders[MAX_Y].getSlider().setEnabled(true); } updateAction.actionPerformed(); } } // TODO : Hmmm, no order gaurenteed between this listener and other listeners... // Like the one in DeviceTableModelGeneric private class TableChangeListener implements TableModelListener { public void tableChanged(TableModelEvent e) { // If a row changed and is a complete entry(both x and y values) // plot the points on the display. if(e.getType() == TableModelEvent.UPDATE) { Object blank = ""; if(tableModel.getValueAt(e.getFirstRow(), 0).equals(blank) == false && tableModel.getValueAt(e.getFirstRow(), 1).equals(blank) == false) { updateAction.actionPerformed(); } } // If a row is removed, remove it from the display. else if(e.getType() == TableModelEvent.DELETE) { updateAction.actionPerformed(); } } } // end class TableChangeListener }
etomica/graphics/DevicePlotPoints.java
package etomica.graphics; import java.awt.BorderLayout; import java.awt.Component; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JCheckBox; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.border.TitledBorder; import javax.swing.event.TableModelEvent; import javax.swing.event.TableModelListener; import etomica.action.Action; import etomica.data.Data; import etomica.data.DataPump; import etomica.data.DataSourceFunction; import etomica.data.DataSourcePoints; import etomica.modifier.ModifierGeneral; import etomica.units.Length; import etomica.units.Null; import etomica.util.Function; public class DevicePlotPoints { public static final int X_DIM = 0; public static final int Y_DIM = 1; public static final int MIN_X = 0; public static final int MAX_X = 1; public static final int MIN_Y = 2; public static final int MAX_Y = 3; private final int TOTAL_X_SAMPLES = 20; private final double[] scaleInit = new double[] {-50.0, 50.0, -250.0, 250.0}; private double[] scaleMins = new double[] {-50.0, 0.0, -250.0, 0.0}; private double[] scaleMaxs = new double[] {0.0, 50.0, 0.0, 250.0}; private JPanel controlPanel; private JPanel parameterPanel; private JPanel resizePanel; private JPanel scalePanel; private DeviceTableModelGeneric tableModel; private DisplayPlot plot; private DeviceTable table; private DeviceSlider[] plotSizeSliders = new DeviceSlider[4]; private JCheckBox buttonAuto; private DeviceSlider funcSlider[]; private Action updateAction; private GridBagConstraints vertGBC; private int numFunctions; private Function[] functions; private DataSourceFunction[] dataSourceFuncs; private DataPump funcPumps[]; private ModifierGeneral[] mods; private String[] funcParmLabels; public DevicePlotPoints(Function[] fncts, String[] funcNames) { this(null, fncts, funcNames, true); } public DevicePlotPoints(String[] sliderLabels, Function[] fncts, String[] funcNames, boolean verticalParameters) { numFunctions = fncts.length; funcParmLabels = sliderLabels; int numSliders = 0; if(sliderLabels != null) { numSliders = sliderLabels.length; } // GBC that is reused throughout vertGBC = new GridBagConstraints(); vertGBC.gridx = 0; vertGBC.gridy = GridBagConstraints.RELATIVE; vertGBC.insets = new java.awt.Insets(3,1,3,1); // Graphics panels controlPanel = new JPanel(); controlPanel.setLayout(new GridBagLayout()); resizePanel = new JPanel(); resizePanel.setLayout(new BorderLayout()); // function parameter adjustment sliders JPanel sliderPanel[] = new JPanel[numSliders]; funcSlider = new DeviceSlider[numSliders]; mods = new ModifierGeneral[numSliders]; for(int slide = 0; slide < numSliders; slide++) { sliderPanel[slide] = new JPanel(); sliderPanel[slide].setBorder(new TitledBorder(null, sliderLabels[slide], TitledBorder.CENTER, TitledBorder.TOP)); funcSlider[slide] = new DeviceSlider(null); funcSlider[slide].setPrecision(1); funcSlider[slide].setMinimum(0); funcSlider[slide].setMaximum(10); funcSlider[slide].setNMajor(5); funcSlider[slide].setValue(5); funcSlider[slide].setShowValues(true); funcSlider[slide].setEditValues(false); mods[slide] = new ModifierGeneral(funcSlider[slide], "value"); funcSlider[slide].setModifier(mods[slide]); sliderPanel[slide].add(funcSlider[slide].graphic()); } // // Data point table // tableModel = new DeviceTableModelGeneric(null, new String[] {"X", "Y"}); table = new DeviceTable(tableModel); table.setPreferredSize(200, 200); table.setSize(200, 200); 
table.setTitle("Data Points"); tableModel.addTableModelListener(new TableChangeListener()); // Deleting a point DeviceButton delButton = new DeviceButton(null); delButton.setLabel("Delete Point(s)"); ((JPanel)table.graphic()).add(delButton.graphic(), vertGBC); Action deletePointAction = new Action() { public void actionPerformed() { int[] selRows = table.getSelectedRows(); while(selRows.length > 0) { if(selRows[0] == (tableModel.getRowCount()-1)) { break; } else { tableModel.deleteRow(selRows[0]); selRows = table.getSelectedRows(); } } plot.getPlot().repaint(); } }; delButton.setAction(deletePointAction); // Add table to the "control panel" controlPanel.add(table.graphic(), vertGBC); // Add parameter adjust scrollbars to a scrolled window which // is added to the "control panel" JPanel sPanel = new JPanel(); sPanel.setLayout(new GridBagLayout()); GridBagConstraints horizGBC = new GridBagConstraints(); horizGBC.gridx = GridBagConstraints.RELATIVE; horizGBC.gridy = 0; for(int slide = 0; slide < numSliders; slide++) { sPanel.add(sliderPanel[slide], verticalParameters ? vertGBC : horizGBC); } parameterPanel = new JPanel(); if (verticalParameters) { JScrollPane scrollPane = new JScrollPane(sPanel); scrollPane.setPreferredSize(new java.awt.Dimension(250, 320)); parameterPanel.add(scrollPane); controlPanel.add(parameterPanel, vertGBC); } else { parameterPanel.setLayout(new GridBagLayout()); parameterPanel.add(sPanel); } parameterPanel.setBorder(new TitledBorder(null, "Function Parameter Adjustment", TitledBorder.CENTER, TitledBorder.TOP)); // // Plot axis adjustment sliders // scalePanel = new JPanel(); scalePanel.setLayout(new GridBagLayout()); scalePanel.setBorder(new TitledBorder(null, "Plot Scaling", TitledBorder.CENTER, TitledBorder.TOP)); JPanel scaleTypePanel = new JPanel(); scaleTypePanel.setBorder(new TitledBorder(null, "Scaling Type", TitledBorder.CENTER, TitledBorder.TOP)); buttonAuto = new JCheckBox("Auto Scale Y Axis"); buttonAuto.setSelected(false); scaleTypePanel.add(buttonAuto); JPanel[] scalePanels = new JPanel[4]; String[] scaleTitles = new String[] {"Minimum", "Maximum", "Minimum", "Maximum"}; ModifierGeneral[] scaleMods = new ModifierGeneral[4]; for(int slide = MIN_X; slide <= MAX_Y; slide++) { scalePanels[slide] = new JPanel(); scalePanels[slide].setBorder(new TitledBorder(null, scaleTitles[slide], TitledBorder.CENTER, TitledBorder.TOP)); plotSizeSliders[slide] = new DeviceSlider(null); plotSizeSliders[slide].setPrecision(0); plotSizeSliders[slide].setMinimum(scaleMins[slide]); plotSizeSliders[slide].setMaximum(scaleMaxs[slide]); plotSizeSliders[slide].setNMajor(5); plotSizeSliders[slide].setValue(scaleInit[slide]); scaleMods[slide] = new ModifierGeneral(plotSizeSliders[slide], "value"); plotSizeSliders[slide].setModifier(scaleMods[slide]); scalePanels[slide].add(plotSizeSliders[slide].graphic()); } JPanel scaleXAxisPanel = new JPanel(); scaleXAxisPanel.setBorder(new TitledBorder(null, "X Axis Scaling", TitledBorder.CENTER, TitledBorder.TOP)); scaleXAxisPanel.add(scalePanels[MIN_X]); scaleXAxisPanel.add(scalePanels[MAX_X]); JPanel scaleYAxisPanel = new JPanel(); scaleYAxisPanel.setBorder(new TitledBorder(null, "Y Axis Scaling", TitledBorder.CENTER, TitledBorder.TOP)); scaleYAxisPanel.add(scalePanels[MIN_Y]); scaleYAxisPanel.add(scalePanels[MAX_Y]); scalePanel.add(scaleTypePanel, vertGBC); scalePanel.add(scaleXAxisPanel, vertGBC); scalePanel.add(scaleYAxisPanel, vertGBC); resizePanel.add(scalePanel, BorderLayout.SOUTH); // The Plot plot = new DisplayPlot(); 
plot.getPlot().setTitle("Function Display"); plot.setSize(650, 400); // Initialize functions displayed on plot functions = new Function[numFunctions]; dataSourceFuncs = new DataSourceFunction[numFunctions]; funcPumps = new DataPump[numFunctions]; for(int f = 0; f < numFunctions; f++) { functions[f] = fncts[f]; dataSourceFuncs[f] = new DataSourceFunction(funcNames[f],Null.DIMENSION,functions[f], TOTAL_X_SAMPLES,"x",Length.DIMENSION); funcPumps[f] = new DataPump(dataSourceFuncs[f], plot.getDataSet().makeDataSink()); } // point display on the plot final DataSourcePoints dspts = new DataSourcePoints("Independent Points", Null.DIMENSION); final DataPump ptPump = new DataPump(dspts, plot.getDataSet().makeDataSink()); plot.getPlot().setMarksStyle("dots", numFunctions); plot.getPlot().setConnected(false, numFunctions); // // Update action to pipe any data changes and redraw plot // updateAction = new Action() { public void actionPerformed() { // sniff out min and max y values from the functions. double maxY = Double.MIN_VALUE; double minY = Double.MAX_VALUE; for(int f = 0; f < numFunctions; f++) { dataSourceFuncs[f].getXSource().setXMax(plotSizeSliders[MAX_X].getValue()); dataSourceFuncs[f].getXSource().setXMin(plotSizeSliders[MIN_X].getValue()); dataSourceFuncs[f].update(); Data yFunc = dataSourceFuncs[f].getData(); for (int i=0; i<yFunc.getLength(); i++) { if (yFunc.getValue(i) > maxY) { maxY = yFunc.getValue(i); } if (yFunc.getValue(i) < minY) { minY = yFunc.getValue(i); } } funcPumps[f].actionPerformed(); } dspts.update(getPoints(X_DIM), getPoints(Y_DIM)); // we could also sniff min and max y here ptPump.actionPerformed(); // don't auto-scale in X. this means that entered data outside // the range won't show up. plot.getPlot().setXRange(plotSizeSliders[MIN_X].getValue(), plotSizeSliders[MAX_X].getValue()); if(buttonAuto.isSelected() == false) { plot.getPlot().setYRange(plotSizeSliders[MIN_Y].getValue(), plotSizeSliders[MAX_Y].getValue()); } else { plot.getPlot().setYRange(minY, maxY); } } }; for(int slide = 0; slide < numSliders; slide++) { funcSlider[slide].setPostAction(updateAction); } plotSizeSliders[MIN_X].setPostAction(updateAction); plotSizeSliders[MAX_X].setPostAction(updateAction); plotSizeSliders[MIN_Y].setPostAction(updateAction); plotSizeSliders[MAX_Y].setPostAction(updateAction); // // Update on scale type selection // buttonAuto.addActionListener(new ToggleButtonListener()); plot.getDataSet().setUpdatingOnAnyChange(true); // Update axis min/max values updateAction.actionPerformed(); } /** * Returns the top level panel that the function plot components sit on. */ public DisplayPlot getDisplayPlot() { return plot; } /** * Returns the top level panel that the function plot components sit on. */ public Component graphic() { return plot.graphic(); } /** * Returns the top level panel that the points table and (if parameters are * vertically-oriented) function parameter slider components sit on. */ public Component controlGraphic() { return controlPanel; } /** * Returns the panel that contains the parameter sliders. You probably * don't want this unless you have horizontal layout for the parameters. */ public Component parameterGraphic() { return parameterPanel; } /** * Returns the top level panel that the plot scale resizing components sit on. */ public Component resizeGraphic() { return resizePanel; } /** * Instructs the plot to redisplay any plotted functions with the most * recent data. 
Any points contained in the data point input table will be * displayed (if the data point input table is displayed). The plot axis * limits are updated. */ public void refresh() { updateAction.actionPerformed(); } /** * Returns the plot scale slider for the given parameter (values can be * DevicePlotPoints.MIN_X, MAX_X, MIN_Y or MAX_Y. */ public DeviceSlider getPlotSizeSlider(int minMaxXY) { return plotSizeSliders[minMaxXY]; } /** * Sets the minimum and maximum X-axis limits on the function display * sliders. * @param min Minimum X-axis value * @param max Maximum X-axis value * @param middle Value at which to split the min/max X-axis limit sliders */ public void setXScale(double min, double max, double middle) { if(min < max && middle > min && middle < max) { scaleMins[MIN_X] = min; scaleMins[MAX_X] = middle; scaleMaxs[MIN_X] = middle; scaleMaxs[MAX_X] = max; plotSizeSliders[MIN_X].setMinimum(scaleMins[MIN_X]); plotSizeSliders[MAX_X].setMinimum(scaleMins[MAX_X]); plotSizeSliders[MIN_X].setMaximum(scaleMaxs[MIN_X]); plotSizeSliders[MAX_X].setMaximum(scaleMaxs[MAX_X]); } } /** * Sets the minimum and maximum Y-axis limits on the function display * sliders. * @param min Minimum Y-axis value * @param max Maximum Y-axis value * @param middle Value at which to split the min/max Y-axis limit sliders */ public void setYScale(double min, double max, double middle) { if(min < max && middle > min && middle < max) { scaleMins[MIN_Y] = min; scaleMins[MAX_Y] = middle; scaleMaxs[MIN_Y] = middle; scaleMaxs[MAX_Y] = max; plotSizeSliders[MIN_Y].setMinimum(scaleMins[MIN_Y]); plotSizeSliders[MAX_Y].setMinimum(scaleMins[MAX_Y]); plotSizeSliders[MIN_Y].setMaximum(scaleMaxs[MIN_Y]); plotSizeSliders[MAX_Y].setMaximum(scaleMaxs[MAX_Y]); } } /** * Sets the visibility of the plot axis limit sliders. Default is true(show). * @param show show/unshow boolean */ public void showScale(boolean show) { if(show == false) { resizePanel.remove(scalePanel); } else { resizePanel.add(scalePanel, BorderLayout.SOUTH); } } /** * Sets the visibility of the data point input table. Default is true(show). * @param show show/unshow boolean */ public void showPointInput(boolean show) { if(show == false) { controlPanel.remove(table.graphic()); } else { controlPanel.add(table.graphic(), vertGBC); } } /** * Returns the value of the function parameter slider. * @param desc label of parameter setting slider. The label was passed * into the ctor. * @return slider setting of the parameter value. */ public double getParameterValue(String desc) { double parmValue = 0.0; for(int i = 0; i < funcParmLabels.length; i++) { if(desc.compareTo(funcParmLabels[i]) == 0) { parmValue = mods[i].getValue(); break; } } return parmValue; } /** * Sets the minimum and maximum allowable values for a function parameter. * @param desc label of parameter setting slider. The label was passed * into the ctor. * @param min Minimum allowable value for the function parameter. * @param max Maxminimum allowable value for the function parameter. 
*/ public void setParameterLimits(String desc, double min, double max) { for(int i = 0; i < funcParmLabels.length; i++) { if(desc.compareTo(funcParmLabels[i]) == 0) { funcSlider[i].setMinimum(min); funcSlider[i].setMaximum(max); if(funcSlider[i].getValue() < min) { funcSlider[i].setValue(min); } else if(funcSlider[i].getValue() > max) { funcSlider[i].setValue(max); } break; } } } public DeviceTableModelGeneric getTableModel() { return tableModel; } private double[] getPoints(int column) { double[] points = null; int nonBlankRowCount = 0; int numRows = tableModel.getRowCount(); for(int row = 0; row < numRows; row++) { if(((String)tableModel.getValueAt(row, column)).compareTo("") != 0) { nonBlankRowCount++; } } points = new double[nonBlankRowCount]; for(int row = 0; row < numRows; row++) { if(((String)tableModel.getValueAt(row, column)).compareTo("") != 0) { points[row] = Double.valueOf(((String)tableModel.getValueAt(row, column))).doubleValue(); } } return points; } /** * Returns the slider for the given parameter. */ public DeviceSlider getSlider(String desc) { for(int i = 0; i < funcParmLabels.length; i++) { if(desc.compareTo(funcParmLabels[i]) == 0) { return funcSlider[i]; } } return null; } public void setAutoScale(boolean isAutoScale) { buttonAuto.setSelected(isAutoScale); new ToggleButtonListener().actionPerformed(null); updateAction.actionPerformed(); } private class ToggleButtonListener implements ActionListener { public void actionPerformed(ActionEvent e) { if(buttonAuto.isSelected()) { plotSizeSliders[MIN_X].getSlider().setEnabled(true); plotSizeSliders[MAX_X].getSlider().setEnabled(true); plotSizeSliders[MIN_Y].getSlider().setEnabled(false); plotSizeSliders[MAX_Y].getSlider().setEnabled(false); } else { plotSizeSliders[MIN_X].getSlider().setEnabled(true); plotSizeSliders[MAX_X].getSlider().setEnabled(true); plotSizeSliders[MIN_Y].getSlider().setEnabled(true); plotSizeSliders[MAX_Y].getSlider().setEnabled(true); } updateAction.actionPerformed(); } } // TODO : Hmmm, no order gaurenteed between this listener and other listeners... // Like the one in DeviceTableModelGeneric private class TableChangeListener implements TableModelListener { public void tableChanged(TableModelEvent e) { // If a row changed and is a complete entry(both x and y values) // plot the points on the display. if(e.getType() == TableModelEvent.UPDATE) { Object blank = ""; if(tableModel.getValueAt(e.getFirstRow(), 0).equals(blank) == false && tableModel.getValueAt(e.getFirstRow(), 1).equals(blank) == false) { updateAction.actionPerformed(); } } // If a row is removed, remove it from the display. else if(e.getType() == TableModelEvent.DELETE) { updateAction.actionPerformed(); } } } // end class TableChangeListener }
use entered y data points for manual autoscale
etomica/graphics/DevicePlotPoints.java
use entered y data points for manual autoscale
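The record above is the etomica commit "use entered y data points for manual autoscale": inside updateAction, the new_contents folds the Y values typed into the data-point table into the same min/max scan that already covered the sampled function values, so auto-scaling the Y axis respects user-entered points. A minimal standalone sketch of that scan follows, with plain arrays standing in for the etomica Data and table types; `YRangeScan` and its method names are hypothetical.

```java
// Hypothetical sketch of the extended min/max scan in updateAction.
public final class YRangeScan {

    public static double[] yRange(double[][] functionSamples, double[] enteredPoints) {
        // the original initialises maxY with Double.MIN_VALUE, which is a tiny
        // positive number; -Double.MAX_VALUE is the safer "smallest" start value
        double minY = Double.MAX_VALUE;
        double maxY = -Double.MAX_VALUE;

        // existing behaviour: scan the sampled function values
        for (double[] samples : functionSamples) {
            for (double y : samples) {
                if (y < minY) minY = y;
                if (y > maxY) maxY = y;
            }
        }

        // the change in this commit: entered table points widen the range too
        for (double y : enteredPoints) {
            if (y < minY) minY = y;
            if (y > maxY) maxY = y;
        }
        return new double[] { minY, maxY };
    }

    public static void main(String[] args) {
        double[] r = yRange(new double[][] { { -3.0, 7.5 } }, new double[] { 12.0, -8.0 });
        System.out.println(r[0] + " .. " + r[1]); // -8.0 .. 12.0
    }
}
```

As the plot code itself notes, the X range is still taken from the sliders only, so entered points outside that range remain off-screen by design.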
Java
mpl-2.0
e48d22617f2927b2841f4fec34657c6aa4e0c91a
0
JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core
/* * MultiStratumBackend.java * * Created on 24. March 2004, 13:48 */ package mod._cfgmgr2; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.uno.UnoRuntime; import com.sun.star.uno.XInterface; import com.sun.star.util.XStringSubstitution; import java.io.PrintWriter; import lib.TestCase; import lib.TestEnvironment; import lib.TestParameters; public class MultiStratumBackend extends TestCase { protected TestEnvironment createTestEnvironment(TestParameters tParam, PrintWriter log) { XInterface oObj = null; Object[] args = new Object[1]; String AdminURL = ""; try { XStringSubstitution sts = createStringSubstitution( (XMultiServiceFactory) tParam.getMSF()); AdminURL = sts.getSubstituteVariableValue("$(inst)") + "/share/registry"; args[0] = ((XMultiServiceFactory)tParam.getMSF()) .createInstance("com.sun.star.configuration.bootstrap.BootstrapContext"); oObj = (XInterface) ((XMultiServiceFactory)tParam.getMSF()) .createInstanceWithArguments("com.sun.star.comp.configuration.backend.MultiStratumBackend",args); } catch (com.sun.star.uno.Exception e) { } log.println("Implementation name: "+ util.utils.getImplName(oObj)); TestEnvironment tEnv = new TestEnvironment(oObj); //objRelation for XInitialization tEnv.addObjRelation("XInitialization.args", args); //objRelation for XBackendEntities tEnv.addObjRelation("NoAdmin", AdminURL); tEnv.addObjRelation("MSB", Boolean.TRUE); return tEnv; } public static XStringSubstitution createStringSubstitution(XMultiServiceFactory xMSF) { Object xPathSubst = null; try { xPathSubst = xMSF.createInstance( "com.sun.star.util.PathSubstitution"); } catch (com.sun.star.uno.Exception e) { e.printStackTrace(); } if (xPathSubst != null) { return (XStringSubstitution) UnoRuntime.queryInterface( XStringSubstitution.class, xPathSubst); } else { return null; } } }
qadevOOo/tests/java/mod/_cfgmgr2/MultiStratumBackend.java
/* * MultiStratumBackend.java * * Created on 24. Mrz 2004, 13:48 */ package mod._cfgmgr2; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.uno.UnoRuntime; import com.sun.star.uno.XInterface; import com.sun.star.util.XStringSubstitution; import java.io.PrintWriter; import lib.TestCase; import lib.TestEnvironment; import lib.TestParameters; public class MultiStratumBackend extends TestCase { protected TestEnvironment createTestEnvironment(TestParameters tParam, PrintWriter log) { XInterface oObj = null; Object[] args = new Object[1]; String AdminURL = ""; try { XStringSubstitution sts = createStringSubstitution( (XMultiServiceFactory) tParam.getMSF()); AdminURL = sts.getSubstituteVariableValue("$(inst)") + "/share/registry"; args[0] = ((XMultiServiceFactory)tParam.getMSF()) .createInstance("com.sun.star.configuration.bootstrap.BootstrapContext"); oObj = (XInterface) ((XMultiServiceFactory)tParam.getMSF()) .createInstanceWithArguments("com.sun.star.comp.configuration.backend.MultiStratumBackend",args); } catch (com.sun.star.uno.Exception e) { } log.println("Implementation name: "+ util.utils.getImplName(oObj)); TestEnvironment tEnv = new TestEnvironment(oObj); //objRelation for XInitialization tEnv.addObjRelation("XInitialization.args", args); //objRelation for XBackendEntities tEnv.addObjRelation("NoAdmin", AdminURL); tEnv.addObjRelation("MSB", Boolean.TRUE); return tEnv; } public static XStringSubstitution createStringSubstitution(XMultiServiceFactory xMSF) { Object xPathSubst = null; try { xPathSubst = xMSF.createInstance( "com.sun.star.util.PathSubstitution"); } catch (com.sun.star.uno.Exception e) { e.printStackTrace(); } if (xPathSubst != null) { return (XStringSubstitution) UnoRuntime.queryInterface( XStringSubstitution.class, xPathSubst); } else { return null; } } }
INTEGRATION: CWS gcj1 (1.2.52); FILE MERGED 2004/12/01 15:55:48 cmc 1.2.52.1: #i38188# gcj is picky about using non ascii characters in java source, ascii-ize multi-copied bad single quote symbol, handle some uncompilable directories
qadevOOo/tests/java/mod/_cfgmgr2/MultiStratumBackend.java
INTEGRATION: CWS gcj1 (1.2.52); FILE MERGED 2004/12/01 15:55:48 cmc 1.2.52.1: #i38188# gcj is picky about using non ascii characters in java source, ascii-ize multi-copied bad single quote symbol, handle some uncompilable directories
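The change in the record above is confined to the header comment: the original contained a non-ASCII month name (rendered as "Mrz" in this dump), which the gcj compiler is picky about per the commit message, so the commit rewrites it as plain ASCII "March". A hypothetical helper, not part of the record, that flags such characters in a source line:

```java
// Hypothetical scan for non-ASCII characters of the kind this commit removes.
import java.util.ArrayList;
import java.util.List;

public final class NonAsciiScan {
    public static List<Integer> nonAsciiColumns(String line) {
        List<Integer> cols = new ArrayList<>();
        for (int i = 0; i < line.length(); i++) {
            if (line.charAt(i) > 127) {
                cols.add(i);
            }
        }
        return cols;
    }

    public static void main(String[] args) {
        // "M\u00e4rz" is the German month name the commit replaces with "March"
        System.out.println(nonAsciiColumns("Created on 24. M\u00e4rz 2004")); // [16]
    }
}
```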
Java
agpl-3.0
302c2a61f2272fb557e00941d8c8dbd3310cc7d7
0
elki-project/elki,elki-project/elki,elki-project/elki
package de.lmu.ifi.dbs.distance; import de.lmu.ifi.dbs.data.RealVector; import de.lmu.ifi.dbs.database.Database; import de.lmu.ifi.dbs.linearalgebra.Matrix; import de.lmu.ifi.dbs.pca.CorrelationPCA; import de.lmu.ifi.dbs.preprocessing.CorrelationDimensionPreprocessor; import de.lmu.ifi.dbs.preprocessing.KnnQueryBasedCorrelationDimensionPreprocessor; import de.lmu.ifi.dbs.utilities.optionhandling.NoParameterValueException; import de.lmu.ifi.dbs.utilities.optionhandling.OptionHandler; import de.lmu.ifi.dbs.utilities.optionhandling.UnusedParameterException; import java.util.Hashtable; import java.util.Map; import java.util.regex.Pattern; /** * Abstract class that provides the Correlation distance for real valued vectors. * All subclasses must implement a method to process the preprocessing step * in terms of doing the PCA for each object of the database. * * @author Elke Achtert (<a href="mailto:[email protected]">[email protected]</a>) */ public class CorrelationDistanceFunction extends RealVectorDistanceFunction { /** * The association id to associate a pca to an object. */ public static final String ASSOCIATION_ID_PCA = CorrelationDimensionPreprocessor.ASSOCIATION_ID_PCA; /** * Indicates a separator. */ public static final Pattern SEPARATOR = Pattern.compile("x"); /** * The default value for delta. */ public static final double DEFAULT_DELTA = 0.25; /** * Option string for parameter delta. */ public static final String DELTA_P = "delta"; /** * Description for parameter delta. */ public static final String DELTA_D = "<double>a double specifying the threshold of a " + "distance between a vector q and a given space " + "that indicates that q adds a new dimension " + "to the space (default is delta = " + DEFAULT_DELTA + ")"; /** * The default preprocessor class name. */ public static final Class DEFAULT_PREPROCESSOR_CLASS = KnnQueryBasedCorrelationDimensionPreprocessor.class; /** * Parameter for preprocessor. */ public static final String PREPROCESSOR_CLASS_P = "preprocessor"; /** * Description for parameter preprocessor. */ public static final String PREPROCESSOR_CLASS_D = "<classname>the preprocessor to determine the correlation dimensions " + "of the objects - must implement " + CorrelationDimensionPreprocessor.class.getName() + ". " + "(Default: " + DEFAULT_PREPROCESSOR_CLASS.getName() + ")."; /** * OptionHandler for handling options. */ protected OptionHandler optionHandler; /** * The database that holds the associations for the MetricalObject * for which the distances should be computed. */ protected Database db; /** * The threshold of a distance between a vector q and a given space * that indicates that q adds a new dimension to the space. */ private double delta; /** * The preprocessor to determine the correlation dimensions of the objects. */ private CorrelationDimensionPreprocessor preprocessor; /** * Provides a CorrelationDistanceFunction with a pattern defined to accept * Strings that define an Integer followed by a separator followed by a Double. */ public CorrelationDistanceFunction() { super(Pattern.compile("\\d+" + SEPARATOR.pattern() + "\\d+(\\.\\d+)?([eE][-]?\\d+)?")); Map<String, String> parameterToDescription = new Hashtable<String, String>(); parameterToDescription.put(DELTA_P + OptionHandler.EXPECTS_VALUE, DELTA_D); parameterToDescription.put(PREPROCESSOR_CLASS_P + OptionHandler.EXPECTS_VALUE, PREPROCESSOR_CLASS_D); optionHandler = new OptionHandler(parameterToDescription, ""); } /** * Provides the Correlation distance between the given two vectors. 
* * @return the Correlation distance between the given two vectors as an * instance of {@link CorrelationDistance CorrelationDistance}. * @see RealVectorDistanceFunction#distance(de.lmu.ifi.dbs.data.RealVector, * de.lmu.ifi.dbs.data.RealVector) */ public Distance distance(RealVector rv1, RealVector rv2) { return correlationDistance(rv1, rv2); } /** * Provides a distance suitable to this DistanceFunction * based on the given pattern. * * @param pattern A pattern defining a distance suitable to this DistanceFunction * @return a distance suitable to this DistanceFunction * based on the given pattern * @throws IllegalArgumentException if the given pattern is not compatible * with the requirements of this DistanceFunction */ public Distance valueOf(String pattern) throws IllegalArgumentException { if (matches(pattern)) { String[] values = SEPARATOR.split(pattern); return new CorrelationDistance(Integer.parseInt(values[0]), Double.parseDouble(values[1])); } else { throw new IllegalArgumentException("Given pattern \"" + pattern + "\" does not match required pattern \"" + requiredInputPattern() + "\""); } } /** * Provides an infinite distance. * * @return an infinite distance */ public Distance infiniteDistance() { return new CorrelationDistance(Integer.MAX_VALUE, Double.POSITIVE_INFINITY); } /** * Provides a null distance. * * @return a null distance */ public Distance nullDistance() { return new CorrelationDistance(0, 0); } /** * Provides an undefined distance. * * @return an undefined distance */ public Distance undefinedDistance() { return new CorrelationDistance(-1, Double.NaN); } /** * Returns a description of the class and the required parameters. * * @return String a description of the class and the required parameters */ public String description() { return optionHandler.usage("Correlation distance for RealVectors. " + "Pattern for defining a range: \"" + requiredInputPattern() + "\".",false); } /** * Computes the necessary PCA associations for * each object of the database. * Afterwards the database is set to get later on * the PCA associations needed for distance computing. * * @param db the database to be set */ public void setDatabase(Database db) { this.db = db; preprocessor.run(db); } /** * Sets the values for the parameters delta and preprocessor if specified. * If the parameters are not specified default values are set. 
* * @see de.lmu.ifi.dbs.utilities.optionhandling.Parameterizable#setParameters(String[]) */ public String[] setParameters(String[] args) throws IllegalArgumentException { String[] remainingParameters = optionHandler.grabOptions(args); if (optionHandler.isSet(DELTA_P)) { try { delta = Double.parseDouble(optionHandler.getOptionValue(DELTA_P)); if (delta < 0) throw new IllegalArgumentException("CorrelationDistanceFunction: delta has to be greater than zero!"); } catch (UnusedParameterException e) { throw new IllegalArgumentException(e.getMessage()); } catch (NoParameterValueException e) { throw new IllegalArgumentException(e.getMessage()); } } else { delta = DEFAULT_DELTA; } if (optionHandler.isSet(PREPROCESSOR_CLASS_P)) { try { preprocessor = (CorrelationDimensionPreprocessor) Class.forName(optionHandler.getOptionValue(PREPROCESSOR_CLASS_P)).newInstance(); } catch (UnusedParameterException e) { throw new IllegalArgumentException(e.getMessage()); } catch (NoParameterValueException e) { throw new IllegalArgumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new IllegalArgumentException(e.getMessage()); } catch (ClassNotFoundException e) { throw new IllegalArgumentException(e.getMessage()); } catch (InstantiationException e) { throw new IllegalArgumentException(e.getMessage()); } } else { try { preprocessor = (CorrelationDimensionPreprocessor) DEFAULT_PREPROCESSOR_CLASS.newInstance(); } catch (InstantiationException e) { throw new IllegalArgumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new IllegalArgumentException(e.getMessage()); } } return preprocessor.setParameters(remainingParameters); } /** * Computes the correlation distance between the two specified vectors. * * @param rv1 first RealVector * @param rv2 second RealVector * @return the correlation distance between the two specified vectors */ private CorrelationDistance correlationDistance(RealVector rv1, RealVector rv2) { // TODO nur in eine Richtung? 
int dim = rv1.getDimensionality(); // pca of rv1 CorrelationPCA pca1 = (CorrelationPCA) db.getAssociation(ASSOCIATION_ID_PCA, rv1.getID()); Matrix v1 = pca1.getEigenvectors(); Matrix v1_strong = pca1.strongEigenVectors(); Matrix e1_czech = pca1.getSelectionMatrixOfStrongEigenvectors().copy(); int lambda1 = pca1.getCorrelationDimension(); // int lambda1 = 0; // pca of rv2 CorrelationPCA pca2 = (CorrelationPCA) db.getAssociation(ASSOCIATION_ID_PCA, rv2.getID()); Matrix v2 = pca2.getEigenvectors(); Matrix v2_strong = pca2.strongEigenVectors(); Matrix e2_czech = pca2.getSelectionMatrixOfStrongEigenvectors(); int lambda2 = pca2.getCorrelationDimension(); // int lambda2 = 0; // for all strong eigenvectors of rv2 Matrix m1_czech = v1.times(e1_czech).times(v1.transpose()); for (int i = 0; i < v2_strong.getColumnDimension(); i++) { Matrix v2_i = v2_strong.getColumn(i); // check, if distance of v2_i to the space of rv1 > delta // (i.e., if v2_i spans up a new dimension) double dist = Math.sqrt(v2_i.transpose().times(v2_i).get(0, 0) - v2_i.transpose().times(m1_czech).times(v2_i).get(0, 0)); // if so, insert v2_i into v1 and adjust v1 // and compute m1_czech new, increase lambda1 if (lambda1 < dim && dist > delta) { adjust(v1, e1_czech, v2_i, lambda1++); m1_czech = v1.times(e1_czech).times(v1.transpose()); } } // for all strong eigenvectors of rv1 Matrix m2_czech = v2.times(e2_czech).times(v2.transpose()); for (int i = 0; i < v1_strong.getColumnDimension(); i++) { Matrix v1_i = v1_strong.getColumn(i); // check, if distance of v1_i to the space of rv2 > delta // (i.e., if v1_i spans up a new dimension) double dist = Math.sqrt(v1_i.transpose().times(v1_i).get(0, 0) - v1_i.transpose().times(m2_czech).times(v1_i).get(0, 0)); // if so, insert v1_i into v2 and adjust v2 // and compute m2_czech new , increase lambda2 if (lambda2 < dim && dist > delta) { adjust(v2, e2_czech, v1_i, lambda2++); m2_czech = v2.times(e2_czech).times(v2.transpose()); } } int correlationDistance = Math.max(lambda1, lambda2); // TODO // Matrix m_1_czech = v1.times(e1_czech).times(v1.transpose()); // double dist_1 = normalizedDistance(rv1, rv2, m1_czech); // Matrix m_2_czech = v2.times(e2_czech).times(v2.transpose()); // double dist_2 = normalizedDistance(rv1, rv2, m2_czech); // if (dist_1 > delta || dist_2 > delta) { // correlationDistance++; // } double euclideanDistance = euclideanDistance(rv1, rv2); return new CorrelationDistance(correlationDistance, euclideanDistance); } /** * Inserts the specified vector into the given orthonormal matrix <code>v</code> at * column <code>corrDim</code>. After insertion the matrix <code>v</code> * is orthonormalized and column <code>corrDim</code> of matrix * <code>e_czech</code> is set to the <code>corrDim</code>-th unit vector.. 
* * @param v the orthonormal matrix of the eigenvectors * @param e_czech the selection matrix of the strong eigenvectors * @param vector the vector to be inserted * @param corrDim the column at which the vector should be inserted */ private void adjust(Matrix v, Matrix e_czech, Matrix vector, int corrDim) { int dim = v.getRowDimension(); // set e_czech[corrDim][corrDim] := 1 e_czech.set(corrDim, corrDim, 1); // normalize v Matrix v_i = vector.copy(); Matrix sum = new Matrix(dim, 1); for (int k = 0; k < corrDim; k++) { Matrix v_k = v.getColumn(k); sum = sum.plus(v_k.times(v_i.scalarProduct(0, v_k, 0))); } v_i = v_i.minus(sum); v_i = v_i.times(1.0 / v_i.euclideanNorm(0)); v.setColumn(corrDim, v_i); } /** * Computes the Euclidean distance between the given two vectors. * * @param rv1 first RealVector * @param rv2 second RealVector * @return the Euclidean distance between the given two vectors */ private double euclideanDistance(RealVector rv1, RealVector rv2) { if (rv1.getDimensionality() != rv2.getDimensionality()) { throw new IllegalArgumentException("Different dimensionality of RealVectors\n first argument: " + rv1.toString() + "\n second argument: " + rv2.toString()); } double sqrDist = 0; for (int i = 1; i <= rv1.getDimensionality(); i++) { double diff = rv1.getValue(i) - rv2.getValue(i); sqrDist += diff * diff; } return Math.sqrt(sqrDist); } }
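A minimal usage sketch for the distance function above, assuming a Database and two RealVector objects are obtained elsewhere; only methods defined in the file above are used, and the wrapper class and method names of the sketch are illustrative. Note that setParameters must be called before setDatabase, since the preprocessor is only instantiated there.

import de.lmu.ifi.dbs.data.RealVector;
import de.lmu.ifi.dbs.database.Database;
import de.lmu.ifi.dbs.distance.CorrelationDistanceFunction;
import de.lmu.ifi.dbs.distance.Distance;

/** Sketch only: assumed wiring of CorrelationDistanceFunction; db, rv1 and rv2 come from elsewhere. */
public class CorrelationDistanceExample {
    static Distance correlationDistanceOf(Database db, RealVector rv1, RealVector rv2) {
        CorrelationDistanceFunction function = new CorrelationDistanceFunction();
        function.setParameters(new String[0]); // no options: DEFAULT_DELTA and the default preprocessor are used
        function.setDatabase(db);              // runs the preprocessor, associating a PCA with every object
        return function.distance(rv1, rv2);    // a CorrelationDistance: correlation dimension plus Euclidean distance
    }
}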
src/de/lmu/ifi/dbs/distance/CorrelationDistanceFunction.java
package de.lmu.ifi.dbs.distance; import de.lmu.ifi.dbs.data.RealVector; import de.lmu.ifi.dbs.database.Database; import de.lmu.ifi.dbs.linearalgebra.Matrix; import de.lmu.ifi.dbs.pca.CorrelationPCA; import de.lmu.ifi.dbs.preprocessing.CorrelationDimensionPreprocessor; import de.lmu.ifi.dbs.preprocessing.KnnQueryBasedCorrelationDimensionPreprocessor; import de.lmu.ifi.dbs.utilities.optionhandling.NoParameterValueException; import de.lmu.ifi.dbs.utilities.optionhandling.OptionHandler; import de.lmu.ifi.dbs.utilities.optionhandling.UnusedParameterException; import java.util.Hashtable; import java.util.Map; import java.util.regex.Pattern; /** * Abstract class that provides the Correlation distance for real valued vectors. * All subclasses must implement a method to process the preprocessing step * in terms of doing the PCA for each object of the database. * * @author Elke Achtert (<a href="mailto:[email protected]">[email protected]</a>) */ public class CorrelationDistanceFunction extends RealVectorDistanceFunction { /** * The association id to associate a pca to an object. */ public static final String ASSOCIATION_ID_PCA = CorrelationDimensionPreprocessor.ASSOCIATION_ID_PCA; /** * Indicates a separator. */ public static final Pattern SEPARATOR = Pattern.compile("x"); /** * The default value for delta. */ public static final double DEFAULT_DELTA = 0.25; /** * Option string for parameter delta. */ public static final String DELTA_P = "delta"; /** * Description for parameter delta. */ public static final String DELTA_D = "<double>a double specifying the threshold of a " + "distance between a vector q and a given space " + "that indicates that q adds a new dimension " + "to the space (default is delta = " + DEFAULT_DELTA + ")"; /** * The default preprocessor class name. */ public static final Class DEFAULT_PREPROCESSOR_CLASS = KnnQueryBasedCorrelationDimensionPreprocessor.class; /** * Parameter for preprocessor. */ public static final String PREPROCESSOR_CLASS_P = "preprocessor"; /** * Description for parameter preprocessor. */ public static final String PREPROCESSOR_CLASS_D = "<classname>the preprocessor to determine the correlation dimensions " + "of the objects - must implement " + CorrelationDimensionPreprocessor.class.getName() + ". " + "(Default: " + DEFAULT_PREPROCESSOR_CLASS.getName() + ")."; /** * OptionHandler for handling options. */ protected OptionHandler optionHandler; /** * The database that holds the associations for the MetricalObject * for which the distances should be computed. */ protected Database db; /** * The threshold of a distance between a vector q and a given space * that indicates that q adds a new dimension to the space. */ private double delta; /** * The preprocessor to determine the correlation dimensions of the objects. */ private CorrelationDimensionPreprocessor preprocessor; /** * Provides a CorrelationDistanceFunction with a pattern defined to accept * Strings that define an Integer followed by a separator followed by a Double. */ public CorrelationDistanceFunction() { super(Pattern.compile("\\d+" + SEPARATOR.pattern() + "\\d+(\\.\\d+)?([eE][-]?\\d+)?")); Map<String, String> parameterToDescription = new Hashtable<String, String>(); parameterToDescription.put(DELTA_P + OptionHandler.EXPECTS_VALUE, DELTA_D); parameterToDescription.put(PREPROCESSOR_CLASS_P + OptionHandler.EXPECTS_VALUE, PREPROCESSOR_CLASS_D); optionHandler = new OptionHandler(parameterToDescription, ""); } /** * Provides the Correlation distance between the given two vectors. 
* * @return the Correlation distance between the given two vectors as an * instance of {@link CorrelationDistance CorrelationDistance}. * @see RealVectorDistanceFunction#distance(de.lmu.ifi.dbs.data.RealVector, * de.lmu.ifi.dbs.data.RealVector) */ public Distance distance(RealVector rv1, RealVector rv2) { return correlationDistance(rv1, rv2); } /** * Provides a distance suitable to this DistanceFunction * based on the given pattern. * * @param pattern A pattern defining a distance suitable to this DistanceFunction * @return a distance suitable to this DistanceFunction * based on the given pattern * @throws IllegalArgumentException if the given pattern is not compatible * with the requirements of this DistanceFunction */ public Distance valueOf(String pattern) throws IllegalArgumentException { if (matches(pattern)) { String[] values = SEPARATOR.split(pattern); return new CorrelationDistance(Integer.parseInt(values[0]), Double.parseDouble(values[1])); } else { throw new IllegalArgumentException("Given pattern \"" + pattern + "\" does not match required pattern \"" + requiredInputPattern() + "\""); } } /** * Provides an infinite distance. * * @return an infinite distance */ public Distance infiniteDistance() { return new CorrelationDistance(Integer.MAX_VALUE, Double.POSITIVE_INFINITY); } /** * Provides a null distance. * * @return a null distance */ public Distance nullDistance() { return new CorrelationDistance(0, 0); } /** * Provides an undefined distance. * * @return an undefined distance */ public Distance undefinedDistance() { return new CorrelationDistance(-1, Double.NaN); } /** * Returns a description of the class and the required parameters. * * @return String a description of the class and the required parameters */ public String description() { return "Correlation distance for RealVectors. No parameters required. " + "Pattern for defining a range: \"" + requiredInputPattern() + "\"."; } /** * Computes the necessary PCA associations for * each object of the database. * Afterwards the database is set to get later on * the PCA associations needed for distance computing. * * @param db the database to be set */ public void setDatabase(Database db) { this.db = db; preprocessor.run(db); } /** * Sets the values for the parameters delta and preprocessor if specified. * If the parameters are not specified default values are set. 
* * @see de.lmu.ifi.dbs.utilities.optionhandling.Parameterizable#setParameters(String[]) */ public String[] setParameters(String[] args) throws IllegalArgumentException { String[] remainingParameters = optionHandler.grabOptions(args); if (optionHandler.isSet(DELTA_P)) { try { delta = Double.parseDouble(optionHandler.getOptionValue(DELTA_P)); if (delta < 0) throw new IllegalArgumentException("CorrelationDistanceFunction: delta has to be greater than or equal to zero!"); } catch (UnusedParameterException e) { throw new IllegalArgumentException(e.getMessage()); } catch (NoParameterValueException e) { throw new IllegalArgumentException(e.getMessage()); } } else { delta = DEFAULT_DELTA; } if (optionHandler.isSet(PREPROCESSOR_CLASS_P)) { try { preprocessor = (CorrelationDimensionPreprocessor) Class.forName(optionHandler.getOptionValue(PREPROCESSOR_CLASS_P)).newInstance(); } catch (UnusedParameterException e) { throw new IllegalArgumentException(e.getMessage()); } catch (NoParameterValueException e) { throw new IllegalArgumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new IllegalArgumentException(e.getMessage()); } catch (ClassNotFoundException e) { throw new IllegalArgumentException(e.getMessage()); } catch (InstantiationException e) { throw new IllegalArgumentException(e.getMessage()); } } else { try { preprocessor = (CorrelationDimensionPreprocessor) DEFAULT_PREPROCESSOR_CLASS.newInstance(); } catch (InstantiationException e) { throw new IllegalArgumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new IllegalArgumentException(e.getMessage()); } } return preprocessor.setParameters(remainingParameters); } /** * Computes the correlation distance between the two specified vectors. * * @param rv1 first RealVector * @param rv2 second RealVector * @return the correlation distance between the two specified vectors */ private CorrelationDistance correlationDistance(RealVector rv1, RealVector rv2) { // TODO only in one direction? 
int dim = rv1.getDimensionality(); // pca of rv1 CorrelationPCA pca1 = (CorrelationPCA) db.getAssociation(ASSOCIATION_ID_PCA, rv1.getID()); Matrix v1 = pca1.getEigenvectors(); Matrix v1_strong = pca1.strongEigenVectors(); Matrix e1_czech = pca1.getSelectionMatrixOfStrongEigenvectors().copy(); int lambda1 = pca1.getCorrelationDimension(); // int lambda1 = 0; // pca of rv2 CorrelationPCA pca2 = (CorrelationPCA) db.getAssociation(ASSOCIATION_ID_PCA, rv2.getID()); Matrix v2 = pca2.getEigenvectors(); Matrix v2_strong = pca2.strongEigenVectors(); Matrix e2_czech = pca2.getSelectionMatrixOfStrongEigenvectors(); int lambda2 = pca2.getCorrelationDimension(); // int lambda2 = 0; // for all strong eigenvectors of rv2 Matrix m1_czech = v1.times(e1_czech).times(v1.transpose()); for (int i = 0; i < v2_strong.getColumnDimension(); i++) { Matrix v2_i = v2_strong.getColumn(i); // check, if distance of v2_i to the space of rv1 > delta // (i.e., if v2_i spans up a new dimension) double dist = Math.sqrt(v2_i.transpose().times(v2_i).get(0, 0) - v2_i.transpose().times(m1_czech).times(v2_i).get(0, 0)); // if so, insert v2_i into v1 and adjust v1 // and compute m1_czech new, increase lambda1 if (lambda1 < dim && dist > delta) { adjust(v1, e1_czech, v2_i, lambda1++); m1_czech = v1.times(e1_czech).times(v1.transpose()); } } // for all strong eigenvectors of rv1 Matrix m2_czech = v2.times(e2_czech).times(v2.transpose()); for (int i = 0; i < v1_strong.getColumnDimension(); i++) { Matrix v1_i = v1_strong.getColumn(i); // check, if distance of v1_i to the space of rv2 > delta // (i.e., if v1_i spans up a new dimension) double dist = Math.sqrt(v1_i.transpose().times(v1_i).get(0, 0) - v1_i.transpose().times(m2_czech).times(v1_i).get(0, 0)); // if so, insert v1_i into v2 and adjust v2 // and compute m2_czech new , increase lambda2 if (lambda2 < dim && dist > delta) { adjust(v2, e2_czech, v1_i, lambda2++); m2_czech = v2.times(e2_czech).times(v2.transpose()); } } int correlationDistance = Math.max(lambda1, lambda2); // TODO // Matrix m_1_czech = v1.times(e1_czech).times(v1.transpose()); // double dist_1 = normalizedDistance(rv1, rv2, m1_czech); // Matrix m_2_czech = v2.times(e2_czech).times(v2.transpose()); // double dist_2 = normalizedDistance(rv1, rv2, m2_czech); // if (dist_1 > delta || dist_2 > delta) { // correlationDistance++; // } double euclideanDistance = euclideanDistance(rv1, rv2); return new CorrelationDistance(correlationDistance, euclideanDistance); } /** * Inserts the specified vector into the given orthonormal matrix <code>v</code> at * column <code>corrDim</code>. After insertion the matrix <code>v</code> * is orthonormalized and column <code>corrDim</code> of matrix * <code>e_czech</code> is set to the <code>corrDim</code>-th unit vector.. 
* * @param v the orthonormal matrix of the eigenvectors * @param e_czech the selection matrix of the strong eigenvectors * @param vector the vector to be inserted * @param corrDim the column at which the vector should be inserted */ private void adjust(Matrix v, Matrix e_czech, Matrix vector, int corrDim) { int dim = v.getRowDimension(); // set e_czech[corrDim][corrDim] := 1 e_czech.set(corrDim, corrDim, 1); // normalize v Matrix v_i = vector.copy(); Matrix sum = new Matrix(dim, 1); for (int k = 0; k < corrDim; k++) { Matrix v_k = v.getColumn(k); sum = sum.plus(v_k.times(v_i.scalarProduct(0, v_k, 0))); } v_i = v_i.minus(sum); v_i = v_i.times(1.0 / v_i.euclideanNorm(0)); v.setColumn(corrDim, v_i); } /** * Computes the Euclidean distance between the given two vectors. * * @param rv1 first RealVector * @param rv2 second RealVector * @return the Euclidean distance between the given two vectors */ private double euclideanDistance(RealVector rv1, RealVector rv2) { if (rv1.getDimensionality() != rv2.getDimensionality()) { throw new IllegalArgumentException("Different dimensionality of RealVectors\n first argument: " + rv1.toString() + "\n second argument: " + rv2.toString()); } double sqrDist = 0; for (int i = 1; i <= rv1.getDimensionality(); i++) { double diff = rv1.getValue(i) - rv2.getValue(i); sqrDist += diff * diff; } return Math.sqrt(sqrDist); } }
description
src/de/lmu/ifi/dbs/distance/CorrelationDistanceFunction.java
description
Java
lgpl-2.1
0d065235b72cad64f211139ce37883a2d80f869f
0
johnscancella/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs,johnscancella/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,sewe/spotbugs,sewe/spotbugs,sewe/spotbugs,sewe/spotbugs,johnscancella/spotbugs,johnscancella/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs
/* * FindBugs - Find bugs in Java programs * Copyright (C) 2003-2008 University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs; import java.awt.GraphicsEnvironment; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.math.BigInteger; import java.net.URL; import java.net.URLConnection; import java.security.MessageDigest; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.logging.Level; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.WillClose; import javax.annotation.WillNotClose; import javax.xml.transform.TransformerException; import edu.umd.cs.findbugs.ba.AnalysisContext; import edu.umd.cs.findbugs.ba.MissingClassException; import edu.umd.cs.findbugs.charsets.UTF8; import edu.umd.cs.findbugs.cloud.Cloud; import edu.umd.cs.findbugs.cloud.CloudFactory; import edu.umd.cs.findbugs.log.Profiler; import edu.umd.cs.findbugs.model.ClassFeatureSet; import edu.umd.cs.findbugs.util.Util; import edu.umd.cs.findbugs.xml.Dom4JXMLOutput; import edu.umd.cs.findbugs.xml.OutputStreamXMLOutput; import edu.umd.cs.findbugs.xml.XMLAttributeList; import edu.umd.cs.findbugs.xml.XMLOutput; import edu.umd.cs.findbugs.xml.XMLOutputUtil; import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.DocumentFactory; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory; /** * An implementation of {@link BugCollection} that keeps the BugInstances sorted * by class (using the native comparison ordering of BugInstance's compareTo() * method as a tie-breaker). 
* * @see BugInstance * @author David Hovemeyer */ public class SortedBugCollection implements BugCollection { private static final Logger LOGGER = Logger.getLogger(SortedBugCollection.class.getName()); long analysisTimestamp = System.currentTimeMillis(); String analysisVersion = Version.RELEASE; private boolean withMessages = false; private boolean minimalXML = false; private boolean applySuppressions = false; private Cloud cloud; boolean shouldNotUsePlugin; long timeStartedLoading, timeFinishedLoading; String dataSource = ""; private Map<String, String> xmlCloudDetails = Collections.emptyMap(); /** * @return Returns the timeStartedLoading. */ public long getTimeStartedLoading() { return timeStartedLoading; } /** * @return Returns the timeFinishedLoading. */ public long getTimeFinishedLoading() { return timeFinishedLoading; } public String getDataSource() { return dataSource; } final Project project; public Project getProject() { return project; } public @CheckForNull Cloud getCloudLazily() { if (cloud != null && bugsPopulated) cloud.bugsPopulated(); return cloud; } public @Nonnull Cloud getCloud() { if (shouldNotUsePlugin) { return CloudFactory.getPlainCloud(this); } if (cloud == null) { IGuiCallback callback = getProject().getGuiCallback(); try { cloud = CloudFactory.createCloudWithoutInitializing(this); callback.registerCloud(getProject(), this, cloud); CloudFactory.initializeCloud(this, cloud); } catch (Exception e) { LOGGER.log(Level.SEVERE, "Could not load cloud plugin", e); callback.showMessageDialog("Unable to connect to cloud: " + e.getClass().getSimpleName() + ": " + e.getMessage()); if (CloudFactory.FAIL_ON_CLOUD_ERROR) throw new IllegalStateException("Could not load FindBugs Cloud plugin - to avoid this message, " + "set -D" + CloudFactory.FAIL_ON_CLOUD_ERROR_PROP + "=false", e); cloud = CloudFactory.getPlainCloud(this); } } if (bugsPopulated) cloud.bugsPopulated(); return cloud; } public boolean isApplySuppressions() { return applySuppressions; } public void setApplySuppressions(boolean applySuppressions) { this.applySuppressions = applySuppressions; } private static final boolean REPORT_SUMMARY_HTML = SystemProperties.getBoolean("findbugs.report.SummaryHTML"); public long getAnalysisTimestamp() { return analysisTimestamp; } public void setAnalysisTimestamp(long timestamp) { analysisTimestamp = timestamp; } /** * Add a Collection of BugInstances to this BugCollection object. This just * calls add(BugInstance) for each instance in the input collection. * * @param collection * the Collection of BugInstances to add */ public void addAll(Collection<BugInstance> collection) { for (BugInstance bug : collection) { add(bug); } } /** * Add a Collection of BugInstances to this BugCollection object. * * @param collection * the Collection of BugInstances to add * @param updateActiveTime * true if active time of added BugInstances should be updated to * match collection: false if not */ public void addAll(Collection<BugInstance> collection, boolean updateActiveTime) { for (BugInstance warning : collection) { add(warning, updateActiveTime); } } /** * Add a BugInstance to this BugCollection. This just calls add(bugInstance, * true). * * @param bugInstance * the BugInstance * @return true if the BugInstance was added, or false if a matching * BugInstance was already in the BugCollection */ public boolean add(BugInstance bugInstance) { return add(bugInstance, bugInstance.getFirstVersion() == 0L && bugInstance.getLastVersion() == 0L); } /** * Add an analysis error. 
* * @param message * the error message */ public void addError(String message) { addError(message, null); } /** * Get the current AppVersion. */ public AppVersion getCurrentAppVersion() { return new AppVersion(getSequenceNumber()).setReleaseName(getReleaseName()).setTimestamp(getTimestamp()) .setNumClasses(getProjectStats().getNumClasses()).setCodeSize(getProjectStats().getCodeSize()); } /** * Read XML data from given file into this object, populating given Project * as a side effect. * * @param fileName * name of the file to read */ public void readXML(String fileName) throws IOException, DocumentException { readXML(new File(fileName)); } /** * Read XML data from given file into this object, populating given Project * as a side effect. * * @param file * the file */ public void readXML(File file) throws IOException, DocumentException { project.setCurrentWorkingDirectory(file.getParentFile()); dataSource = file.getAbsolutePath(); InputStream in = progessMonitoredInputStream(file, "Loading analysis"); try { readXML(in, file); } catch (IOException e) { throw newIOException(file, e); } catch (DocumentException e) { throw new DocumentException("Failing reading " + file, e); } } /** * @param file * @param e * @return */ private IOException newIOException(Object file, IOException e) { IOException result = new IOException("Failing reading " + file); result.initCause(e); return result; } public void readXML(URL u) throws IOException, DocumentException { InputStream in = progessMonitoredInputStream(u.openConnection(), "Loading analysis"); dataSource = u.toString(); try { readXML(in); } catch (IOException e) { throw newIOException(u, e); } catch (DocumentException e) { throw new DocumentException("Failing reading " + u, e); } } /** * Read XML data from given input stream into this object, populating the * Project as a side effect. An attempt will be made to close the input * stream (even if an exception is thrown). 
* * @param in * the InputStream */ public void readXML(@WillClose InputStream in, File base) throws IOException, DocumentException { try { doReadXML(in, base); } finally { in.close(); } } public void readXML(@WillClose InputStream in) throws IOException, DocumentException { assert project != null; assert in != null; doReadXML(in, null); } public void readXML(@WillClose Reader reader) throws IOException, DocumentException { assert project != null; assert reader != null; doReadXML(reader, null); } private void doReadXML(@WillClose InputStream in, @CheckForNull File base) throws IOException, DocumentException { try { checkInputStream(in); Reader reader = Util.getReader(in); doReadXML(reader, base); } catch (RuntimeException e) { in.close(); throw e; }catch (IOException e) { in.close(); throw e; } } private void doReadXML(@WillClose Reader reader, @CheckForNull File base) throws IOException, DocumentException { timeStartedLoading = System.currentTimeMillis(); SAXBugCollectionHandler handler = new SAXBugCollectionHandler(this, base); Profiler profiler = getProjectStats().getProfiler(); profiler.start(handler.getClass()); try { XMLReader xr; try { xr = XMLReaderFactory.createXMLReader(); } catch (SAXException e) { AnalysisContext.logError("Couldn't create XMLReaderFactory", e); throw new DocumentException("Sax error ", e); } xr.setContentHandler(handler); xr.setErrorHandler(handler); xr.parse(new InputSource(reader)); } catch (SAXParseException e) { if (base != null) throw new DocumentException("Parse error at line " + e.getLineNumber() + " : " + e.getColumnNumber() + " of " + base, e); throw new DocumentException("Parse error at line " + e.getLineNumber() + " : " + e.getColumnNumber(), e); } catch (SAXException e) { // FIXME: throw SAXException from method? if (base != null) throw new DocumentException("Sax error while parsing " + base, e); throw new DocumentException("Sax error ", e); } finally { Util.closeSilently(reader); profiler.end(handler.getClass()); } timeFinishedLoading = System.currentTimeMillis(); bugsPopulated(); // Presumably, project is now up-to-date project.setModified(false); } public void writeXML(OutputStream out) throws IOException { writeXML(UTF8.writer(out)); } /** * Write this BugCollection to a file as XML. * * @param fileName * the file to write to */ public void writeXML(String fileName) throws IOException { OutputStream out = new FileOutputStream(fileName); if (fileName.endsWith(".gz")) out = new GZIPOutputStream(out); writeXML(out); } /** * Write this BugCollection to a file as XML. * * @param file * the file to write to */ public void writeXML(File file) throws IOException { OutputStream out = new FileOutputStream(file); if (file.getName().endsWith(".gz")) out = new GZIPOutputStream(out); writeXML(out); } /** * Convert the BugCollection into a dom4j Document object. * * @return the Document representing the BugCollection as a dom4j tree */ public Document toDocument() { // if (project == null) throw new NullPointerException("No project"); assert project != null; DocumentFactory docFactory = new DocumentFactory(); Document document = docFactory.createDocument(); Dom4JXMLOutput treeBuilder = new Dom4JXMLOutput(document); try { writeXML(treeBuilder); } catch (IOException e) { // Can't happen } return document; } /** * Write the BugCollection to given output stream as XML. The output stream * will be closed, even if an exception is thrown. 
* * @param out * the OutputStream to write to */ public void writeXML(@WillClose Writer out) throws IOException { assert project != null; bugsPopulated(); XMLOutput xmlOutput; // if (project == null) throw new NullPointerException("No project"); if (withMessages) { Cloud cloud = getCloud(); cloud.bugsPopulated(); cloud.initiateCommunication(); cloud.waitUntilIssueDataDownloaded(); String token = SystemProperties.getProperty("findbugs.cloud.token"); if (token != null && token.trim().length() > 0) { LOGGER.info("Cloud token specified - uploading new issues, if necessary..."); cloud.waitUntilNewIssuesUploaded(); } xmlOutput = new OutputStreamXMLOutput(out, "http://findbugs.sourceforge.net/xsl/default.xsl"); } else { xmlOutput = new OutputStreamXMLOutput(out); } writeXML(xmlOutput); } public void writePrologue(XMLOutput xmlOutput) throws IOException { xmlOutput.beginDocument(); xmlOutput.openTag( ROOT_ELEMENT_NAME, new XMLAttributeList().addAttribute("version", analysisVersion) .addAttribute("sequence", String.valueOf(getSequenceNumber())) .addAttribute("timestamp", String.valueOf(getTimestamp())) .addAttribute("analysisTimestamp", String.valueOf(getAnalysisTimestamp())) .addAttribute("release", getReleaseName())); project.writeXML(xmlOutput, null, this); } // private String getQuickInstanceHash(BugInstance bugInstance) { // String hash = bugInstance.getInstanceHash(); // if (hash != null) return hash; // MessageDigest digest = null; // try { digest = MessageDigest.getInstance("MD5"); // } catch (Exception e2) { // // OK, we won't digest // assert true; // } // hash = bugInstance.getInstanceKey(); // if (digest != null) { // byte [] data = digest.digest(hash.getBytes()); // String tmp = new BigInteger(1,data).toString(16); // if (false) System.out.println(hash + " -> " + tmp); // hash = tmp; // } // bugInstance.setInstanceHash(hash); // return hash; // } public void computeBugHashes() { if (preciseHashOccurrenceNumbersAvailable) return; invalidateHashes(); MessageDigest digest = Util.getMD5Digest(); HashMap<String, Integer> seen = new HashMap<String, Integer>(); for (BugInstance bugInstance : getCollection()) { String hash = bugInstance.getInstanceHash(); if (hash == null) { hash = bugInstance.getInstanceKey(); try { byte[] data = digest.digest(hash.getBytes("UTF-8")); hash = new BigInteger(1, data).toString(16); } catch (UnsupportedEncodingException e) { throw new IllegalStateException(e); } bugInstance.setInstanceHash(hash); } Integer count = seen.get(hash); if (count == null) { bugInstance.setInstanceOccurrenceNum(0); seen.put(hash, 0); } else { bugInstance.setInstanceOccurrenceNum(count + 1); seen.put(hash, count + 1); } } for (BugInstance bugInstance : getCollection()) bugInstance.setInstanceOccurrenceMax(seen.get(bugInstance.getInstanceHash())); preciseHashOccurrenceNumbersAvailable = true; } /** * Write the BugCollection to an XMLOutput object. The finish() method of * the XMLOutput object is guaranteed to be called. * * <p> * To write the SummaryHTML element, set property * findbugs.report.SummaryHTML to "true". 
* </p> * * @param xmlOutput * the XMLOutput object */ public void writeXML(@WillClose XMLOutput xmlOutput) throws IOException { assert project != null; try { writePrologue(xmlOutput); if (withMessages) { computeBugHashes(); getProjectStats().computeFileStats(this); String commonBase = null; for (String s : project.getSourceDirList()) { if (commonBase == null) commonBase = s; else commonBase = commonBase.substring(0, commonPrefix(commonBase, s)); } if (commonBase != null && commonBase.length() > 0) { if (commonBase.indexOf("/./") > 0) commonBase = commonBase.substring(0, commonBase.indexOf("/.")); File base = new File(commonBase); if (base.exists() && base.isDirectory() && base.canRead()) SourceLineAnnotation.generateRelativeSource(base, project); } } if (earlyStats && !minimalXML) getProjectStats().writeXML(xmlOutput, withMessages); // Write BugInstances for (BugInstance bugInstance : getCollection()) if (!applySuppressions || !project.getSuppressionFilter().match(bugInstance)) bugInstance.writeXML(xmlOutput, this, withMessages); writeEpilogue(xmlOutput); } finally { xmlOutput.finish(); SourceLineAnnotation.clearGenerateRelativeSource(); } } int commonPrefix(String s1, String s2) { int pos = 0; while (pos < s1.length() && pos < s2.length() && s1.charAt(pos) == s2.charAt(pos)) pos++; return pos; } boolean earlyStats = SystemProperties.getBoolean("findbugs.report.summaryFirst"); public void writeEpilogue(XMLOutput xmlOutput) throws IOException { if (withMessages) { writeBugCategories(xmlOutput); writeBugPatterns(xmlOutput); writeBugCodes(xmlOutput); } // Errors, missing classes if (!minimalXML) emitErrors(xmlOutput); if (!earlyStats && !minimalXML) { // Statistics getProjectStats().writeXML(xmlOutput, withMessages); } // // Class and method hashes // xmlOutput.openTag(CLASS_HASHES_ELEMENT_NAME); // for (Iterator<ClassHash> i = classHashIterator(); i.hasNext();) { // ClassHash classHash = i.next(); // classHash.writeXML(xmlOutput); // } // xmlOutput.closeTag(CLASS_HASHES_ELEMENT_NAME); // Class features xmlOutput.openTag("ClassFeatures"); for (Iterator<ClassFeatureSet> i = classFeatureSetIterator(); i.hasNext();) { ClassFeatureSet classFeatureSet = i.next(); classFeatureSet.writeXML(xmlOutput); } xmlOutput.closeTag("ClassFeatures"); // AppVersions xmlOutput.openTag(HISTORY_ELEMENT_NAME); for (Iterator<AppVersion> i = appVersionIterator(); i.hasNext();) { AppVersion appVersion = i.next(); appVersion.writeXML(xmlOutput); } xmlOutput.closeTag(HISTORY_ELEMENT_NAME); // Summary HTML if (REPORT_SUMMARY_HTML) { String html = getSummaryHTML(); if (html != null && !html.equals("")) { xmlOutput.openTag(SUMMARY_HTML_ELEMENT_NAME); xmlOutput.writeCDATA(html); xmlOutput.closeTag(SUMMARY_HTML_ELEMENT_NAME); } } xmlOutput.closeTag(ROOT_ELEMENT_NAME); } private void writeBugPatterns(XMLOutput xmlOutput) throws IOException { // Find bug types reported Set<String> bugTypeSet = new HashSet<String>(); for (Iterator<BugInstance> i = iterator(); i.hasNext();) { BugInstance bugInstance = i.next(); BugPattern bugPattern = bugInstance.getBugPattern(); if (bugPattern != null) { bugTypeSet.add(bugPattern.getType()); } } // Emit element describing each reported bug pattern for (String bugType : bugTypeSet) { BugPattern bugPattern = DetectorFactoryCollection.instance().lookupBugPattern(bugType); if (bugPattern == null) continue; XMLAttributeList attributeList = new XMLAttributeList(); attributeList.addAttribute("type", bugType); attributeList.addAttribute("abbrev", bugPattern.getAbbrev()); 
attributeList.addAttribute("category", bugPattern.getCategory()); if (bugPattern.getCWEid() != 0) { attributeList.addAttribute("cweid", Integer.toString(bugPattern.getCWEid())); } xmlOutput.openTag("BugPattern", attributeList); xmlOutput.openTag("ShortDescription"); xmlOutput.writeText(bugPattern.getShortDescription()); xmlOutput.closeTag("ShortDescription"); xmlOutput.openTag("Details"); xmlOutput.writeCDATA(bugPattern.getDetailText()); xmlOutput.closeTag("Details"); xmlOutput.closeTag("BugPattern"); } } private void writeBugCodes(XMLOutput xmlOutput) throws IOException { // Find bug codes reported Set<String> bugCodeSet = new HashSet<String>(); for (Iterator<BugInstance> i = iterator(); i.hasNext();) { BugInstance bugInstance = i.next(); String bugCode = bugInstance.getAbbrev(); if (bugCode != null) { bugCodeSet.add(bugCode); } } // Emit element describing each reported bug code for (String bugCodeAbbrev : bugCodeSet) { BugCode bugCode = DetectorFactoryCollection.instance().getBugCode(bugCodeAbbrev); String bugCodeDescription = bugCode.getDescription(); if (bugCodeDescription == null) continue; XMLAttributeList attributeList = new XMLAttributeList(); attributeList.addAttribute("abbrev", bugCodeAbbrev); if (bugCode.getCWEid() != 0) { attributeList.addAttribute("cweid", Integer.toString(bugCode.getCWEid())); } xmlOutput.openTag("BugCode", attributeList); xmlOutput.openTag("Description"); xmlOutput.writeText(bugCodeDescription); xmlOutput.closeTag("Description"); xmlOutput.closeTag("BugCode"); } } private void writeBugCategories(XMLOutput xmlOutput) throws IOException { // Find bug categories reported Set<String> bugCatSet = new HashSet<String>(); for (Iterator<BugInstance> i = iterator(); i.hasNext();) { BugInstance bugInstance = i.next(); BugPattern bugPattern = bugInstance.getBugPattern(); if (bugPattern != null) { bugCatSet.add(bugPattern.getCategory()); } } // Emit element describing each reported bug code for (String bugCat : bugCatSet) { String bugCatDescription = I18N.instance().getBugCategoryDescription(bugCat); if (bugCatDescription == null) continue; XMLAttributeList attributeList = new XMLAttributeList(); attributeList.addAttribute("category", bugCat); xmlOutput.openTag("BugCategory", attributeList); xmlOutput.openTag("Description"); xmlOutput.writeText(bugCatDescription); xmlOutput.closeTag("Description"); xmlOutput.closeTag("BugCategory"); } } private void emitErrors(XMLOutput xmlOutput) throws IOException { // System.err.println("Writing errors to XML output"); XMLAttributeList attributeList = new XMLAttributeList(); attributeList.addAttribute("errors", Integer.toString(errorList.size())); attributeList.addAttribute("missingClasses", Integer.toString(missingClassSet.size())); xmlOutput.openTag(ERRORS_ELEMENT_NAME, attributeList); // Emit Error elements describing analysis errors for (AnalysisError error : getErrors()) { xmlOutput.openTag(ERROR_ELEMENT_NAME); xmlOutput.openTag(ERROR_MESSAGE_ELEMENT_NAME); xmlOutput.writeText(error.getMessage()); xmlOutput.closeTag(ERROR_MESSAGE_ELEMENT_NAME); if (error.getExceptionMessage() != null) { xmlOutput.openTag(ERROR_EXCEPTION_ELEMENT_NAME); xmlOutput.writeText(error.getExceptionMessage()); xmlOutput.closeTag(ERROR_EXCEPTION_ELEMENT_NAME); String stackTrace[] = error.getStackTrace(); if (stackTrace != null) { for (String aStackTrace : stackTrace) { xmlOutput.openTag(ERROR_STACK_TRACE_ELEMENT_NAME); xmlOutput.writeText(aStackTrace); xmlOutput.closeTag(ERROR_STACK_TRACE_ELEMENT_NAME); } } if (false && error.getNestedExceptionMessage() != 
null) { xmlOutput.openTag(ERROR_EXCEPTION_ELEMENT_NAME); xmlOutput.writeText(error.getNestedExceptionMessage()); xmlOutput.closeTag(ERROR_EXCEPTION_ELEMENT_NAME); stackTrace = error.getNestedStackTrace(); if (stackTrace != null) { for (String aStackTrace : stackTrace) { xmlOutput.openTag(ERROR_STACK_TRACE_ELEMENT_NAME); xmlOutput.writeText(aStackTrace); xmlOutput.closeTag(ERROR_STACK_TRACE_ELEMENT_NAME); } } } } xmlOutput.closeTag(ERROR_ELEMENT_NAME); } // Emit missing classes XMLOutputUtil.writeElementList(xmlOutput, MISSING_CLASS_ELEMENT_NAME, missingClassIterator()); xmlOutput.closeTag(ERRORS_ELEMENT_NAME); } private void checkInputStream(@WillNotClose InputStream in) throws IOException { if (!in.markSupported()) return; byte[] buf = new byte[200]; in.mark(buf.length); int numRead = 0; boolean isEOF = false; while (numRead < buf.length && !isEOF) { int n = in.read(buf, numRead, buf.length - numRead); if (n < 0) { isEOF = true; } else { numRead += n; } } in.reset(); BufferedReader reader = new BufferedReader(Util.getReader(new ByteArrayInputStream(buf))); try { String line; while ((line = reader.readLine()) != null) { if (line.startsWith("<BugCollection")) { return; } } } finally { reader.close(); } throw new IOException("XML does not contain saved bug data"); } /** * Clone all of the BugInstance objects in the source Collection and add * them to the destination Collection. * * @param dest * the destination Collection * @param source * the source Collection */ public static void cloneAll(Collection<BugInstance> dest, Collection<BugInstance> source) { for (BugInstance obj : source) { dest.add((BugInstance) obj.clone()); } } /** * @author pugh */ private static final class BoundedLinkedHashSet extends LinkedHashSet<AnalysisError> { @Override public boolean add(AnalysisError a) { if (this.size() > 1000) return false; return super.add(a); } } public static class BugInstanceComparator implements Comparator<BugInstance> { private BugInstanceComparator() { } public int compare(BugInstance lhs, BugInstance rhs) { ClassAnnotation lca = lhs.getPrimaryClass(); ClassAnnotation rca = rhs.getPrimaryClass(); if (lca == null || rca == null) throw new IllegalStateException("null class annotation: " + lca + "," + rca); int cmp = lca.getClassName().compareTo(rca.getClassName()); if (cmp != 0) return cmp; return lhs.compareTo(rhs); } public static final BugInstanceComparator instance = new BugInstanceComparator(); } public static class MultiversionBugInstanceComparator extends BugInstanceComparator { @Override public int compare(BugInstance lhs, BugInstance rhs) { int result = super.compare(lhs, rhs); if (result != 0) return result; long diff = lhs.getFirstVersion() - rhs.getFirstVersion(); if (diff == 0) diff = lhs.getLastVersion() - rhs.getLastVersion(); if (diff < 0) return -1; if (diff > 0) return 1; return 0; } public static final MultiversionBugInstanceComparator instance = new MultiversionBugInstanceComparator(); } private final Comparator<BugInstance> comparator; private final TreeSet<BugInstance> bugSet; private final LinkedHashSet<AnalysisError> errorList; private final TreeSet<String> missingClassSet; @CheckForNull private String summaryHTML; private final ProjectStats projectStats; // private Map<String, ClassHash> classHashMap; private final Map<String, ClassFeatureSet> classFeatureSetMap; private final List<AppVersion> appVersionList; private boolean preciseHashOccurrenceNumbersAvailable = false; /** * Sequence number of the most-recently analyzed version of the code. 
*/ private long sequence; /** * Release name of the analyzed application. */ private String releaseName; /** * Current analysis timestamp. */ private long timestamp; public SortedBugCollection(Project project) { this(new ProjectStats(), MultiversionBugInstanceComparator.instance, project); } public SortedBugCollection(File f) throws IOException, DocumentException { this(); this.readXML(f); } /** * Constructor. Creates an empty object. */ public SortedBugCollection() { this(new ProjectStats()); } /** * Constructor. Creates an empty object. */ public SortedBugCollection(Comparator<BugInstance> comparator) { this(new ProjectStats(), comparator); } /** * Constructor. Creates an empty object given an existing ProjectStats. * * @param projectStats * the ProjectStats */ public SortedBugCollection(ProjectStats projectStats) { this(projectStats, MultiversionBugInstanceComparator.instance); } public SortedBugCollection(ProjectStats projectStats, Project project) { this(projectStats, MultiversionBugInstanceComparator.instance, project); } /** * Constructor. Creates an empty object given an existing ProjectStats. * * @param projectStats * the ProjectStats * @param comparator * to use for sorting bug instances */ public SortedBugCollection(ProjectStats projectStats, Comparator<BugInstance> comparator) { this(projectStats, comparator, new Project()); } public SortedBugCollection(ProjectStats projectStats, Comparator<BugInstance> comparator, Project project) { this.projectStats = projectStats; this.comparator = comparator; this.project = project; bugSet = new TreeSet<BugInstance>(comparator); errorList = new BoundedLinkedHashSet(); missingClassSet = new TreeSet<String>(); summaryHTML = null; classFeatureSetMap = new TreeMap<String, ClassFeatureSet>(); sequence = 0L; appVersionList = new LinkedList<AppVersion>(); releaseName = ""; timestamp = -1L; } public boolean add(BugInstance bugInstance, boolean updateActiveTime) { assert !bugsPopulated; if (bugsPopulated) AnalysisContext.logError("Bug collection marked as populated, but bugs added", new RuntimeException()); preciseHashOccurrenceNumbersAvailable = false; if (updateActiveTime) { bugInstance.setFirstVersion(sequence); } invalidateHashes(); return bugSet.add(bugInstance); } private void invalidateHashes() { preciseHashOccurrenceNumbersAvailable = false; } public boolean remove(BugInstance bugInstance) { invalidateHashes(); return bugSet.remove(bugInstance); } public Iterator<BugInstance> iterator() { return bugSet.iterator(); } public Collection<BugInstance> getCollection() { return Collections.unmodifiableCollection(bugSet); } public void addError(String message, Throwable exception) { if (exception instanceof MissingClassException) { MissingClassException e = (MissingClassException) exception; addMissingClass(AbstractBugReporter.getMissingClassName(e.getClassNotFoundException())); return; } if (exception instanceof ClassNotFoundException) { ClassNotFoundException e = (ClassNotFoundException) exception; addMissingClass(AbstractBugReporter.getMissingClassName(e)); return; } if (exception instanceof edu.umd.cs.findbugs.classfile.MissingClassException) { edu.umd.cs.findbugs.classfile.MissingClassException e = (edu.umd.cs.findbugs.classfile.MissingClassException) exception; addMissingClass(AbstractBugReporter.getMissingClassName(e.toClassNotFoundException())); return; } errorList.add(new AnalysisError(message, exception)); } public void addError(AnalysisError error) { errorList.add(error); } public void clearErrors() { errorList.clear(); } public void 
addMissingClass(String className) { if (className == null || className.length() == 0) return; if (className.startsWith("[")) { assert false : "Bad class name " + className; return; } if (className.endsWith(";")) addError("got signature rather than classname: " + className, new IllegalArgumentException()); else missingClassSet.add(className); } public Collection<? extends AnalysisError> getErrors() { return errorList; } public Iterator<String> missingClassIterator() { return missingClassSet.iterator(); } public boolean contains(BugInstance bugInstance) { return bugSet.contains(bugInstance); } public BugInstance getMatching(BugInstance bugInstance) { SortedSet<BugInstance> tailSet = bugSet.tailSet(bugInstance); if (tailSet.isEmpty()) return null; BugInstance first = tailSet.first(); return bugInstance.equals(first) ? first : null; } public String getSummaryHTML() throws IOException { if (summaryHTML == null) { try { StringWriter writer = new StringWriter(); ProjectStats stats = getProjectStats(); stats.transformSummaryToHTML(writer); summaryHTML = writer.toString(); } catch (final TransformerException e) { IOException ioe = new IOException("Couldn't generate summary HTML"); ioe.initCause(e); throw ioe; } } return summaryHTML; } public ProjectStats getProjectStats() { return projectStats; } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#lookupFromUniqueId(java.lang.String) */ @Deprecated public BugInstance lookupFromUniqueId(String uniqueId) { for (BugInstance bug : bugSet) if (bug.getInstanceHash().equals(uniqueId)) return bug; return null; } public long getSequenceNumber() { return sequence; } public void setSequenceNumber(long sequence) { this.sequence = sequence; } public SortedBugCollection duplicate() { SortedBugCollection dup = createEmptyCollectionWithMetadata(); SortedBugCollection.cloneAll(dup.bugSet, this.bugSet); return dup; } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#createEmptyCollectionWithMetadata() */ public SortedBugCollection createEmptyCollectionWithMetadata() { SortedBugCollection dup = new SortedBugCollection((ProjectStats) projectStats.clone(), comparator, project.duplicate()); dup.projectStats.clearBugCounts(); dup.errorList.addAll(this.errorList); dup.missingClassSet.addAll(this.missingClassSet); dup.summaryHTML = this.summaryHTML; dup.classFeatureSetMap.putAll(this.classFeatureSetMap); dup.sequence = this.sequence; dup.analysisVersion = this.analysisVersion; dup.analysisTimestamp = this.analysisTimestamp; dup.timestamp = this.timestamp; dup.releaseName = this.releaseName; for (AppVersion appVersion : appVersionList) { dup.appVersionList.add((AppVersion) appVersion.clone()); } return dup; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#clearBugInstances() */ public void clearBugInstances() { bugSet.clear(); invalidateHashes(); } public void clearMissingClasses() { missingClassSet.clear(); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#getReleaseName() */ public String getReleaseName() { if (releaseName == null) return ""; return releaseName; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#setReleaseName(java.lang.String) */ public void setReleaseName(String releaseName) { this.releaseName = releaseName; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#appVersionIterator() */ public Iterator<AppVersion> appVersionIterator() { return appVersionList.iterator(); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#addAppVersion(edu.umd.cs.findbugs. 
* AppVersion) */ public void addAppVersion(AppVersion appVersion) { appVersionList.add(appVersion); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#clearAppVersions() */ public void clearAppVersions() { appVersionList.clear(); sequence = 0; } public void trimAppVersions(long numberToRetain) { while (appVersionList.size() > numberToRetain) appVersionList.remove(appVersionList.size() - 1); sequence = appVersionList.size(); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#setTimestamp(long) */ public void setTimestamp(long timestamp) { this.timestamp = timestamp; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#getTimestamp() */ public long getTimestamp() { return timestamp; } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#getClassFeatureSet(java.lang.String) */ public ClassFeatureSet getClassFeatureSet(String className) { return classFeatureSetMap.get(className); } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#setClassFeatureSet(edu.umd.cs.findbugs * .model.ClassFeatureSet) */ public void setClassFeatureSet(ClassFeatureSet classFeatureSet) { classFeatureSetMap.put(classFeatureSet.getClassName(), classFeatureSet); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#classFeatureSetIterator() */ public Iterator<ClassFeatureSet> classFeatureSetIterator() { return classFeatureSetMap.values().iterator(); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#clearClassFeatures() */ public void clearClassFeatures() { classFeatureSetMap.clear(); } /** * @param withMessages * The withMessages to set. */ public void setWithMessages(boolean withMessages) { this.withMessages = withMessages; } /** * @return Returns the withMessages. */ public boolean getWithMessages() { return withMessages; } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#getAppVersionFromSequenceNumber(int) */ public AppVersion getAppVersionFromSequenceNumber(long target) { for (AppVersion av : appVersionList) if (av.getSequenceNumber() == target) return av; if (target == this.getSequenceNumber()) return this.getCurrentAppVersion(); return null; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#findBug(java.lang.String, * java.lang.String, int) */ public BugInstance findBug(String instanceHash, String bugType, int lineNumber) { for (BugInstance bug : bugSet) if (bug.getInstanceHash().equals(instanceHash) && bug.getBugPattern().getType().equals(bugType) && bug.getPrimarySourceLineAnnotation().getStartLine() == lineNumber) return bug; return null; } /** * @param version */ public void setAnalysisVersion(String version) { this.analysisVersion = version; } public String getAnalysisVersion() { return this.analysisVersion; } public InputStream progessMonitoredInputStream(File f, String msg) throws IOException { InputStream in = new FileInputStream(f); long length = f.length(); if (length > Integer.MAX_VALUE) throw new IllegalArgumentException("File " + f + " is too big at " + length + " bytes"); return wrapGzip(progressMonitoredInputStream(in, (int) length, msg), f); } public InputStream progessMonitoredInputStream(URLConnection c, String msg) throws IOException { InputStream in = c.getInputStream(); int length = c.getContentLength(); return wrapGzip(progressMonitoredInputStream(in, length, msg), c.getURL()); } public InputStream progressMonitoredInputStream(InputStream in, int length, String msg) { if (GraphicsEnvironment.isHeadless()) return in; IGuiCallback guiCallback = project.getGuiCallback(); if 
(guiCallback == null) return in; return guiCallback.getProgressMonitorInputStream(in, length, msg); } public InputStream wrapGzip(InputStream in, Object source) { try { if (source instanceof File) { File f = (File) source; if (f.getName().endsWith(".gz")) { return new GZIPInputStream(in); } } else if (source instanceof URL) { URL u = (URL) source; if (u.getPath().endsWith(".gz")) { return new GZIPInputStream(in); } } } catch (IOException e) { assert true; } return in; } public void clearCloud() { Cloud oldCloud = cloud; IGuiCallback callback = project.getGuiCallback(); if (oldCloud != null) { callback.unregisterCloud(project, this, oldCloud); oldCloud.shutdown(); } cloud = null; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#reinitializeCloud() */ public Cloud reinitializeCloud() { Cloud oldCloud = cloud; IGuiCallback callback = project.getGuiCallback(); if (oldCloud != null) { callback.unregisterCloud(project, this, oldCloud); oldCloud.shutdown(); } cloud = null; Cloud newCloud = getCloud(); assert newCloud == cloud; if (bugsPopulated && cloud != null) { cloud.bugsPopulated(); cloud.initiateCommunication(); } return cloud; } public void setXmlCloudDetails(Map<String, String> map) { this.xmlCloudDetails = map; } public Map<String, String> getXmlCloudDetails() { return xmlCloudDetails; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#setMinimalXML(boolean) */ public void setMinimalXML(boolean minimalXML) { this.minimalXML = minimalXML; } /** * @param b */ public void setDoNotUseCloud(boolean b) { this.shouldNotUsePlugin = b; } boolean bugsPopulated = false; /* (non-Javadoc) * @see edu.umd.cs.findbugs.BugCollection#bugsPopulated() */ public void bugsPopulated() { bugsPopulated = true; } } // vim:ts=4
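A minimal sketch of the behavior the revision above adds: writeXML(String) and writeXML(File) now wrap the output in a GZIPOutputStream when the target name ends in ".gz", whereas the previous version of the file (below) always wrote plain XML. The file names and the wrapper class name are illustrative assumptions.

import edu.umd.cs.findbugs.SortedBugCollection;

/** Sketch only: exercises the gzip-aware writeXML(String) from the revised file above. */
public class WriteGzXmlExample {
    public static void main(String[] args) throws Exception {
        SortedBugCollection bugs = new SortedBugCollection();
        bugs.readXML("findbugs.xml");      // an existing analysis result file (assumed to be present)
        bugs.writeXML("findbugs.xml.gz");  // ".gz" suffix selects gzip-compressed XML output
    }
}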
findbugs/src/java/edu/umd/cs/findbugs/SortedBugCollection.java
/* * FindBugs - Find bugs in Java programs * Copyright (C) 2003-2008 University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs; import java.awt.GraphicsEnvironment; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.math.BigInteger; import java.net.URL; import java.net.URLConnection; import java.security.MessageDigest; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.logging.Level; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.WillClose; import javax.annotation.WillNotClose; import javax.xml.transform.TransformerException; import edu.umd.cs.findbugs.ba.AnalysisContext; import edu.umd.cs.findbugs.ba.MissingClassException; import edu.umd.cs.findbugs.charsets.UTF8; import edu.umd.cs.findbugs.cloud.Cloud; import edu.umd.cs.findbugs.cloud.CloudFactory; import edu.umd.cs.findbugs.log.Profiler; import edu.umd.cs.findbugs.model.ClassFeatureSet; import edu.umd.cs.findbugs.util.Util; import edu.umd.cs.findbugs.xml.Dom4JXMLOutput; import edu.umd.cs.findbugs.xml.OutputStreamXMLOutput; import edu.umd.cs.findbugs.xml.XMLAttributeList; import edu.umd.cs.findbugs.xml.XMLOutput; import edu.umd.cs.findbugs.xml.XMLOutputUtil; import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.DocumentFactory; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory; /** * An implementation of {@link BugCollection} that keeps the BugInstances sorted * by class (using the native comparison ordering of BugInstance's compareTo() * method as a tie-breaker). 
* * @see BugInstance * @author David Hovemeyer */ public class SortedBugCollection implements BugCollection { private static final Logger LOGGER = Logger.getLogger(SortedBugCollection.class.getName()); long analysisTimestamp = System.currentTimeMillis(); String analysisVersion = Version.RELEASE; private boolean withMessages = false; private boolean minimalXML = false; private boolean applySuppressions = false; private Cloud cloud; boolean shouldNotUsePlugin; long timeStartedLoading, timeFinishedLoading; String dataSource = ""; private Map<String, String> xmlCloudDetails = Collections.emptyMap(); /** * @return Returns the timeStartedLoading. */ public long getTimeStartedLoading() { return timeStartedLoading; } /** * @return Returns the timeFinishedLoading. */ public long getTimeFinishedLoading() { return timeFinishedLoading; } public String getDataSource() { return dataSource; } final Project project; public Project getProject() { return project; } public @CheckForNull Cloud getCloudLazily() { if (cloud != null && bugsPopulated) cloud.bugsPopulated(); return cloud; } public @Nonnull Cloud getCloud() { if (shouldNotUsePlugin) { return CloudFactory.getPlainCloud(this); } if (cloud == null) { IGuiCallback callback = getProject().getGuiCallback(); try { cloud = CloudFactory.createCloudWithoutInitializing(this); callback.registerCloud(getProject(), this, cloud); CloudFactory.initializeCloud(this, cloud); } catch (Exception e) { LOGGER.log(Level.SEVERE, "Could not load cloud plugin", e); callback.showMessageDialog("Unable to connect to cloud: " + e.getClass().getSimpleName() + ": " + e.getMessage()); if (CloudFactory.FAIL_ON_CLOUD_ERROR) throw new IllegalStateException("Could not load FindBugs Cloud plugin - to avoid this message, " + "set -D" + CloudFactory.FAIL_ON_CLOUD_ERROR_PROP + "=false", e); cloud = CloudFactory.getPlainCloud(this); } } if (bugsPopulated) cloud.bugsPopulated(); return cloud; } public boolean isApplySuppressions() { return applySuppressions; } public void setApplySuppressions(boolean applySuppressions) { this.applySuppressions = applySuppressions; } private static final boolean REPORT_SUMMARY_HTML = SystemProperties.getBoolean("findbugs.report.SummaryHTML"); public long getAnalysisTimestamp() { return analysisTimestamp; } public void setAnalysisTimestamp(long timestamp) { analysisTimestamp = timestamp; } /** * Add a Collection of BugInstances to this BugCollection object. This just * calls add(BugInstance) for each instance in the input collection. * * @param collection * the Collection of BugInstances to add */ public void addAll(Collection<BugInstance> collection) { for (BugInstance bug : collection) { add(bug); } } /** * Add a Collection of BugInstances to this BugCollection object. * * @param collection * the Collection of BugInstances to add * @param updateActiveTime * true if active time of added BugInstances should be updated to * match collection: false if not */ public void addAll(Collection<BugInstance> collection, boolean updateActiveTime) { for (BugInstance warning : collection) { add(warning, updateActiveTime); } } /** * Add a BugInstance to this BugCollection. This just calls add(bugInstance, * true). * * @param bugInstance * the BugInstance * @return true if the BugInstance was added, or false if a matching * BugInstance was already in the BugCollection */ public boolean add(BugInstance bugInstance) { return add(bugInstance, bugInstance.getFirstVersion() == 0L && bugInstance.getLastVersion() == 0L); } /** * Add an analysis error. 
* * @param message * the error message */ public void addError(String message) { addError(message, null); } /** * Get the current AppVersion. */ public AppVersion getCurrentAppVersion() { return new AppVersion(getSequenceNumber()).setReleaseName(getReleaseName()).setTimestamp(getTimestamp()) .setNumClasses(getProjectStats().getNumClasses()).setCodeSize(getProjectStats().getCodeSize()); } /** * Read XML data from given file into this object, populating given Project * as a side effect. * * @param fileName * name of the file to read */ public void readXML(String fileName) throws IOException, DocumentException { readXML(new File(fileName)); } /** * Read XML data from given file into this object, populating given Project * as a side effect. * * @param file * the file */ public void readXML(File file) throws IOException, DocumentException { project.setCurrentWorkingDirectory(file.getParentFile()); dataSource = file.getAbsolutePath(); InputStream in = progessMonitoredInputStream(file, "Loading analysis"); try { readXML(in, file); } catch (IOException e) { throw newIOException(file, e); } catch (DocumentException e) { throw new DocumentException("Failing reading " + file, e); } } /** * @param file * @param e * @return */ private IOException newIOException(Object file, IOException e) { IOException result = new IOException("Failing reading " + file); result.initCause(e); return result; } public void readXML(URL u) throws IOException, DocumentException { InputStream in = progessMonitoredInputStream(u.openConnection(), "Loading analysis"); dataSource = u.toString(); try { readXML(in); } catch (IOException e) { throw newIOException(u, e); } catch (DocumentException e) { throw new DocumentException("Failing reading " + u, e); } } /** * Read XML data from given input stream into this object, populating the * Project as a side effect. An attempt will be made to close the input * stream (even if an exception is thrown). 
* * @param in * the InputStream */ public void readXML(@WillClose InputStream in, File base) throws IOException, DocumentException { try { doReadXML(in, base); } finally { in.close(); } } public void readXML(@WillClose InputStream in) throws IOException, DocumentException { assert project != null; assert in != null; doReadXML(in, null); } public void readXML(@WillClose Reader reader) throws IOException, DocumentException { assert project != null; assert reader != null; doReadXML(reader, null); } private void doReadXML(@WillClose InputStream in, @CheckForNull File base) throws IOException, DocumentException { try { checkInputStream(in); Reader reader = Util.getReader(in); doReadXML(reader, base); } catch (RuntimeException e) { in.close(); throw e; }catch (IOException e) { in.close(); throw e; } } private void doReadXML(@WillClose Reader reader, @CheckForNull File base) throws IOException, DocumentException { timeStartedLoading = System.currentTimeMillis(); SAXBugCollectionHandler handler = new SAXBugCollectionHandler(this, base); Profiler profiler = getProjectStats().getProfiler(); profiler.start(handler.getClass()); try { XMLReader xr; try { xr = XMLReaderFactory.createXMLReader(); } catch (SAXException e) { AnalysisContext.logError("Couldn't create XMLReaderFactory", e); throw new DocumentException("Sax error ", e); } xr.setContentHandler(handler); xr.setErrorHandler(handler); xr.parse(new InputSource(reader)); } catch (SAXParseException e) { if (base != null) throw new DocumentException("Parse error at line " + e.getLineNumber() + " : " + e.getColumnNumber() + " of " + base, e); throw new DocumentException("Parse error at line " + e.getLineNumber() + " : " + e.getColumnNumber(), e); } catch (SAXException e) { // FIXME: throw SAXException from method? if (base != null) throw new DocumentException("Sax error while parsing " + base, e); throw new DocumentException("Sax error ", e); } finally { Util.closeSilently(reader); profiler.end(handler.getClass()); } timeFinishedLoading = System.currentTimeMillis(); bugsPopulated(); // Presumably, project is now up-to-date project.setModified(false); } public void writeXML(OutputStream out) throws IOException { writeXML(UTF8.writer(out)); } /** * Write this BugCollection to a file as XML. * * @param fileName * the file to write to */ public void writeXML(String fileName) throws IOException { BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(fileName)); writeXML(out); } /** * Write this BugCollection to a file as XML. * * @param file * the file to write to */ public void writeXML(File file) throws IOException { BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(file)); writeXML(out); } /** * Convert the BugCollection into a dom4j Document object. * * @return the Document representing the BugCollection as a dom4j tree */ public Document toDocument() { // if (project == null) throw new NullPointerException("No project"); assert project != null; DocumentFactory docFactory = new DocumentFactory(); Document document = docFactory.createDocument(); Dom4JXMLOutput treeBuilder = new Dom4JXMLOutput(document); try { writeXML(treeBuilder); } catch (IOException e) { // Can't happen } return document; } /** * Write the BugCollection to given output stream as XML. The output stream * will be closed, even if an exception is thrown. 
* * @param out * the OutputStream to write to */ public void writeXML(@WillClose Writer out) throws IOException { assert project != null; bugsPopulated(); XMLOutput xmlOutput; // if (project == null) throw new NullPointerException("No project"); if (withMessages) { Cloud cloud = getCloud(); cloud.bugsPopulated(); cloud.initiateCommunication(); cloud.waitUntilIssueDataDownloaded(); String token = SystemProperties.getProperty("findbugs.cloud.token"); if (token != null && token.trim().length() > 0) { LOGGER.info("Cloud token specified - uploading new issues, if necessary..."); cloud.waitUntilNewIssuesUploaded(); } xmlOutput = new OutputStreamXMLOutput(out, "http://findbugs.sourceforge.net/xsl/default.xsl"); } else { xmlOutput = new OutputStreamXMLOutput(out); } writeXML(xmlOutput); } public void writePrologue(XMLOutput xmlOutput) throws IOException { xmlOutput.beginDocument(); xmlOutput.openTag( ROOT_ELEMENT_NAME, new XMLAttributeList().addAttribute("version", analysisVersion) .addAttribute("sequence", String.valueOf(getSequenceNumber())) .addAttribute("timestamp", String.valueOf(getTimestamp())) .addAttribute("analysisTimestamp", String.valueOf(getAnalysisTimestamp())) .addAttribute("release", getReleaseName())); project.writeXML(xmlOutput, null, this); } // private String getQuickInstanceHash(BugInstance bugInstance) { // String hash = bugInstance.getInstanceHash(); // if (hash != null) return hash; // MessageDigest digest = null; // try { digest = MessageDigest.getInstance("MD5"); // } catch (Exception e2) { // // OK, we won't digest // assert true; // } // hash = bugInstance.getInstanceKey(); // if (digest != null) { // byte [] data = digest.digest(hash.getBytes()); // String tmp = new BigInteger(1,data).toString(16); // if (false) System.out.println(hash + " -> " + tmp); // hash = tmp; // } // bugInstance.setInstanceHash(hash); // return hash; // } public void computeBugHashes() { if (preciseHashOccurrenceNumbersAvailable) return; invalidateHashes(); MessageDigest digest = Util.getMD5Digest(); HashMap<String, Integer> seen = new HashMap<String, Integer>(); for (BugInstance bugInstance : getCollection()) { String hash = bugInstance.getInstanceHash(); if (hash == null) { hash = bugInstance.getInstanceKey(); try { byte[] data = digest.digest(hash.getBytes("UTF-8")); hash = new BigInteger(1, data).toString(16); } catch (UnsupportedEncodingException e) { throw new IllegalStateException(e); } bugInstance.setInstanceHash(hash); } Integer count = seen.get(hash); if (count == null) { bugInstance.setInstanceOccurrenceNum(0); seen.put(hash, 0); } else { bugInstance.setInstanceOccurrenceNum(count + 1); seen.put(hash, count + 1); } } for (BugInstance bugInstance : getCollection()) bugInstance.setInstanceOccurrenceMax(seen.get(bugInstance.getInstanceHash())); preciseHashOccurrenceNumbersAvailable = true; } /** * Write the BugCollection to an XMLOutput object. The finish() method of * the XMLOutput object is guaranteed to be called. * * <p> * To write the SummaryHTML element, set property * findbugs.report.SummaryHTML to "true". 
* </p> * * @param xmlOutput * the XMLOutput object */ public void writeXML(@WillClose XMLOutput xmlOutput) throws IOException { assert project != null; try { writePrologue(xmlOutput); if (withMessages) { computeBugHashes(); getProjectStats().computeFileStats(this); String commonBase = null; for (String s : project.getSourceDirList()) { if (commonBase == null) commonBase = s; else commonBase = commonBase.substring(0, commonPrefix(commonBase, s)); } if (commonBase != null && commonBase.length() > 0) { if (commonBase.indexOf("/./") > 0) commonBase = commonBase.substring(0, commonBase.indexOf("/.")); File base = new File(commonBase); if (base.exists() && base.isDirectory() && base.canRead()) SourceLineAnnotation.generateRelativeSource(base, project); } } if (earlyStats && !minimalXML) getProjectStats().writeXML(xmlOutput, withMessages); // Write BugInstances for (BugInstance bugInstance : getCollection()) if (!applySuppressions || !project.getSuppressionFilter().match(bugInstance)) bugInstance.writeXML(xmlOutput, this, withMessages); writeEpilogue(xmlOutput); } finally { xmlOutput.finish(); SourceLineAnnotation.clearGenerateRelativeSource(); } } int commonPrefix(String s1, String s2) { int pos = 0; while (pos < s1.length() && pos < s2.length() && s1.charAt(pos) == s2.charAt(pos)) pos++; return pos; } boolean earlyStats = SystemProperties.getBoolean("findbugs.report.summaryFirst"); public void writeEpilogue(XMLOutput xmlOutput) throws IOException { if (withMessages) { writeBugCategories(xmlOutput); writeBugPatterns(xmlOutput); writeBugCodes(xmlOutput); } // Errors, missing classes if (!minimalXML) emitErrors(xmlOutput); if (!earlyStats && !minimalXML) { // Statistics getProjectStats().writeXML(xmlOutput, withMessages); } // // Class and method hashes // xmlOutput.openTag(CLASS_HASHES_ELEMENT_NAME); // for (Iterator<ClassHash> i = classHashIterator(); i.hasNext();) { // ClassHash classHash = i.next(); // classHash.writeXML(xmlOutput); // } // xmlOutput.closeTag(CLASS_HASHES_ELEMENT_NAME); // Class features xmlOutput.openTag("ClassFeatures"); for (Iterator<ClassFeatureSet> i = classFeatureSetIterator(); i.hasNext();) { ClassFeatureSet classFeatureSet = i.next(); classFeatureSet.writeXML(xmlOutput); } xmlOutput.closeTag("ClassFeatures"); // AppVersions xmlOutput.openTag(HISTORY_ELEMENT_NAME); for (Iterator<AppVersion> i = appVersionIterator(); i.hasNext();) { AppVersion appVersion = i.next(); appVersion.writeXML(xmlOutput); } xmlOutput.closeTag(HISTORY_ELEMENT_NAME); // Summary HTML if (REPORT_SUMMARY_HTML) { String html = getSummaryHTML(); if (html != null && !html.equals("")) { xmlOutput.openTag(SUMMARY_HTML_ELEMENT_NAME); xmlOutput.writeCDATA(html); xmlOutput.closeTag(SUMMARY_HTML_ELEMENT_NAME); } } xmlOutput.closeTag(ROOT_ELEMENT_NAME); } private void writeBugPatterns(XMLOutput xmlOutput) throws IOException { // Find bug types reported Set<String> bugTypeSet = new HashSet<String>(); for (Iterator<BugInstance> i = iterator(); i.hasNext();) { BugInstance bugInstance = i.next(); BugPattern bugPattern = bugInstance.getBugPattern(); if (bugPattern != null) { bugTypeSet.add(bugPattern.getType()); } } // Emit element describing each reported bug pattern for (String bugType : bugTypeSet) { BugPattern bugPattern = DetectorFactoryCollection.instance().lookupBugPattern(bugType); if (bugPattern == null) continue; XMLAttributeList attributeList = new XMLAttributeList(); attributeList.addAttribute("type", bugType); attributeList.addAttribute("abbrev", bugPattern.getAbbrev()); 
attributeList.addAttribute("category", bugPattern.getCategory()); if (bugPattern.getCWEid() != 0) { attributeList.addAttribute("cweid", Integer.toString(bugPattern.getCWEid())); } xmlOutput.openTag("BugPattern", attributeList); xmlOutput.openTag("ShortDescription"); xmlOutput.writeText(bugPattern.getShortDescription()); xmlOutput.closeTag("ShortDescription"); xmlOutput.openTag("Details"); xmlOutput.writeCDATA(bugPattern.getDetailText()); xmlOutput.closeTag("Details"); xmlOutput.closeTag("BugPattern"); } } private void writeBugCodes(XMLOutput xmlOutput) throws IOException { // Find bug codes reported Set<String> bugCodeSet = new HashSet<String>(); for (Iterator<BugInstance> i = iterator(); i.hasNext();) { BugInstance bugInstance = i.next(); String bugCode = bugInstance.getAbbrev(); if (bugCode != null) { bugCodeSet.add(bugCode); } } // Emit element describing each reported bug code for (String bugCodeAbbrev : bugCodeSet) { BugCode bugCode = DetectorFactoryCollection.instance().getBugCode(bugCodeAbbrev); String bugCodeDescription = bugCode.getDescription(); if (bugCodeDescription == null) continue; XMLAttributeList attributeList = new XMLAttributeList(); attributeList.addAttribute("abbrev", bugCodeAbbrev); if (bugCode.getCWEid() != 0) { attributeList.addAttribute("cweid", Integer.toString(bugCode.getCWEid())); } xmlOutput.openTag("BugCode", attributeList); xmlOutput.openTag("Description"); xmlOutput.writeText(bugCodeDescription); xmlOutput.closeTag("Description"); xmlOutput.closeTag("BugCode"); } } private void writeBugCategories(XMLOutput xmlOutput) throws IOException { // Find bug categories reported Set<String> bugCatSet = new HashSet<String>(); for (Iterator<BugInstance> i = iterator(); i.hasNext();) { BugInstance bugInstance = i.next(); BugPattern bugPattern = bugInstance.getBugPattern(); if (bugPattern != null) { bugCatSet.add(bugPattern.getCategory()); } } // Emit element describing each reported bug code for (String bugCat : bugCatSet) { String bugCatDescription = I18N.instance().getBugCategoryDescription(bugCat); if (bugCatDescription == null) continue; XMLAttributeList attributeList = new XMLAttributeList(); attributeList.addAttribute("category", bugCat); xmlOutput.openTag("BugCategory", attributeList); xmlOutput.openTag("Description"); xmlOutput.writeText(bugCatDescription); xmlOutput.closeTag("Description"); xmlOutput.closeTag("BugCategory"); } } private void emitErrors(XMLOutput xmlOutput) throws IOException { // System.err.println("Writing errors to XML output"); XMLAttributeList attributeList = new XMLAttributeList(); attributeList.addAttribute("errors", Integer.toString(errorList.size())); attributeList.addAttribute("missingClasses", Integer.toString(missingClassSet.size())); xmlOutput.openTag(ERRORS_ELEMENT_NAME, attributeList); // Emit Error elements describing analysis errors for (AnalysisError error : getErrors()) { xmlOutput.openTag(ERROR_ELEMENT_NAME); xmlOutput.openTag(ERROR_MESSAGE_ELEMENT_NAME); xmlOutput.writeText(error.getMessage()); xmlOutput.closeTag(ERROR_MESSAGE_ELEMENT_NAME); if (error.getExceptionMessage() != null) { xmlOutput.openTag(ERROR_EXCEPTION_ELEMENT_NAME); xmlOutput.writeText(error.getExceptionMessage()); xmlOutput.closeTag(ERROR_EXCEPTION_ELEMENT_NAME); String stackTrace[] = error.getStackTrace(); if (stackTrace != null) { for (String aStackTrace : stackTrace) { xmlOutput.openTag(ERROR_STACK_TRACE_ELEMENT_NAME); xmlOutput.writeText(aStackTrace); xmlOutput.closeTag(ERROR_STACK_TRACE_ELEMENT_NAME); } } if (false && error.getNestedExceptionMessage() != 
null) { xmlOutput.openTag(ERROR_EXCEPTION_ELEMENT_NAME); xmlOutput.writeText(error.getNestedExceptionMessage()); xmlOutput.closeTag(ERROR_EXCEPTION_ELEMENT_NAME); stackTrace = error.getNestedStackTrace(); if (stackTrace != null) { for (String aStackTrace : stackTrace) { xmlOutput.openTag(ERROR_STACK_TRACE_ELEMENT_NAME); xmlOutput.writeText(aStackTrace); xmlOutput.closeTag(ERROR_STACK_TRACE_ELEMENT_NAME); } } } } xmlOutput.closeTag(ERROR_ELEMENT_NAME); } // Emit missing classes XMLOutputUtil.writeElementList(xmlOutput, MISSING_CLASS_ELEMENT_NAME, missingClassIterator()); xmlOutput.closeTag(ERRORS_ELEMENT_NAME); } private void checkInputStream(@WillNotClose InputStream in) throws IOException { if (!in.markSupported()) return; byte[] buf = new byte[200]; in.mark(buf.length); int numRead = 0; boolean isEOF = false; while (numRead < buf.length && !isEOF) { int n = in.read(buf, numRead, buf.length - numRead); if (n < 0) { isEOF = true; } else { numRead += n; } } in.reset(); BufferedReader reader = new BufferedReader(Util.getReader(new ByteArrayInputStream(buf))); try { String line; while ((line = reader.readLine()) != null) { if (line.startsWith("<BugCollection")) { return; } } } finally { reader.close(); } throw new IOException("XML does not contain saved bug data"); } /** * Clone all of the BugInstance objects in the source Collection and add * them to the destination Collection. * * @param dest * the destination Collection * @param source * the source Collection */ public static void cloneAll(Collection<BugInstance> dest, Collection<BugInstance> source) { for (BugInstance obj : source) { dest.add((BugInstance) obj.clone()); } } /** * @author pugh */ private static final class BoundedLinkedHashSet extends LinkedHashSet<AnalysisError> { @Override public boolean add(AnalysisError a) { if (this.size() > 1000) return false; return super.add(a); } } public static class BugInstanceComparator implements Comparator<BugInstance> { private BugInstanceComparator() { } public int compare(BugInstance lhs, BugInstance rhs) { ClassAnnotation lca = lhs.getPrimaryClass(); ClassAnnotation rca = rhs.getPrimaryClass(); if (lca == null || rca == null) throw new IllegalStateException("null class annotation: " + lca + "," + rca); int cmp = lca.getClassName().compareTo(rca.getClassName()); if (cmp != 0) return cmp; return lhs.compareTo(rhs); } public static final BugInstanceComparator instance = new BugInstanceComparator(); } public static class MultiversionBugInstanceComparator extends BugInstanceComparator { @Override public int compare(BugInstance lhs, BugInstance rhs) { int result = super.compare(lhs, rhs); if (result != 0) return result; long diff = lhs.getFirstVersion() - rhs.getFirstVersion(); if (diff == 0) diff = lhs.getLastVersion() - rhs.getLastVersion(); if (diff < 0) return -1; if (diff > 0) return 1; return 0; } public static final MultiversionBugInstanceComparator instance = new MultiversionBugInstanceComparator(); } private final Comparator<BugInstance> comparator; private final TreeSet<BugInstance> bugSet; private final LinkedHashSet<AnalysisError> errorList; private final TreeSet<String> missingClassSet; @CheckForNull private String summaryHTML; private final ProjectStats projectStats; // private Map<String, ClassHash> classHashMap; private final Map<String, ClassFeatureSet> classFeatureSetMap; private final List<AppVersion> appVersionList; private boolean preciseHashOccurrenceNumbersAvailable = false; /** * Sequence number of the most-recently analyzed version of the code. 
*/ private long sequence; /** * Release name of the analyzed application. */ private String releaseName; /** * Current analysis timestamp. */ private long timestamp; public SortedBugCollection(Project project) { this(new ProjectStats(), MultiversionBugInstanceComparator.instance, project); } public SortedBugCollection(File f) throws IOException, DocumentException { this(); this.readXML(f); } /** * Constructor. Creates an empty object. */ public SortedBugCollection() { this(new ProjectStats()); } /** * Constructor. Creates an empty object. */ public SortedBugCollection(Comparator<BugInstance> comparator) { this(new ProjectStats(), comparator); } /** * Constructor. Creates an empty object given an existing ProjectStats. * * @param projectStats * the ProjectStats */ public SortedBugCollection(ProjectStats projectStats) { this(projectStats, MultiversionBugInstanceComparator.instance); } public SortedBugCollection(ProjectStats projectStats, Project project) { this(projectStats, MultiversionBugInstanceComparator.instance, project); } /** * Constructor. Creates an empty object given an existing ProjectStats. * * @param projectStats * the ProjectStats * @param comparator * to use for sorting bug instances */ public SortedBugCollection(ProjectStats projectStats, Comparator<BugInstance> comparator) { this(projectStats, comparator, new Project()); } public SortedBugCollection(ProjectStats projectStats, Comparator<BugInstance> comparator, Project project) { this.projectStats = projectStats; this.comparator = comparator; this.project = project; bugSet = new TreeSet<BugInstance>(comparator); errorList = new BoundedLinkedHashSet(); missingClassSet = new TreeSet<String>(); summaryHTML = null; classFeatureSetMap = new TreeMap<String, ClassFeatureSet>(); sequence = 0L; appVersionList = new LinkedList<AppVersion>(); releaseName = ""; timestamp = -1L; } public boolean add(BugInstance bugInstance, boolean updateActiveTime) { assert !bugsPopulated; if (bugsPopulated) AnalysisContext.logError("Bug collection marked as populated, but bugs added", new RuntimeException()); preciseHashOccurrenceNumbersAvailable = false; if (updateActiveTime) { bugInstance.setFirstVersion(sequence); } invalidateHashes(); return bugSet.add(bugInstance); } private void invalidateHashes() { preciseHashOccurrenceNumbersAvailable = false; } public boolean remove(BugInstance bugInstance) { invalidateHashes(); return bugSet.remove(bugInstance); } public Iterator<BugInstance> iterator() { return bugSet.iterator(); } public Collection<BugInstance> getCollection() { return Collections.unmodifiableCollection(bugSet); } public void addError(String message, Throwable exception) { if (exception instanceof MissingClassException) { MissingClassException e = (MissingClassException) exception; addMissingClass(AbstractBugReporter.getMissingClassName(e.getClassNotFoundException())); return; } if (exception instanceof ClassNotFoundException) { ClassNotFoundException e = (ClassNotFoundException) exception; addMissingClass(AbstractBugReporter.getMissingClassName(e)); return; } if (exception instanceof edu.umd.cs.findbugs.classfile.MissingClassException) { edu.umd.cs.findbugs.classfile.MissingClassException e = (edu.umd.cs.findbugs.classfile.MissingClassException) exception; addMissingClass(AbstractBugReporter.getMissingClassName(e.toClassNotFoundException())); return; } errorList.add(new AnalysisError(message, exception)); } public void addError(AnalysisError error) { errorList.add(error); } public void clearErrors() { errorList.clear(); } public void 
addMissingClass(String className) { if (className == null || className.length() == 0) return; if (className.startsWith("[")) { assert false : "Bad class name " + className; return; } if (className.endsWith(";")) addError("got signature rather than classname: " + className, new IllegalArgumentException()); else missingClassSet.add(className); } public Collection<? extends AnalysisError> getErrors() { return errorList; } public Iterator<String> missingClassIterator() { return missingClassSet.iterator(); } public boolean contains(BugInstance bugInstance) { return bugSet.contains(bugInstance); } public BugInstance getMatching(BugInstance bugInstance) { SortedSet<BugInstance> tailSet = bugSet.tailSet(bugInstance); if (tailSet.isEmpty()) return null; BugInstance first = tailSet.first(); return bugInstance.equals(first) ? first : null; } public String getSummaryHTML() throws IOException { if (summaryHTML == null) { try { StringWriter writer = new StringWriter(); ProjectStats stats = getProjectStats(); stats.transformSummaryToHTML(writer); summaryHTML = writer.toString(); } catch (final TransformerException e) { IOException ioe = new IOException("Couldn't generate summary HTML"); ioe.initCause(e); throw ioe; } } return summaryHTML; } public ProjectStats getProjectStats() { return projectStats; } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#lookupFromUniqueId(java.lang.String) */ @Deprecated public BugInstance lookupFromUniqueId(String uniqueId) { for (BugInstance bug : bugSet) if (bug.getInstanceHash().equals(uniqueId)) return bug; return null; } public long getSequenceNumber() { return sequence; } public void setSequenceNumber(long sequence) { this.sequence = sequence; } public SortedBugCollection duplicate() { SortedBugCollection dup = createEmptyCollectionWithMetadata(); SortedBugCollection.cloneAll(dup.bugSet, this.bugSet); return dup; } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#createEmptyCollectionWithMetadata() */ public SortedBugCollection createEmptyCollectionWithMetadata() { SortedBugCollection dup = new SortedBugCollection((ProjectStats) projectStats.clone(), comparator, project.duplicate()); dup.projectStats.clearBugCounts(); dup.errorList.addAll(this.errorList); dup.missingClassSet.addAll(this.missingClassSet); dup.summaryHTML = this.summaryHTML; dup.classFeatureSetMap.putAll(this.classFeatureSetMap); dup.sequence = this.sequence; dup.analysisVersion = this.analysisVersion; dup.analysisTimestamp = this.analysisTimestamp; dup.timestamp = this.timestamp; dup.releaseName = this.releaseName; for (AppVersion appVersion : appVersionList) { dup.appVersionList.add((AppVersion) appVersion.clone()); } return dup; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#clearBugInstances() */ public void clearBugInstances() { bugSet.clear(); invalidateHashes(); } public void clearMissingClasses() { missingClassSet.clear(); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#getReleaseName() */ public String getReleaseName() { if (releaseName == null) return ""; return releaseName; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#setReleaseName(java.lang.String) */ public void setReleaseName(String releaseName) { this.releaseName = releaseName; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#appVersionIterator() */ public Iterator<AppVersion> appVersionIterator() { return appVersionList.iterator(); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#addAppVersion(edu.umd.cs.findbugs. 
* AppVersion) */ public void addAppVersion(AppVersion appVersion) { appVersionList.add(appVersion); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#clearAppVersions() */ public void clearAppVersions() { appVersionList.clear(); sequence = 0; } public void trimAppVersions(long numberToRetain) { while (appVersionList.size() > numberToRetain) appVersionList.remove(appVersionList.size() - 1); sequence = appVersionList.size(); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#setTimestamp(long) */ public void setTimestamp(long timestamp) { this.timestamp = timestamp; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#getTimestamp() */ public long getTimestamp() { return timestamp; } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#getClassFeatureSet(java.lang.String) */ public ClassFeatureSet getClassFeatureSet(String className) { return classFeatureSetMap.get(className); } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#setClassFeatureSet(edu.umd.cs.findbugs * .model.ClassFeatureSet) */ public void setClassFeatureSet(ClassFeatureSet classFeatureSet) { classFeatureSetMap.put(classFeatureSet.getClassName(), classFeatureSet); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#classFeatureSetIterator() */ public Iterator<ClassFeatureSet> classFeatureSetIterator() { return classFeatureSetMap.values().iterator(); } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#clearClassFeatures() */ public void clearClassFeatures() { classFeatureSetMap.clear(); } /** * @param withMessages * The withMessages to set. */ public void setWithMessages(boolean withMessages) { this.withMessages = withMessages; } /** * @return Returns the withMessages. */ public boolean getWithMessages() { return withMessages; } /* * (non-Javadoc) * * @see * edu.umd.cs.findbugs.BugCollection#getAppVersionFromSequenceNumber(int) */ public AppVersion getAppVersionFromSequenceNumber(long target) { for (AppVersion av : appVersionList) if (av.getSequenceNumber() == target) return av; if (target == this.getSequenceNumber()) return this.getCurrentAppVersion(); return null; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#findBug(java.lang.String, * java.lang.String, int) */ public BugInstance findBug(String instanceHash, String bugType, int lineNumber) { for (BugInstance bug : bugSet) if (bug.getInstanceHash().equals(instanceHash) && bug.getBugPattern().getType().equals(bugType) && bug.getPrimarySourceLineAnnotation().getStartLine() == lineNumber) return bug; return null; } /** * @param version */ public void setAnalysisVersion(String version) { this.analysisVersion = version; } public String getAnalysisVersion() { return this.analysisVersion; } public InputStream progessMonitoredInputStream(File f, String msg) throws IOException { InputStream in = new FileInputStream(f); long length = f.length(); if (length > Integer.MAX_VALUE) throw new IllegalArgumentException("File " + f + " is too big at " + length + " bytes"); return wrapGzip(progressMonitoredInputStream(in, (int) length, msg), f); } public InputStream progessMonitoredInputStream(URLConnection c, String msg) throws IOException { InputStream in = c.getInputStream(); int length = c.getContentLength(); return wrapGzip(progressMonitoredInputStream(in, length, msg), c.getURL()); } public InputStream progressMonitoredInputStream(InputStream in, int length, String msg) { if (GraphicsEnvironment.isHeadless()) return in; IGuiCallback guiCallback = project.getGuiCallback(); if 
(guiCallback == null) return in; return guiCallback.getProgressMonitorInputStream(in, length, msg); } public InputStream wrapGzip(InputStream in, Object source) { try { if (source instanceof File) { File f = (File) source; if (f.getName().endsWith(".gz")) { return new GZIPInputStream(in); } } else if (source instanceof URL) { URL u = (URL) source; if (u.getPath().endsWith(".gz")) { return new GZIPInputStream(in); } } } catch (IOException e) { assert true; } return in; } public void clearCloud() { Cloud oldCloud = cloud; IGuiCallback callback = project.getGuiCallback(); if (oldCloud != null) { callback.unregisterCloud(project, this, oldCloud); oldCloud.shutdown(); } cloud = null; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#reinitializeCloud() */ public Cloud reinitializeCloud() { Cloud oldCloud = cloud; IGuiCallback callback = project.getGuiCallback(); if (oldCloud != null) { callback.unregisterCloud(project, this, oldCloud); oldCloud.shutdown(); } cloud = null; Cloud newCloud = getCloud(); assert newCloud == cloud; if (bugsPopulated && cloud != null) { cloud.bugsPopulated(); cloud.initiateCommunication(); } return cloud; } public void setXmlCloudDetails(Map<String, String> map) { this.xmlCloudDetails = map; } public Map<String, String> getXmlCloudDetails() { return xmlCloudDetails; } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.BugCollection#setMinimalXML(boolean) */ public void setMinimalXML(boolean minimalXML) { this.minimalXML = minimalXML; } /** * @param b */ public void setDoNotUseCloud(boolean b) { this.shouldNotUsePlugin = b; } boolean bugsPopulated = false; /* (non-Javadoc) * @see edu.umd.cs.findbugs.BugCollection#bugsPopulated() */ public void bugsPopulated() { bugsPopulated = true; } } // vim:ts=4
use gzip compression when writing to a .gz suffix git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@13609 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
findbugs/src/java/edu/umd/cs/findbugs/SortedBugCollection.java
use gzip compression when writing to a .gz suffix
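The commit subject above describes gzip-compressing the XML output whenever the target file name carries a .gz suffix. The SortedBugCollection source in this record already handles the read side through its wrapGzip() helper; the write-side counterpart is not shown here, so the following is only a minimal, hypothetical sketch of the idea (GzipWriteSketch and openOutput are illustrative names, not part of the FindBugs code base):

```java
// Hypothetical sketch: mirror the read-side wrapGzip() when writing, by wrapping the
// OutputStream in a GZIPOutputStream whenever the destination file name ends in ".gz".
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;

public class GzipWriteSketch {
    /** Opens an OutputStream for the file, gzip-compressed if its name ends with ".gz". */
    static OutputStream openOutput(File file) throws IOException {
        OutputStream out = new BufferedOutputStream(new FileOutputStream(file));
        if (file.getName().endsWith(".gz")) {
            out = new GZIPOutputStream(out); // caller must close() so the gzip trailer is written
        }
        return out;
    }
}
```

Closing the returned stream is what finishes the gzip member, so a caller such as writeXML(File) would need to close it even on error paths.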
Java
apache-2.0
c8146874a9633a3afd9da1621131c6d67277ac3f
0
apache/isis,apache/isis,apache/isis,apache/isis,apache/isis,apache/isis
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.isis.viewer.wicket.ui.components.scalars.isisapplib; import java.awt.image.BufferedImage; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.List; import javax.activation.MimeType; import javax.imageio.ImageIO; import org.apache.wicket.AttributeModifier; import org.apache.wicket.Component; import org.apache.wicket.MarkupContainer; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.extensions.markup.html.image.resource.ThumbnailImageResource; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.upload.FileUpload; import org.apache.wicket.markup.html.form.upload.FileUploadField; import org.apache.wicket.markup.html.image.Image; import org.apache.wicket.markup.html.image.NonCachingImage; import org.apache.wicket.markup.html.image.resource.BufferedDynamicImageResource; import org.apache.wicket.markup.html.link.ResourceLink; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.request.resource.IResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.isis.applib.value.Blob; import org.apache.isis.applib.value.NamedWithMimeType; import org.apache.isis.core.commons.lang.CloseableExtensions; import org.apache.isis.core.metamodel.adapter.ObjectAdapter; import org.apache.isis.viewer.wicket.model.models.ScalarModel; import org.apache.isis.viewer.wicket.ui.components.scalars.ScalarPanelAbstract2; import org.apache.isis.viewer.wicket.ui.components.widgets.bootstrap.FormGroup; import org.apache.isis.viewer.wicket.ui.util.Components; import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.fileinput.BootstrapFileInputField; public abstract class IsisBlobOrClobPanelAbstract<T extends NamedWithMimeType> extends ScalarPanelAbstract2 { private static final long serialVersionUID = 1L; @SuppressWarnings("unused") private static final Logger LOG = LoggerFactory.getLogger(IsisBlobOrClobPanelAbstract.class); private static final String ID_SCALAR_IF_REGULAR = "scalarIfRegular"; private static final String ID_SCALAR_IF_REGULAR_DOWNLOAD = "scalarIfRegularDownload"; private static final String ID_FILE_NAME = "fileName"; private static final String ID_SCALAR_IF_REGULAR_CLEAR = "scalarIfRegularClear"; private static final String ID_SCALAR_NAME = "scalarName"; private static final String ID_SCALAR_VALUE = "scalarValue"; private static final String ID_IMAGE = "scalarImage"; private static final String ID_SCALAR_IF_COMPACT = "scalarIfCompact"; private static final String ID_SCALAR_IF_COMPACT_DOWNLOAD = "scalarIfCompactDownload"; private Image 
wicketImage; private FileUploadField fileUploadField; private Label fileNameLabel; protected enum InputFieldVisibility { VISIBLE, NOT_VISIBLE } protected enum InputFieldEditability{ EDITABLE, NOT_EDITABLE } @Override protected FormGroup createComponentForRegular() { fileUploadField = createFileUploadField(ID_SCALAR_VALUE); fileUploadField.setLabel(Model.of(getModel().getName())); final FormGroup scalarIfRegularFormGroup = new FormGroup(ID_SCALAR_IF_REGULAR, fileUploadField); scalarIfRegularFormGroup.add(fileUploadField); final Label scalarName = new Label(ID_SCALAR_NAME, getModel().getName()); scalarIfRegularFormGroup.add(scalarName); wicketImage = asWicketImage(ID_IMAGE); if(wicketImage != null) { wicketImage.setOutputMarkupId(true); scalarIfRegularFormGroup.addOrReplace(wicketImage); } else { Components.permanentlyHide(scalarIfRegularFormGroup, ID_IMAGE); } updateFileNameLabel(ID_FILE_NAME, scalarIfRegularFormGroup); updateDownloadLink(ID_SCALAR_IF_REGULAR_DOWNLOAD, scalarIfRegularFormGroup); return scalarIfRegularFormGroup; } protected Component getScalarValueComponent() { return fileUploadField; } // ////////////////////////////////////// /** * Inline prompts are <i>not</i> supported by this component. */ @Override protected InlinePromptConfig getInlinePromptConfig() { return InlinePromptConfig.notSupported(); } // ////////////////////////////////////// @Override protected Component createComponentForCompact() { final MarkupContainer scalarIfCompact = new WebMarkupContainer(ID_SCALAR_IF_COMPACT); MarkupContainer downloadLink = updateDownloadLink(ID_SCALAR_IF_COMPACT_DOWNLOAD, scalarIfCompact); if(downloadLink != null) { updateFileNameLabel("fileNameIfCompact", downloadLink); } return scalarIfCompact; } // ////////////////////////////////////// private Image asWicketImage(String id) { final ObjectAdapter adapter = getModel().getObject(); if(adapter == null) { return null; } final Object object = adapter.getObject(); if(!(object instanceof Blob)) { return null; } final Blob blob = (Blob)object; final MimeType mimeType = blob.getMimeType(); if(mimeType == null || !mimeType.getPrimaryType().equals("image")) { return null; } final BufferedImage image = asBufferedImage(blob); if(image == null) { return null; } final BufferedDynamicImageResource imageResource = new BufferedDynamicImageResource(); imageResource.setImage(image); final ThumbnailImageResource thumbnailImageResource = new ThumbnailImageResource(imageResource, 300); return new NonCachingImage(id, thumbnailImageResource); } private BufferedImage asBufferedImage(final Blob blob) { final byte[] bytes = blob.getBytes(); if(bytes == null) { return null; } final ByteArrayInputStream bais = new ByteArrayInputStream(bytes); try { return ImageIO.read(bais); } catch (IOException ex) { return null; } finally { CloseableExtensions.closeSafely(bais); } } // ////////////////////////////////////// protected void onInitializeWhenViewMode() { updateRegularFormComponents(InputFieldVisibility.VISIBLE, InputFieldEditability.NOT_EDITABLE, null, null); } protected void onInitializeWhenDisabled(final String disableReason) { updateRegularFormComponents(InputFieldVisibility.VISIBLE, InputFieldEditability.NOT_EDITABLE, null, null); } protected void onInitializeWhenEnabled() { updateRegularFormComponents(InputFieldVisibility.VISIBLE, InputFieldEditability.EDITABLE, null, null); } private FileUploadField createFileUploadField(String componentId) { final BootstrapFileInputField fileUploadField = new BootstrapFileInputField(componentId, new 
IModel<List<FileUpload>>() { private static final long serialVersionUID = 1L; @Override public void setObject(final List<FileUpload> fileUploads) { if (fileUploads == null || fileUploads.isEmpty()) { return; } final T blob = getBlobOrClobFrom(fileUploads); final ObjectAdapter adapter = getPersistenceSession().adapterFor(blob); getModel().setObject(adapter); } @Override public void detach() { } @Override public List<FileUpload> getObject() { return null; } }); fileUploadField.getConfig().showUpload(false).mainClass("input-group-sm"); return fileUploadField; } @Override protected void onDisabled(final String disableReason, final AjaxRequestTarget target) { updateRegularFormComponents(InputFieldVisibility.VISIBLE, InputFieldEditability.NOT_EDITABLE, disableReason, target); } @Override protected void onEnabled(final AjaxRequestTarget target) { updateRegularFormComponents(InputFieldVisibility.VISIBLE, InputFieldEditability.EDITABLE, null, target); } protected abstract T getBlobOrClobFrom(final List<FileUpload> fileUploads); @SuppressWarnings("unchecked") private T getBlobOrClob(final ScalarModel model) { ObjectAdapter adapter = model.getObject(); return adapter != null? (T) adapter.getObject(): null; } public IsisBlobOrClobPanelAbstract(String id, ScalarModel scalarModel) { super(id, scalarModel); } private void updateRegularFormComponents( final InputFieldVisibility visibility, final InputFieldEditability editability, final String disabledReason, final AjaxRequestTarget target) { MarkupContainer formComponent = (MarkupContainer) getComponentForRegular(); sync(formComponent, visibility, editability, disabledReason, target); final Component component = formComponent.get(ID_SCALAR_VALUE); final InputFieldVisibility editingWidgetVisibility = editability == InputFieldEditability.EDITABLE ? InputFieldVisibility.VISIBLE : InputFieldVisibility.NOT_VISIBLE; sync(component, editingWidgetVisibility, null, disabledReason, target); addAcceptFilterTo(component); fileNameLabel = updateFileNameLabel(ID_FILE_NAME, formComponent); updateClearLink(editingWidgetVisibility, null, target); // the visibility of download link is intentionally 'backwards'; // if in edit mode then do NOT show final MarkupContainer downloadLink = updateDownloadLink(ID_SCALAR_IF_REGULAR_DOWNLOAD, formComponent); sync(downloadLink, visibility, editability, disabledReason, target); // ditto any image sync(wicketImage, visibility, editability, disabledReason, target); } private void sync( final Component component, final InputFieldVisibility visibility, final InputFieldEditability editability, final String disabledReason, final AjaxRequestTarget target) { if(component == null) { return; } component.setOutputMarkupId(true); // enable ajax link if(visibility != null) { component.setVisible(visibility == InputFieldVisibility.VISIBLE); if (target != null) { target.add(component); } } if(editability != null) { // // dynamic disablement doesn't yet work, this exception is thrown when form is submitted: // // // // Caused by: java.lang.IllegalStateException: ServletRequest does not contain multipart content. // // One possible solution is to explicitly call Form.setMultipart(true), Wicket tries its best to // // auto-detect multipart forms but there are certain situation where it cannot. // // component.setEnabled(editability == InputFieldEditability.EDITABLE); // // final AttributeModifier title = new AttributeModifier("title", Model.of(disabledReason != null ? 
disabledReason : "")); // component.add(title); // // if (target != null) { // target.add(component); // } } } private String getAcceptFilter(){ return scalarModel.getFileAccept(); } private void addAcceptFilterTo(Component component){ final String filter = getAcceptFilter(); if(filter==null || filter.isEmpty()) return; // ignore class AcceptAttributeModel extends Model<String> { private static final long serialVersionUID = 1L; @Override public String getObject() { return filter; } } component.add(new AttributeModifier("accept", new AcceptAttributeModel())); } private Label updateFileNameLabel(String idFileName, MarkupContainer formComponent) { class FileNameModel extends Model<String> { private static final long serialVersionUID = 1L; @Override public String getObject() { T blobOrClob = getBlobOrClobFromModel(); String fileName = blobOrClob != null? blobOrClob.getName(): ""; return fileName; } } Label fileNameLabel = new Label(idFileName, new FileNameModel()); formComponent.addOrReplace(fileNameLabel); fileNameLabel.setOutputMarkupId(true); return fileNameLabel; } private void updateClearLink( final InputFieldVisibility visibility, final InputFieldEditability editability, final AjaxRequestTarget target) { final MarkupContainer formComponent = (MarkupContainer) getComponentForRegular(); formComponent.setOutputMarkupId(true); // enable ajax link final AjaxLink<Void> ajaxLink = new AjaxLink<Void>(ID_SCALAR_IF_REGULAR_CLEAR){ private static final long serialVersionUID = 1L; @Override public void onClick(AjaxRequestTarget target) { setEnabled(false); ScalarModel model = IsisBlobOrClobPanelAbstract.this.getModel(); model.setObject(null); target.add(formComponent); target.add(fileNameLabel); } }; ajaxLink.setOutputMarkupId(true); formComponent.addOrReplace(ajaxLink); final T blobOrClob = getBlobOrClobFromModel(); final Component clearButton = formComponent.get(ID_SCALAR_IF_REGULAR_CLEAR); clearButton.setVisible(blobOrClob != null && visibility == InputFieldVisibility.VISIBLE); clearButton.setEnabled(blobOrClob != null); if(target != null) { target.add(formComponent); target.add(clearButton); target.add(ajaxLink); } } private MarkupContainer updateDownloadLink(String downloadId, MarkupContainer container) { final ResourceLink<?> resourceLink = createResourceLink(downloadId); if(resourceLink != null) { container.addOrReplace(resourceLink); } else { Components.permanentlyHide(container, downloadId); } return resourceLink; } private ResourceLink<?> createResourceLink(String id) { final T blob = getBlobOrClobFromModel(); if(blob == null) { return null; } final IResource bar = newResource(blob); return new ResourceLink<Object>(id, bar); } private T getBlobOrClobFromModel() { return getBlobOrClob(getModel()); } /** * Mandatory hook method. */ protected abstract IResource newResource(final T namedWithMimeType); @Override protected String getScalarPanelType() { return "isisBlobPanel"; } }
core/viewer-wicket-ui/src/main/java/org/apache/isis/viewer/wicket/ui/components/scalars/isisapplib/IsisBlobOrClobPanelAbstract.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.isis.viewer.wicket.ui.components.scalars.isisapplib; import java.awt.image.BufferedImage; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.List; import javax.activation.MimeType; import javax.imageio.ImageIO; import org.apache.wicket.AttributeModifier; import org.apache.wicket.Component; import org.apache.wicket.MarkupContainer; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.extensions.markup.html.image.resource.ThumbnailImageResource; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.upload.FileUpload; import org.apache.wicket.markup.html.form.upload.FileUploadField; import org.apache.wicket.markup.html.image.Image; import org.apache.wicket.markup.html.image.NonCachingImage; import org.apache.wicket.markup.html.image.resource.BufferedDynamicImageResource; import org.apache.wicket.markup.html.link.ResourceLink; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.request.resource.IResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.isis.applib.value.Blob; import org.apache.isis.applib.value.NamedWithMimeType; import org.apache.isis.core.commons.lang.CloseableExtensions; import org.apache.isis.core.metamodel.adapter.ObjectAdapter; import org.apache.isis.viewer.wicket.model.models.ScalarModel; import org.apache.isis.viewer.wicket.ui.components.scalars.ScalarPanelAbstract2; import org.apache.isis.viewer.wicket.ui.components.widgets.bootstrap.FormGroup; import org.apache.isis.viewer.wicket.ui.util.Components; import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.fileinput.BootstrapFileInputField; public abstract class IsisBlobOrClobPanelAbstract<T extends NamedWithMimeType> extends ScalarPanelAbstract2 { private static final long serialVersionUID = 1L; @SuppressWarnings("unused") private static final Logger LOG = LoggerFactory.getLogger(IsisBlobOrClobPanelAbstract.class); private static final String ID_SCALAR_IF_REGULAR = "scalarIfRegular"; private static final String ID_SCALAR_IF_REGULAR_DOWNLOAD = "scalarIfRegularDownload"; private static final String ID_FILE_NAME = "fileName"; private static final String ID_SCALAR_IF_REGULAR_CLEAR = "scalarIfRegularClear"; private static final String ID_SCALAR_NAME = "scalarName"; private static final String ID_SCALAR_VALUE = "scalarValue"; private static final String ID_IMAGE = "scalarImage"; private static final String ID_SCALAR_IF_COMPACT = "scalarIfCompact"; private static final String ID_SCALAR_IF_COMPACT_DOWNLOAD = "scalarIfCompactDownload"; private Image 
wicketImage; private FileUploadField fileUploadField; private Label fileNameLabel; protected enum InputFieldVisibility { VISIBLE, NOT_VISIBLE } protected enum InputFieldEditability{ EDITABLE, NOT_EDITABLE } @Override protected FormGroup createComponentForRegular() { fileUploadField = createFileUploadField(ID_SCALAR_VALUE); fileUploadField.setLabel(Model.of(getModel().getName())); final FormGroup scalarIfRegularFormGroup = new FormGroup(ID_SCALAR_IF_REGULAR, fileUploadField); scalarIfRegularFormGroup.add(fileUploadField); final Label scalarName = new Label(ID_SCALAR_NAME, getModel().getName()); scalarIfRegularFormGroup.add(scalarName); wicketImage = asWicketImage(ID_IMAGE); if(wicketImage != null) { wicketImage.setOutputMarkupId(true); scalarIfRegularFormGroup.addOrReplace(wicketImage); } else { Components.permanentlyHide(scalarIfRegularFormGroup, ID_IMAGE); } updateFileNameLabel(ID_FILE_NAME, scalarIfRegularFormGroup); updateDownloadLink(ID_SCALAR_IF_REGULAR_DOWNLOAD, scalarIfRegularFormGroup); return scalarIfRegularFormGroup; } protected Component getScalarValueComponent() { return fileUploadField; } // ////////////////////////////////////// /** * Inline prompts are <i>not</i> supported by this component. */ @Override protected InlinePromptConfig getInlinePromptConfig() { return InlinePromptConfig.notSupported(); } // ////////////////////////////////////// @Override protected Component createComponentForCompact() { final MarkupContainer scalarIfCompact = new WebMarkupContainer(ID_SCALAR_IF_COMPACT); MarkupContainer downloadLink = updateDownloadLink(ID_SCALAR_IF_COMPACT_DOWNLOAD, scalarIfCompact); if(downloadLink != null) { updateFileNameLabel("fileNameIfCompact", downloadLink); } return scalarIfCompact; } // ////////////////////////////////////// private Image asWicketImage(String id) { final ObjectAdapter adapter = getModel().getObject(); if(adapter == null) { return null; } final Object object = adapter.getObject(); if(!(object instanceof Blob)) { return null; } final Blob blob = (Blob)object; final MimeType mimeType = blob.getMimeType(); if(mimeType == null || !mimeType.getPrimaryType().equals("image")) { return null; } final BufferedImage image = asBufferedImage(blob); if(image == null) { return null; } final BufferedDynamicImageResource imageResource = new BufferedDynamicImageResource(); imageResource.setImage(image); final ThumbnailImageResource thumbnailImageResource = new ThumbnailImageResource(imageResource, 300); return new NonCachingImage(id, thumbnailImageResource); } private BufferedImage asBufferedImage(final Blob blob) { final byte[] bytes = blob.getBytes(); if(bytes == null) { return null; } final ByteArrayInputStream bais = new ByteArrayInputStream(bytes); try { return ImageIO.read(bais); } catch (IOException ex) { return null; } finally { CloseableExtensions.closeSafely(bais); } } // ////////////////////////////////////// protected void onInitializeWhenViewMode() { updateRegularFormComponents(InputFieldVisibility.NOT_VISIBLE, null, null, null); } protected void onInitializeWhenDisabled(final String disableReason) { updateRegularFormComponents(InputFieldVisibility.NOT_VISIBLE, null, null, null); } protected void onInitializeWhenEnabled() { updateRegularFormComponents(InputFieldVisibility.VISIBLE, null, null, null); } private FileUploadField createFileUploadField(String componentId) { final BootstrapFileInputField fileUploadField = new BootstrapFileInputField(componentId, new IModel<List<FileUpload>>() { private static final long serialVersionUID = 1L; @Override public 
void setObject(final List<FileUpload> fileUploads) { if (fileUploads == null || fileUploads.isEmpty()) { return; } final T blob = getBlobOrClobFrom(fileUploads); final ObjectAdapter adapter = getPersistenceSession().adapterFor(blob); getModel().setObject(adapter); } @Override public void detach() { } @Override public List<FileUpload> getObject() { return null; } }); fileUploadField.getConfig().showUpload(false).mainClass("input-group-sm"); return fileUploadField; } @Override protected void onDisabled(final String disableReason, final AjaxRequestTarget target) { updateRegularFormComponents(null, InputFieldEditability.NOT_EDITABLE, disableReason, target); } @Override protected void onEnabled(final AjaxRequestTarget target) { updateRegularFormComponents(null, InputFieldEditability.EDITABLE, null, target); } protected abstract T getBlobOrClobFrom(final List<FileUpload> fileUploads); @SuppressWarnings("unchecked") private T getBlobOrClob(final ScalarModel model) { ObjectAdapter adapter = model.getObject(); return adapter != null? (T) adapter.getObject(): null; } public IsisBlobOrClobPanelAbstract(String id, ScalarModel scalarModel) { super(id, scalarModel); } private void updateRegularFormComponents( final InputFieldVisibility visibility, final InputFieldEditability editability, final String disabledReason, final AjaxRequestTarget target) { MarkupContainer formComponent = (MarkupContainer) getComponentForRegular(); sync(formComponent, visibility, editability, disabledReason, target); final Component component = formComponent.get(ID_SCALAR_VALUE); sync(component, visibility, editability, disabledReason, target); addAcceptFilterTo(component); fileNameLabel = updateFileNameLabel(ID_FILE_NAME, formComponent); updateClearLink(visibility, editability, target); // the visibility of download link is intentionally 'backwards'; // if in edit mode then do NOT show final MarkupContainer downloadLink = updateDownloadLink(ID_SCALAR_IF_REGULAR_DOWNLOAD, formComponent); sync(downloadLink, visibility, editability, disabledReason, target); // ditto any image sync(wicketImage, visibility, editability, disabledReason, target); } private void sync( final Component component, final InputFieldVisibility visibility, final InputFieldEditability editability, final String disabledReason, final AjaxRequestTarget target) { if(component == null) { return; } component.setOutputMarkupId(true); // enable ajax link if(visibility != null) { component.setVisible(visibility == InputFieldVisibility.VISIBLE); if (target != null) { target.add(component); } } if(editability != null) { // dynamic disablement doesn't yet work, this exception is thrown when form is submitted: // // Caused by: java.lang.IllegalStateException: ServletRequest does not contain multipart content. // One possible solution is to explicitly call Form.setMultipart(true), Wicket tries its best to // auto-detect multipart forms but there are certain situation where it cannot. // component.setEnabled(editability == InputFieldEditability.EDITABLE); // // final AttributeModifier title = new AttributeModifier("title", Model.of(disabledReason != null ? disabledReason : "")); // component.add(title); // // if (target != null) { // target.add(component); // } // as a workaround, use VISIBILITY instead. 
} } private String getAcceptFilter(){ return scalarModel.getFileAccept(); } private void addAcceptFilterTo(Component component){ final String filter = getAcceptFilter(); if(filter==null || filter.isEmpty()) return; // ignore class AcceptAttributeModel extends Model<String> { private static final long serialVersionUID = 1L; @Override public String getObject() { return filter; } } component.add(new AttributeModifier("accept", new AcceptAttributeModel())); } private Label updateFileNameLabel(String idFileName, MarkupContainer formComponent) { class FileNameModel extends Model<String> { private static final long serialVersionUID = 1L; @Override public String getObject() { T blobOrClob = getBlobOrClobFromModel(); String fileName = blobOrClob != null? blobOrClob.getName(): ""; return fileName; } } Label fileNameLabel = new Label(idFileName, new FileNameModel()); formComponent.addOrReplace(fileNameLabel); fileNameLabel.setOutputMarkupId(true); return fileNameLabel; } private void updateClearLink( final InputFieldVisibility visibility, final InputFieldEditability editability, final AjaxRequestTarget target) { final MarkupContainer formComponent = (MarkupContainer) getComponentForRegular(); formComponent.setOutputMarkupId(true); // enable ajax link final AjaxLink<Void> ajaxLink = new AjaxLink<Void>(ID_SCALAR_IF_REGULAR_CLEAR){ private static final long serialVersionUID = 1L; @Override public void onClick(AjaxRequestTarget target) { setEnabled(false); ScalarModel model = IsisBlobOrClobPanelAbstract.this.getModel(); model.setObject(null); target.add(formComponent); target.add(fileNameLabel); } }; ajaxLink.setOutputMarkupId(true); formComponent.addOrReplace(ajaxLink); final T blobOrClob = getBlobOrClobFromModel(); final Component component = formComponent.get(ID_SCALAR_IF_REGULAR_CLEAR); component.setVisible(blobOrClob != null && visibility == InputFieldVisibility.VISIBLE); component.setEnabled(blobOrClob != null && editability == InputFieldEditability.EDITABLE); if(target != null) { target.add(formComponent); target.add(component); target.add(ajaxLink); } } private MarkupContainer updateDownloadLink(String downloadId, MarkupContainer container) { final ResourceLink<?> resourceLink = createResourceLink(downloadId); if(resourceLink != null) { container.addOrReplace(resourceLink); } else { Components.permanentlyHide(container, downloadId); } return resourceLink; } private ResourceLink<?> createResourceLink(String id) { final T blob = getBlobOrClobFromModel(); if(blob == null) { return null; } final IResource bar = newResource(blob); return new ResourceLink<Object>(id, bar); } private T getBlobOrClobFromModel() { return getBlobOrClob(getModel()); } /** * Mandatory hook method. */ protected abstract IResource newResource(final T namedWithMimeType); @Override protected String getScalarPanelType() { return "isisBlobPanel"; } }
ISIS-2001: blob/clob properties now visible once more even if not editable.
core/viewer-wicket-ui/src/main/java/org/apache/isis/viewer/wicket/ui/components/scalars/isisapplib/IsisBlobOrClobPanelAbstract.java
ISIS-2001: blob/clob properties now visible once more even if not editable.
Java
apache-2.0
7f3a93928875e85b8ab5fc965661b85223014bf6
0
apache/airavata,apache/airavata,apache/airavata,apache/airavata,apache/airavata,apache/airavata,apache/airavata,apache/airavata
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.airavata.service.profile.handlers; import org.apache.airavata.common.exception.ApplicationSettingsException; import org.apache.airavata.common.utils.Constants; import org.apache.airavata.common.utils.ServerSettings; import org.apache.airavata.credential.store.client.CredentialStoreClientFactory; import org.apache.airavata.credential.store.cpi.CredentialStoreService; import org.apache.airavata.credential.store.exception.CredentialStoreException; import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile; import org.apache.airavata.model.credential.store.PasswordCredential; import org.apache.airavata.model.error.AuthorizationException; import org.apache.airavata.model.security.AuthzToken; import org.apache.airavata.model.user.UserProfile; import org.apache.airavata.model.workspace.Gateway; import org.apache.airavata.registry.api.RegistryService; import org.apache.airavata.registry.api.client.RegistryServiceClientFactory; import org.apache.airavata.registry.api.exception.RegistryServiceException; import org.apache.airavata.service.profile.iam.admin.services.core.impl.TenantManagementKeycloakImpl; import org.apache.airavata.service.profile.iam.admin.services.cpi.IamAdminServices; import org.apache.airavata.service.profile.iam.admin.services.cpi.exception.IamAdminServicesException; import org.apache.airavata.service.profile.iam.admin.services.cpi.iam_admin_services_cpiConstants; import org.apache.airavata.service.security.interceptor.SecurityCheck; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; public class IamAdminServicesHandler implements IamAdminServices.Iface { private final static Logger logger = LoggerFactory.getLogger(IamAdminServicesHandler.class); @Override public String getAPIVersion(AuthzToken authzToken) throws IamAdminServicesException, AuthorizationException { try { return iam_admin_services_cpiConstants.IAM_ADMIN_SERVICES_CPI_VERSION; } catch (Exception ex) { logger.error("Error getting API version, reason: " + ex.getMessage(), ex); IamAdminServicesException exception = new IamAdminServicesException(); exception.setMessage("Error getting API version, reason: " + ex.getMessage()); throw exception; } } @Override @SecurityCheck public Gateway setUpGateway(AuthzToken authzToken, Gateway gateway, PasswordCredential isSuperAdminCredentials) throws IamAdminServicesException, AuthorizationException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ keycloakclient.addTenant(isSuperAdminCredentials,gateway); if(!keycloakclient.createTenantAdminAccount(isSuperAdminCredentials,gateway)){ logger.error("Admin account creation failed !!, please refer error logs for 
reason"); } Gateway gatewayWithIdAndSecret = keycloakclient.configureClient(isSuperAdminCredentials,gateway); return gatewayWithIdAndSecret; } catch (IamAdminServicesException ex){ logger.error("Gateway Setup Failed, reason: " + ex.getMessage(), ex); throw ex; } } //ToDo: Will only be secure when using SSL between PGA and Airavata @Override @SecurityCheck public boolean registerUser(AuthzToken authzToken, UserProfile userDetails, PasswordCredential isRealmAdminCredentials, String newPassword) throws IamAdminServicesException, AuthorizationException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ if(keycloakclient.createUser(isRealmAdminCredentials,userDetails,newPassword)) return true; else return false; } catch (IamAdminServicesException ex){ logger.error("Error while registering user into Identity Server, reason: " + ex.getMessage(), ex); throw ex; } } @Override @SecurityCheck public boolean enableUser(AuthzToken authzToken, UserProfile userDetails, PasswordCredential isRealmAdminCredentials) throws IamAdminServicesException, AuthorizationException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ if(keycloakclient.enableUserAccount(isRealmAdminCredentials,userDetails)) return true; else return false; } catch (IamAdminServicesException ex){ logger.error("Error while enabling user account, reason: " + ex.getMessage(), ex); throw ex; } } @Override @SecurityCheck public boolean resetUserPassword(AuthzToken authzToken, String tenantId, String username, String newPassword) throws IamAdminServicesException, AuthorizationException, TException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ if(keycloakclient.resetUserPassword(authzToken.getAccessToken(), tenantId, username, newPassword)) return true; else return false; } catch (IamAdminServicesException ex){ logger.error("Error while resetting user password in Identity Server, reason: " + ex.getMessage(), ex); throw ex; } } @Override @SecurityCheck public List<UserProfile> findUsers(AuthzToken authzToken, String gatewayID, String email, String userId, PasswordCredential isRealmAdminCredentials) throws IamAdminServicesException, AuthorizationException, TException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ return keycloakclient.findUser(isRealmAdminCredentials,gatewayID,email,userId); } catch (IamAdminServicesException ex){ logger.error("Error while retrieving users from Identity Server, reason: " + ex.getMessage(), ex); throw ex; } } @Override public void updateUserProfile(AuthzToken authzToken, UserProfile userDetails) throws IamAdminServicesException, AuthorizationException, TException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try { String username = authzToken.getClaimsMap().get(Constants.USER_NAME); String gatewayId = authzToken.getClaimsMap().get(Constants.GATEWAY_ID); if (!gatewayId.equals(userDetails.getGatewayId())) { throw new IamAdminServicesException("gatewayId in user profile doesn't match authorization token!"); } if (!username.equals(userDetails.getUserId())) { throw new IamAdminServicesException("userId in user profile doesn't match authorization token!"); } GatewayResourceProfile gwrp = getRegistryServiceClient().getGatewayResourceProfile(gatewayId); CredentialStoreService.Client csClient = getCredentialStoreServiceClient(); PasswordCredential credential = csClient.getPasswordCredential(gwrp.getIdentityServerPwdCredToken(), 
gwrp.getGatewayID()); keycloakclient.updateUserProfile(credential, gatewayId, username, userDetails); } catch (ApplicationSettingsException e) { throw new IamAdminServicesException("Unable to create service clients. Reason: " + e.getMessage()); } } private RegistryService.Client getRegistryServiceClient() throws TException, ApplicationSettingsException { final int serverPort = Integer.parseInt(ServerSettings.getRegistryServerPort()); final String serverHost = ServerSettings.getRegistryServerHost(); try { return RegistryServiceClientFactory.createRegistryClient(serverHost, serverPort); } catch (RegistryServiceException e) { throw new TException("Unable to create registry client...", e); } } private CredentialStoreService.Client getCredentialStoreServiceClient() throws TException, ApplicationSettingsException { final int serverPort = Integer.parseInt(ServerSettings.getCredentialStoreServerPort()); final String serverHost = ServerSettings.getCredentialStoreServerHost(); try { return CredentialStoreClientFactory.createAiravataCSClient(serverHost, serverPort); } catch (CredentialStoreException e) { throw new TException("Unable to create credential store client...", e); } } }
airavata-services/profile-service/profile-service-server/src/main/java/org/apache/airavata/service/profile/handlers/IamAdminServicesHandler.java
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.airavata.service.profile.handlers; import org.apache.airavata.common.exception.ApplicationSettingsException; import org.apache.airavata.common.utils.Constants; import org.apache.airavata.common.utils.ServerSettings; import org.apache.airavata.credential.store.client.CredentialStoreClientFactory; import org.apache.airavata.credential.store.cpi.CredentialStoreService; import org.apache.airavata.credential.store.exception.CredentialStoreException; import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile; import org.apache.airavata.model.credential.store.PasswordCredential; import org.apache.airavata.model.error.AuthorizationException; import org.apache.airavata.model.security.AuthzToken; import org.apache.airavata.model.user.UserProfile; import org.apache.airavata.model.workspace.Gateway; import org.apache.airavata.registry.api.RegistryService; import org.apache.airavata.registry.api.client.RegistryServiceClientFactory; import org.apache.airavata.registry.api.exception.RegistryServiceException; import org.apache.airavata.service.profile.iam.admin.services.core.impl.TenantManagementKeycloakImpl; import org.apache.airavata.service.profile.iam.admin.services.cpi.IamAdminServices; import org.apache.airavata.service.profile.iam.admin.services.cpi.exception.IamAdminServicesException; import org.apache.airavata.service.profile.iam.admin.services.cpi.iam_admin_services_cpiConstants; import org.apache.airavata.service.security.interceptor.SecurityCheck; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; public class IamAdminServicesHandler implements IamAdminServices.Iface { private final static Logger logger = LoggerFactory.getLogger(IamAdminServicesHandler.class); @Override public String getAPIVersion(AuthzToken authzToken) throws IamAdminServicesException, AuthorizationException { try { return iam_admin_services_cpiConstants.IAM_ADMIN_SERVICES_CPI_VERSION; } catch (Exception ex) { logger.error("Error getting API version, reason: " + ex.getMessage(), ex); IamAdminServicesException exception = new IamAdminServicesException(); exception.setMessage("Error getting API version, reason: " + ex.getMessage()); throw exception; } } @Override @SecurityCheck public Gateway setUpGateway(AuthzToken authzToken, Gateway gateway, PasswordCredential isSuperAdminCredentials) throws IamAdminServicesException, AuthorizationException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ keycloakclient.addTenant(isSuperAdminCredentials,gateway); if(!keycloakclient.createTenantAdminAccount(isSuperAdminCredentials,gateway)){ logger.error("Admin account creation failed !!, please refer error logs for 
reason"); } Gateway gatewayWithIdAndSecret = keycloakclient.configureClient(isSuperAdminCredentials,gateway); return gatewayWithIdAndSecret; } catch (IamAdminServicesException ex){ logger.error("Gateway Setup Failed, reason: " + ex.getMessage(), ex); throw ex; } } //ToDo: Will only be secure when using SSL between PGA and Airavata @Override @SecurityCheck public boolean registerUser(AuthzToken authzToken, UserProfile userDetails, PasswordCredential isRealmAdminCredentials, String newPassword) throws IamAdminServicesException, AuthorizationException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ if(keycloakclient.createUser(isRealmAdminCredentials,userDetails,newPassword)) return true; else return false; } catch (IamAdminServicesException ex){ logger.error("Error while registering user into Identity Server, reason: " + ex.getMessage(), ex); throw ex; } } @Override @SecurityCheck public boolean enableUser(AuthzToken authzToken, UserProfile userDetails, PasswordCredential isRealmAdminCredentials) throws IamAdminServicesException, AuthorizationException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ if(keycloakclient.enableUserAccount(isRealmAdminCredentials,userDetails)) return true; else return false; } catch (IamAdminServicesException ex){ logger.error("Error while enabling user account, reason: " + ex.getMessage(), ex); throw ex; } } @Override @SecurityCheck public boolean resetUserPassword(AuthzToken authzToken, String tenantId, String username, String newPassword) throws IamAdminServicesException, AuthorizationException, TException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ if(keycloakclient.resetUserPassword(authzToken.getAccessToken(), tenantId, username, newPassword)) return true; else return false; } catch (IamAdminServicesException ex){ logger.error("Error while resetting user password in Identity Server, reason: " + ex.getMessage(), ex); throw ex; } } @Override @SecurityCheck public List<UserProfile> findUsers(AuthzToken authzToken, String gatewayID, String email, String userId, PasswordCredential isRealmAdminCredentials) throws IamAdminServicesException, AuthorizationException, TException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try{ return keycloakclient.findUser(isRealmAdminCredentials,gatewayID,email,userId); } catch (IamAdminServicesException ex){ logger.error("Error while retrieving users from Identity Server, reason: " + ex.getMessage(), ex); throw ex; } } @Override public void updateUserProfile(AuthzToken authzToken, UserProfile userDetails) throws IamAdminServicesException, AuthorizationException, TException { TenantManagementKeycloakImpl keycloakclient = new TenantManagementKeycloakImpl(); try { String username = authzToken.getClaimsMap().get(Constants.USER_NAME); String gatewayId = authzToken.getClaimsMap().get(Constants.GATEWAY_ID); if (gatewayId.equals(userDetails.getGatewayId())) { throw new IamAdminServicesException("gatewayId in user profile doesn't match authorization token!"); } if (username.equals(userDetails.getUserId())) { throw new IamAdminServicesException("userId in user profile doesn't match authorization token!"); } GatewayResourceProfile gwrp = getRegistryServiceClient().getGatewayResourceProfile(gatewayId); CredentialStoreService.Client csClient = getCredentialStoreServiceClient(); PasswordCredential credential = csClient.getPasswordCredential(gwrp.getIdentityServerPwdCredToken(), 
gwrp.getGatewayID()); keycloakclient.updateUserProfile(credential, gatewayId, username, userDetails); } catch (ApplicationSettingsException e) { throw new IamAdminServicesException("Unable to create service clients. Reason: " + e.getMessage()); } } private RegistryService.Client getRegistryServiceClient() throws TException, ApplicationSettingsException { final int serverPort = Integer.parseInt(ServerSettings.getRegistryServerPort()); final String serverHost = ServerSettings.getRegistryServerHost(); try { return RegistryServiceClientFactory.createRegistryClient(serverHost, serverPort); } catch (RegistryServiceException e) { throw new TException("Unable to create registry client...", e); } } private CredentialStoreService.Client getCredentialStoreServiceClient() throws TException, ApplicationSettingsException { final int serverPort = Integer.parseInt(ServerSettings.getCredentialStoreServerPort()); final String serverHost = ServerSettings.getCredentialStoreServerHost(); try { return CredentialStoreClientFactory.createAiravataCSClient(serverHost, serverPort); } catch (CredentialStoreException e) { throw new TException("Unable to create credential store client...", e); } } }
AIRAVATA-2408 Fixing authz token claim verification
airavata-services/profile-service/profile-service-server/src/main/java/org/apache/airavata/service/profile/handlers/IamAdminServicesHandler.java
AIRAVATA-2408 Fixing authz token claim verification
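The AIRAVATA-2408 fix in the record above comes down to negating the claim comparisons, so that a gatewayId or userId mismatch between the authorization token and the submitted profile is rejected instead of accepted. A minimal sketch of that check, assuming illustrative claim key names and a stand-in exception type rather than Airavata's Constants and IamAdminServicesException:

```java
import java.util.Map;

// Illustrative stand-ins for the token claims and profile fields involved in the check.
final class ClaimValidationExample {

    static class ValidationException extends Exception {
        ValidationException(String message) { super(message); }
    }

    /**
     * Rejects the update when the gatewayId or userId in the submitted profile
     * does not match the claims carried by the authorization token.
     * The claim keys "gatewayID" and "userName" are assumptions for this sketch.
     */
    static void validateClaims(Map<String, String> tokenClaims,
                               String profileGatewayId,
                               String profileUserId) throws ValidationException {
        String tokenGatewayId = tokenClaims.get("gatewayID");
        String tokenUserName = tokenClaims.get("userName");
        if (!tokenGatewayId.equals(profileGatewayId)) {
            throw new ValidationException("gatewayId in user profile doesn't match authorization token!");
        }
        if (!tokenUserName.equals(profileUserId)) {
            throw new ValidationException("userId in user profile doesn't match authorization token!");
        }
    }

    public static void main(String[] args) throws Exception {
        // Matching claims pass silently.
        validateClaims(Map.of("gatewayID", "gw1", "userName", "alice"), "gw1", "alice");
        try {
            // A gateway mismatch is rejected.
            validateClaims(Map.of("gatewayID", "gw1", "userName", "alice"), "gw2", "alice");
        } catch (ValidationException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```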
Java
apache-2.0
17a023a62366ed4b85fd8f0f9cb41d591169c940
0
tsmgeek/traccar,jon-stumpf/traccar,jon-stumpf/traccar,ninioe/traccar,ninioe/traccar,jssenyange/traccar,jssenyange/traccar,jon-stumpf/traccar,5of9/traccar,orcoliver/traccar,AnshulJain1985/Roadcast-Tracker,ninioe/traccar,5of9/traccar,AnshulJain1985/Roadcast-Tracker,tananaev/traccar,orcoliver/traccar,orcoliver/traccar,tananaev/traccar,jssenyange/traccar,tsmgeek/traccar,tananaev/traccar,tsmgeek/traccar
/* * Copyright 2014 - 2017 Anton Tananaev ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.traccar; import org.traccar.helper.DistanceCalculator; import org.traccar.helper.Log; import org.traccar.helper.UnitsConverter; import org.traccar.model.Position; public class FilterHandler extends BaseDataHandler { private boolean filterInvalid; private boolean filterZero; private boolean filterDuplicate; private long filterFuture; private boolean filterApproximate; private boolean filterStatic; private int filterDistance; private int filterMaxSpeed; private long filterLimit; public void setFilterInvalid(boolean filterInvalid) { this.filterInvalid = filterInvalid; } public void setFilterZero(boolean filterZero) { this.filterZero = filterZero; } public void setFilterDuplicate(boolean filterDuplicate) { this.filterDuplicate = filterDuplicate; } public void setFilterApproximate(boolean filterApproximate) { this.filterApproximate = filterApproximate; } public void setFilterStatic(boolean filterStatic) { this.filterStatic = filterStatic; } public void setFilterDistance(int filterDistance) { this.filterDistance = filterDistance; } public void setFilterLimit(long filterLimit) { this.filterLimit = filterLimit; } public FilterHandler() { Config config = Context.getConfig(); if (config != null) { filterInvalid = config.getBoolean("filter.invalid"); filterZero = config.getBoolean("filter.zero"); filterDuplicate = config.getBoolean("filter.duplicate"); filterFuture = config.getLong("filter.future") * 1000; filterApproximate = config.getBoolean("filter.approximate"); filterStatic = config.getBoolean("filter.static"); filterDistance = config.getInteger("filter.distance"); filterMaxSpeed = config.getInteger("filter.maxSpeed"); filterLimit = config.getLong("filter.limit") * 1000; } } private boolean filterInvalid(Position position) { return filterInvalid && !position.getValid(); } private boolean filterZero(Position position) { return filterZero && position.getLatitude() == 0.0 && position.getLongitude() == 0.0; } private boolean filterDuplicate(Position position, Position last) { return filterDuplicate && last != null && position.getFixTime().equals(last.getFixTime()); } private boolean filterFuture(Position position) { return filterFuture != 0 && position.getFixTime().getTime() > System.currentTimeMillis() + filterFuture; } private boolean filterApproximate(Position position) { return filterApproximate && position.getBoolean(Position.KEY_APPROXIMATE); } private boolean filterStatic(Position position) { return filterStatic && position.getSpeed() == 0.0; } private boolean filterDistance(Position position, Position last) { if (filterDistance != 0 && last != null) { double distance = DistanceCalculator.distance( position.getLatitude(), position.getLongitude(), last.getLatitude(), last.getLongitude()); return distance < filterDistance; } return false; } private boolean filterMaxSpeed(Position position, Position last) { if (filterMaxSpeed != 0 && last != null) { double distance = 
DistanceCalculator.distance( position.getLatitude(), position.getLongitude(), last.getLatitude(), last.getLongitude()); long time = position.getFixTime().getTime() - last.getFixTime().getTime(); return UnitsConverter.knotsFromMps(distance / time) > filterMaxSpeed; } return false; } private boolean filterLimit(Position position, Position last) { if (filterLimit != 0) { if (last != null) { return (position.getFixTime().getTime() - last.getFixTime().getTime()) > filterLimit; } else { return false; } } else { return false; } } private boolean filter(Position position) { StringBuilder filterType = new StringBuilder(); Position last = null; if (Context.getIdentityManager() != null) { last = Context.getIdentityManager().getLastPosition(position.getDeviceId()); } if (filterInvalid(position)) { filterType.append("Invalid "); } if (filterZero(position)) { filterType.append("Zero "); } if (filterDuplicate(position, last)) { filterType.append("Duplicate "); } if (filterFuture(position)) { filterType.append("Future "); } if (filterApproximate(position)) { filterType.append("Approximate "); } if (filterStatic(position)) { filterType.append("Static "); } if (filterDistance(position, last)) { filterType.append("Distance "); } if (filterMaxSpeed(position, last)) { filterType.append("MaxSpeed "); } if (filterType.length() > 0 && !filterLimit(position, last)) { StringBuilder message = new StringBuilder(); message.append("Position filtered by "); message.append(filterType.toString()); message.append("filters from device: "); message.append(Context.getIdentityManager().getDeviceById(position.getDeviceId()).getUniqueId()); message.append(" with id: "); message.append(position.getDeviceId()); Log.info(message.toString()); return true; } return false; } @Override protected Position handlePosition(Position position) { if (filter(position)) { return null; } return position; } }
src/org/traccar/FilterHandler.java
/* * Copyright 2014 - 2016 Anton Tananaev ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.traccar; import org.traccar.helper.DistanceCalculator; import org.traccar.helper.Log; import org.traccar.model.Position; public class FilterHandler extends BaseDataHandler { private boolean filterInvalid; private boolean filterZero; private boolean filterDuplicate; private boolean filterApproximate; private boolean filterStatic; private int filterDistance; private long filterLimit; private long filterFuture; public void setFilterInvalid(boolean filterInvalid) { this.filterInvalid = filterInvalid; } public void setFilterZero(boolean filterZero) { this.filterZero = filterZero; } public void setFilterDuplicate(boolean filterDuplicate) { this.filterDuplicate = filterDuplicate; } public void setFilterApproximate(boolean filterApproximate) { this.filterApproximate = filterApproximate; } public void setFilterStatic(boolean filterStatic) { this.filterStatic = filterStatic; } public void setFilterDistance(int filterDistance) { this.filterDistance = filterDistance; } public void setFilterLimit(long filterLimit) { this.filterLimit = filterLimit; } public void setFilterFuture(long filterFuture) { this.filterFuture = filterFuture; } public FilterHandler() { Config config = Context.getConfig(); if (config != null) { filterInvalid = config.getBoolean("filter.invalid"); filterZero = config.getBoolean("filter.zero"); filterDuplicate = config.getBoolean("filter.duplicate"); filterApproximate = config.getBoolean("filter.approximate"); filterStatic = config.getBoolean("filter.static"); filterDistance = config.getInteger("filter.distance"); filterLimit = config.getLong("filter.limit") * 1000; filterFuture = config.getLong("filter.future") * 1000; } } private Position getLastPosition(long deviceId) { if (Context.getIdentityManager() != null) { return Context.getIdentityManager().getLastPosition(deviceId); } return null; } private boolean filterInvalid(Position position) { return filterInvalid && !position.getValid(); } private boolean filterZero(Position position) { return filterZero && position.getLatitude() == 0.0 && position.getLongitude() == 0.0; } private boolean filterDuplicate(Position position) { if (filterDuplicate) { Position last = getLastPosition(position.getDeviceId()); if (last != null) { return position.getFixTime().equals(last.getFixTime()); } else { return false; } } else { return false; } } private boolean filterFuture(Position position) { return filterFuture != 0 && position.getFixTime().getTime() > System.currentTimeMillis() + filterFuture; } private boolean filterApproximate(Position position) { return filterApproximate && position.getBoolean(Position.KEY_APPROXIMATE); } private boolean filterStatic(Position position) { return filterStatic && position.getSpeed() == 0.0; } private boolean filterDistance(Position position) { if (filterDistance != 0) { Position last = getLastPosition(position.getDeviceId()); if (last != null) { double distance = DistanceCalculator.distance( 
position.getLatitude(), position.getLongitude(), last.getLatitude(), last.getLongitude()); return distance < filterDistance; } else { return false; } } else { return false; } } private boolean filterLimit(Position position) { if (filterLimit != 0) { Position last = getLastPosition(position.getDeviceId()); if (last != null) { return (position.getFixTime().getTime() - last.getFixTime().getTime()) > filterLimit; } else { return false; } } else { return false; } } private boolean filter(Position position) { StringBuilder filterType = new StringBuilder(); if (filterInvalid(position)) { filterType.append("Invalid "); } if (filterZero(position)) { filterType.append("Zero "); } if (filterDuplicate(position)) { filterType.append("Duplicate "); } if (filterFuture(position)) { filterType.append("Future "); } if (filterApproximate(position)) { filterType.append("Approximate "); } if (filterStatic(position)) { filterType.append("Static "); } if (filterDistance(position)) { filterType.append("Distance "); } if (filterType.length() > 0 && !filterLimit(position)) { StringBuilder message = new StringBuilder(); message.append("Position filtered by "); message.append(filterType.toString()); message.append("filters from device: "); message.append(Context.getIdentityManager().getDeviceById(position.getDeviceId()).getUniqueId()); message.append(" with id: "); message.append(position.getDeviceId()); Log.info(message.toString()); return true; } return false; } @Override protected Position handlePosition(Position position) { if (filter(position)) { return null; } return position; } }
Implement max speed filtering
src/org/traccar/FilterHandler.java
Implement max speed filtering
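The max speed filter in the traccar record above estimates the speed implied by two consecutive fixes (distance between them divided by the fix-time delta, converted to knots) and drops the new position when that speed exceeds the configured limit. A self-contained sketch of the same idea, using a plain haversine distance and an explicit milliseconds-to-seconds conversion in place of traccar's DistanceCalculator and UnitsConverter helpers; the coordinates and the 100-knot limit in main are example values only:

```java
// Minimal illustration of max-speed filtering between two GPS fixes.
final class MaxSpeedFilterExample {

    static final double EARTH_RADIUS_M = 6371000.0;
    static final double MPS_TO_KNOTS = 1.943844; // 1 m/s ~ 1.943844 knots

    /** Great-circle distance in metres (haversine formula). */
    static double distanceMeters(double lat1, double lon1, double lat2, double lon2) {
        double dLat = Math.toRadians(lat2 - lat1);
        double dLon = Math.toRadians(lon2 - lon1);
        double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                * Math.sin(dLon / 2) * Math.sin(dLon / 2);
        return 2 * EARTH_RADIUS_M * Math.asin(Math.sqrt(a));
    }

    /**
     * Returns true when the speed implied by the two fixes exceeds the limit,
     * i.e. the newer position should be filtered out as implausible.
     */
    static boolean exceedsMaxSpeed(double lat1, double lon1, long time1Millis,
                                   double lat2, double lon2, long time2Millis,
                                   double maxSpeedKnots) {
        long deltaMillis = time2Millis - time1Millis;
        if (deltaMillis <= 0) {
            return false; // cannot estimate a speed without a positive time delta
        }
        double metersPerSecond = distanceMeters(lat1, lon1, lat2, lon2) / (deltaMillis / 1000.0);
        return metersPerSecond * MPS_TO_KNOTS > maxSpeedKnots;
    }

    public static void main(String[] args) {
        // Roughly 1.1 km covered in 10 seconds, about 215 knots, so filtered at a 100-knot limit.
        boolean filtered = exceedsMaxSpeed(52.5200, 13.4050, 0L,
                                           52.5300, 13.4050, 10_000L,
                                           100.0);
        System.out.println("filtered: " + filtered);
    }
}
```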
Java
apache-2.0
e7b8bd4ab0bea1fefa3f70953cdeafc9d4524aef
0
Apache9/hbase,SeekerResource/hbase,gustavoanatoly/hbase,lshmouse/hbase,lshmouse/hbase,Guavus/hbase,infospace/hbase,ultratendency/hbase,Eshcar/hbase,juwi/hbase,amyvmiwei/hbase,justintung/hbase,intel-hadoop/hbase-rhino,amyvmiwei/hbase,ibmsoe/hbase,lshmouse/hbase,ultratendency/hbase,Eshcar/hbase,ChinmaySKulkarni/hbase,JingchengDu/hbase,bijugs/hbase,vincentpoon/hbase,ndimiduk/hbase,juwi/hbase,bijugs/hbase,ibmsoe/hbase,gustavoanatoly/hbase,SeekerResource/hbase,ChinmaySKulkarni/hbase,andrewmains12/hbase,Apache9/hbase,mahak/hbase,Eshcar/hbase,joshelser/hbase,apurtell/hbase,bijugs/hbase,Guavus/hbase,drewpope/hbase,andrewmains12/hbase,narendragoyal/hbase,Apache9/hbase,ibmsoe/hbase,lshmouse/hbase,ultratendency/hbase,ultratendency/hbase,bijugs/hbase,ChinmaySKulkarni/hbase,mahak/hbase,vincentpoon/hbase,justintung/hbase,SeekerResource/hbase,lshmouse/hbase,infospace/hbase,juwi/hbase,Eshcar/hbase,JingchengDu/hbase,apurtell/hbase,juwi/hbase,andrewmains12/hbase,joshelser/hbase,intel-hadoop/hbase-rhino,HubSpot/hbase,bijugs/hbase,apurtell/hbase,joshelser/hbase,francisliu/hbase,apurtell/hbase,narendragoyal/hbase,justintung/hbase,narendragoyal/hbase,toshimasa-nasu/hbase,StackVista/hbase,juwi/hbase,andrewmains12/hbase,ibmsoe/hbase,StackVista/hbase,juwi/hbase,drewpope/hbase,andrewmains12/hbase,francisliu/hbase,ultratendency/hbase,ChinmaySKulkarni/hbase,Guavus/hbase,justintung/hbase,francisliu/hbase,gustavoanatoly/hbase,Guavus/hbase,francisliu/hbase,justintung/hbase,bijugs/hbase,mahak/hbase,JingchengDu/hbase,drewpope/hbase,infospace/hbase,narendragoyal/hbase,joshelser/hbase,ndimiduk/hbase,HubSpot/hbase,francisliu/hbase,toshimasa-nasu/hbase,StackVista/hbase,ChinmaySKulkarni/hbase,Eshcar/hbase,HubSpot/hbase,Apache9/hbase,francisliu/hbase,drewpope/hbase,Apache9/hbase,intel-hadoop/hbase-rhino,mapr/hbase,infospace/hbase,intel-hadoop/hbase-rhino,Eshcar/hbase,toshimasa-nasu/hbase,amyvmiwei/hbase,narendragoyal/hbase,Guavus/hbase,StackVista/hbase,joshelser/hbase,ultratendency/hbase,JingchengDu/hbase,joshelser/hbase,drewpope/hbase,Apache9/hbase,drewpope/hbase,apurtell/hbase,intel-hadoop/hbase-rhino,joshelser/hbase,Eshcar/hbase,HubSpot/hbase,infospace/hbase,ndimiduk/hbase,francisliu/hbase,StackVista/hbase,ibmsoe/hbase,ultratendency/hbase,drewpope/hbase,ultratendency/hbase,HubSpot/hbase,ndimiduk/hbase,Guavus/hbase,vincentpoon/hbase,apurtell/hbase,andrewmains12/hbase,StackVista/hbase,ChinmaySKulkarni/hbase,Eshcar/hbase,ibmsoe/hbase,amyvmiwei/hbase,mahak/hbase,toshimasa-nasu/hbase,mapr/hbase,Eshcar/hbase,gustavoanatoly/hbase,JingchengDu/hbase,vincentpoon/hbase,lshmouse/hbase,andrewmains12/hbase,mahak/hbase,infospace/hbase,vincentpoon/hbase,HubSpot/hbase,Guavus/hbase,ibmsoe/hbase,apurtell/hbase,toshimasa-nasu/hbase,mapr/hbase,HubSpot/hbase,intel-hadoop/hbase-rhino,gustavoanatoly/hbase,ibmsoe/hbase,joshelser/hbase,mahak/hbase,intel-hadoop/hbase-rhino,Guavus/hbase,amyvmiwei/hbase,lshmouse/hbase,Apache9/hbase,justintung/hbase,justintung/hbase,lshmouse/hbase,andrewmains12/hbase,JingchengDu/hbase,juwi/hbase,drewpope/hbase,gustavoanatoly/hbase,toshimasa-nasu/hbase,ndimiduk/hbase,drewpope/hbase,apurtell/hbase,mapr/hbase,StackVista/hbase,francisliu/hbase,francisliu/hbase,bijugs/hbase,narendragoyal/hbase,ndimiduk/hbase,andrewmains12/hbase,amyvmiwei/hbase,SeekerResource/hbase,ChinmaySKulkarni/hbase,HubSpot/hbase,SeekerResource/hbase,ndimiduk/hbase,infospace/hbase,intel-hadoop/hbase-rhino,ChinmaySKulkarni/hbase,ndimiduk/hbase,StackVista/hbase,gustavoanatoly/hbase,StackVista/hbase,apurtell/hbase,mahak/hbase,StackVista/hbase,ibmsoe/hbase,nar
endragoyal/hbase,mapr/hbase,lshmouse/hbase,ndimiduk/hbase,JingchengDu/hbase,amyvmiwei/hbase,ibmsoe/hbase,amyvmiwei/hbase,vincentpoon/hbase,narendragoyal/hbase,francisliu/hbase,gustavoanatoly/hbase,intel-hadoop/hbase-rhino,HubSpot/hbase,Apache9/hbase,Eshcar/hbase,gustavoanatoly/hbase,ndimiduk/hbase,SeekerResource/hbase,mapr/hbase,lshmouse/hbase,narendragoyal/hbase,mahak/hbase,toshimasa-nasu/hbase,bijugs/hbase,Apache9/hbase,ChinmaySKulkarni/hbase,justintung/hbase,ChinmaySKulkarni/hbase,mahak/hbase,mapr/hbase,JingchengDu/hbase,justintung/hbase,ultratendency/hbase,SeekerResource/hbase,mahak/hbase,apurtell/hbase,amyvmiwei/hbase,vincentpoon/hbase,ultratendency/hbase,JingchengDu/hbase,intel-hadoop/hbase-rhino,andrewmains12/hbase,mapr/hbase,joshelser/hbase,amyvmiwei/hbase,SeekerResource/hbase,Apache9/hbase,mapr/hbase,joshelser/hbase,juwi/hbase,HubSpot/hbase,bijugs/hbase,vincentpoon/hbase,toshimasa-nasu/hbase,Guavus/hbase,juwi/hbase,SeekerResource/hbase,Guavus/hbase,toshimasa-nasu/hbase,bijugs/hbase,vincentpoon/hbase,gustavoanatoly/hbase,narendragoyal/hbase,vincentpoon/hbase,SeekerResource/hbase,JingchengDu/hbase,justintung/hbase
/* * Copyright 2010 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import com.google.common.collect.Ordering; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.SplitKeyValue; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.WritableWithSize; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Writable; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.TreeMap; /** * Single row result of a {@link Get} or {@link Scan} query.<p> * * This class is NOT THREAD SAFE.<p> * * Convenience methods are available that return various {@link Map} * structures and values directly.<p> * * To get a complete mapping of all cells in the Result, which can include * multiple families and multiple versions, use {@link #getMap()}.<p> * * To get a mapping of each family to its columns (qualifiers and values), * including only the latest version of each, use {@link #getNoVersionMap()}. * * To get a mapping of qualifiers to latest values for an individual family use * {@link #getFamilyMap(byte[])}.<p> * * To get the latest value for a specific family and qualifier use {@link #getValue(byte[], byte[])}. * * A Result is backed by an array of {@link KeyValue} objects, each representing * an HBase cell defined by the row, family, qualifier, timestamp, and value.<p> * * The underlying {@link KeyValue} objects can be accessed through the methods * {@link #sorted()} and {@link #list()}. Each KeyValue can then be accessed * through {@link KeyValue#getRow()}, {@link KeyValue#getFamily()}, {@link KeyValue#getQualifier()}, * {@link KeyValue#getTimestamp()}, and {@link KeyValue#getValue()}. */ public class Result implements Writable, WritableWithSize { private static final byte RESULT_VERSION = (byte)1; private KeyValue [] kvs = null; private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> familyMap = null; // We're not using java serialization. Transient here is just a marker to say // that this is where we cache row if we're ever asked for it. private transient byte [] row = null; private ImmutableBytesWritable bytes = null; /** * Constructor used for Writable. */ public Result() {} /** * Instantiate a Result with the specified array of KeyValues. * @param kvs array of KeyValues */ public Result(KeyValue [] kvs) { if(kvs != null && kvs.length > 0) { this.kvs = kvs; } } /** * Instantiate a Result with the specified List of KeyValues. 
* @param kvs List of KeyValues */ public Result(List<KeyValue> kvs) { this(kvs.toArray(new KeyValue[0])); } /** * Instantiate a Result from the specified raw binary format. * @param bytes raw binary format of Result */ public Result(ImmutableBytesWritable bytes) { this.bytes = bytes; } /** * Method for retrieving the row that this result is for * @return row */ public byte [] getRow() { if (this.row == null) { if(this.kvs == null) { readFields(); } this.row = this.kvs.length == 0? null: this.kvs[0].getRow(); } return this.row; } /** * Return the array of KeyValues backing this Result instance. * * The array is sorted from smallest -> largest using the * {@link KeyValue#COMPARATOR}. * * The array only contains what your Get or Scan specifies and no more. * For example if you request column "A" 1 version you will have at most 1 * KeyValue in the array. If you request column "A" with 2 version you will * have at most 2 KeyValues, with the first one being the newer timestamp and * the second being the older timestamp (this is the sort order defined by * {@link KeyValue#COMPARATOR}). If columns don't exist, they won't be * present in the result. Therefore if you ask for 1 version all columns, * it is safe to iterate over this array and expect to see 1 KeyValue for * each column and no more. * * This API is faster than using getFamilyMap() and getMap() * * @return array of KeyValues */ public KeyValue[] raw() { if(this.kvs == null) { readFields(); } return kvs; } /** * Create a sorted list of the KeyValue's in this result. * * Since HBase 0.20.5 this is equivalent to raw(). * * @return The sorted list of KeyValue's. */ public List<KeyValue> list() { if(this.kvs == null) { readFields(); } return isEmpty()? null: Arrays.asList(raw()); } /** * Returns a sorted array of KeyValues in this Result. * <p> * Since HBase 0.20.5 this is equivalent to {@link #raw}. Use * {@link #raw} instead. * * @return sorted array of KeyValues * @deprecated */ public KeyValue[] sorted() { return raw(); // side effect of loading this.kvs } /** * Return the KeyValues for the specific column. The KeyValues are sorted in * the {@link KeyValue#COMPARATOR} order. That implies the first entry in * the list is the most recent column. If the query (Scan or Get) only * requested 1 version the list will contain at most 1 entry. If the column * did not exist in the result set (either the column does not exist * or the column was not selected in the query) the list will be empty. 
* * Also see getColumnLatest which returns just a KeyValue * * @param family the family * @param qualifier * @return a list of KeyValues for this column or empty list if the column * did not exist in the result set */ public List<KeyValue> getColumn(byte [] family, byte [] qualifier) { List<KeyValue> result = new ArrayList<KeyValue>(); KeyValue [] kvs = raw(); if (kvs == null || kvs.length == 0) { return result; } int pos = binarySearch(kvs, family, qualifier); if (pos == -1) { return result; // cant find it } for (int i = pos ; i < kvs.length ; i++ ) { KeyValue kv = kvs[i]; if (kv.matchingColumn(family,qualifier)) { result.add(kv); } else { break; } } return result; } protected int binarySearch(final KeyValue [] kvs, final byte [] family, final byte [] qualifier) { KeyValue searchTerm = KeyValue.createFirstOnRow(kvs[0].getRow(), family, qualifier); // pos === ( -(insertion point) - 1) int pos = Arrays.binarySearch(kvs, searchTerm, KeyValue.COMPARATOR); // never will exact match if (pos < 0) { pos = (pos+1) * -1; // pos is now insertion point } if (pos == kvs.length) { return -1; // doesn't exist } return pos; } /** * The KeyValue for the most recent for a given column. If the column does * not exist in the result set - if it wasn't selected in the query (Get/Scan) * or just does not exist in the row the return value is null. * * @param family * @param qualifier * @return KeyValue for the column or null */ public KeyValue getColumnLatest(byte [] family, byte [] qualifier) { KeyValue [] kvs = raw(); // side effect possibly. if (kvs == null || kvs.length == 0) { return null; } int pos = binarySearch(kvs, family, qualifier); if (pos == -1) { return null; } KeyValue kv = kvs[pos]; if (kv.matchingColumn(family, qualifier)) { return kv; } return null; } /** * Get the latest version of the specified column. * @param family family name * @param qualifier column qualifier * @return value of latest version of column, null if none found */ public byte[] getValue(byte [] family, byte [] qualifier) { KeyValue kv = getColumnLatest(family, qualifier); if (kv == null) { return null; } return kv.getValue(); } /** * Checks for existence of the specified column. * @param family family name * @param qualifier column qualifier * @return true if at least one value exists in the result, false if not */ public boolean containsColumn(byte [] family, byte [] qualifier) { KeyValue kv = getColumnLatest(family, qualifier); return kv != null; } /** * Map of families to all versions of its qualifiers and values. * <p> * Returns a three level Map of the form: * <code>Map&amp;family,Map&lt;qualifier,Map&lt;timestamp,value>>></code> * <p> * Note: All other map returning methods make use of this map internally. 
* @return map from families to qualifiers to versions */ public NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> getMap() { if(this.familyMap != null) { return this.familyMap; } if(isEmpty()) { return null; } this.familyMap = new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> (Bytes.BYTES_COMPARATOR); for(KeyValue kv : this.kvs) { SplitKeyValue splitKV = kv.split(); byte [] family = splitKV.getFamily(); NavigableMap<byte[], NavigableMap<Long, byte[]>> columnMap = familyMap.get(family); if(columnMap == null) { columnMap = new TreeMap<byte[], NavigableMap<Long, byte[]>> (Bytes.BYTES_COMPARATOR); familyMap.put(family, columnMap); } byte [] qualifier = splitKV.getQualifier(); NavigableMap<Long, byte[]> versionMap = columnMap.get(qualifier); if(versionMap == null) { versionMap = new TreeMap<Long, byte[]>(new Comparator<Long>() { public int compare(Long l1, Long l2) { return l2.compareTo(l1); } }); columnMap.put(qualifier, versionMap); } Long timestamp = Bytes.toLong(splitKV.getTimestamp()); byte [] value = splitKV.getValue(); versionMap.put(timestamp, value); } return this.familyMap; } /** * Map of families to their most recent qualifiers and values. * <p> * Returns a two level Map of the form: <code>Map&amp;family,Map&lt;qualifier,value>></code> * <p> * The most recent version of each qualifier will be used. * @return map from families to qualifiers and value */ public NavigableMap<byte[], NavigableMap<byte[], byte[]>> getNoVersionMap() { if(this.familyMap == null) { getMap(); } if(isEmpty()) { return null; } NavigableMap<byte[], NavigableMap<byte[], byte[]>> returnMap = new TreeMap<byte[], NavigableMap<byte[], byte[]>>(Bytes.BYTES_COMPARATOR); for(Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> familyEntry : familyMap.entrySet()) { NavigableMap<byte[], byte[]> qualifierMap = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR); for(Map.Entry<byte[], NavigableMap<Long, byte[]>> qualifierEntry : familyEntry.getValue().entrySet()) { byte [] value = qualifierEntry.getValue().get(qualifierEntry.getValue().firstKey()); qualifierMap.put(qualifierEntry.getKey(), value); } returnMap.put(familyEntry.getKey(), qualifierMap); } return returnMap; } /** * Map of qualifiers to values. 
* <p> * Returns a Map of the form: <code>Map&lt;qualifier,value></code> * @param family column family to get * @return map of qualifiers to values */ public NavigableMap<byte[], byte[]> getFamilyMap(byte [] family) { if(this.familyMap == null) { getMap(); } if(isEmpty()) { return null; } NavigableMap<byte[], byte[]> returnMap = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR); NavigableMap<byte[], NavigableMap<Long, byte[]>> qualifierMap = familyMap.get(family); if(qualifierMap == null) { return returnMap; } for(Map.Entry<byte[], NavigableMap<Long, byte[]>> entry : qualifierMap.entrySet()) { byte [] value = entry.getValue().get(entry.getValue().firstKey()); returnMap.put(entry.getKey(), value); } return returnMap; } private Map.Entry<Long,byte[]> getKeyValue(byte[] family, byte[] qualifier) { if(this.familyMap == null) { getMap(); } if(isEmpty()) { return null; } NavigableMap<byte [], NavigableMap<Long, byte[]>> qualifierMap = familyMap.get(family); if(qualifierMap == null) { return null; } NavigableMap<Long, byte[]> versionMap = getVersionMap(qualifierMap, qualifier); if(versionMap == null) { return null; } return versionMap.firstEntry(); } private NavigableMap<Long, byte[]> getVersionMap( NavigableMap<byte [], NavigableMap<Long, byte[]>> qualifierMap, byte [] qualifier) { return qualifier != null? qualifierMap.get(qualifier): qualifierMap.get(new byte[0]); } /** * Returns the value of the first column in the Result. * @return value of the first column */ public byte [] value() { if (isEmpty()) { return null; } return kvs[0].getValue(); } /** * Returns the raw binary encoding of this Result.<p> * * Please note, there may be an offset into the underlying byte array of the * returned ImmutableBytesWritable. Be sure to use both * {@link ImmutableBytesWritable#get()} and {@link ImmutableBytesWritable#getOffset()} * @return pointer to raw binary of Result */ public ImmutableBytesWritable getBytes() { return this.bytes; } /** * Check if the underlying KeyValue [] is empty or not * @return true if empty */ public boolean isEmpty() { if(this.kvs == null) { readFields(); } return this.kvs == null || this.kvs.length == 0; } /** * @return the size of the underlying KeyValue [] */ public int size() { if(this.kvs == null) { readFields(); } return this.kvs == null? 
0: this.kvs.length; } /** * @return String */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("keyvalues="); if(isEmpty()) { sb.append("NONE"); return sb.toString(); } sb.append("{"); boolean moreThanOne = false; for(KeyValue kv : this.kvs) { if(moreThanOne) { sb.append(", "); } else { moreThanOne = true; } sb.append(kv.toString()); } sb.append("}"); return sb.toString(); } //Writable public void readFields(final DataInput in) throws IOException { familyMap = null; row = null; kvs = null; int totalBuffer = in.readInt(); if(totalBuffer == 0) { bytes = null; return; } byte [] raw = new byte[totalBuffer]; in.readFully(raw, 0, totalBuffer); bytes = new ImmutableBytesWritable(raw, 0, totalBuffer); } //Create KeyValue[] when needed private void readFields() { if (bytes == null) { this.kvs = new KeyValue[0]; return; } byte [] buf = bytes.get(); int offset = bytes.getOffset(); int finalOffset = bytes.getSize() + offset; List<KeyValue> kvs = new ArrayList<KeyValue>(); while(offset < finalOffset) { int keyLength = Bytes.toInt(buf, offset); offset += Bytes.SIZEOF_INT; kvs.add(new KeyValue(buf, offset, keyLength)); offset += keyLength; } this.kvs = kvs.toArray(new KeyValue[kvs.size()]); } public long getWritableSize() { if (isEmpty()) return Bytes.SIZEOF_INT; // int size = 0 long size = Bytes.SIZEOF_INT; // totalLen for (KeyValue kv : kvs) { size += kv.getLength(); size += Bytes.SIZEOF_INT; // kv.getLength } return size; } public void write(final DataOutput out) throws IOException { if(isEmpty()) { out.writeInt(0); } else { int totalLen = 0; for(KeyValue kv : kvs) { totalLen += kv.getLength() + Bytes.SIZEOF_INT; } out.writeInt(totalLen); for(KeyValue kv : kvs) { out.writeInt(kv.getLength()); out.write(kv.getBuffer(), kv.getOffset(), kv.getLength()); } } } public static long getWriteArraySize(Result [] results) { long size = Bytes.SIZEOF_BYTE; // RESULT_VERSION if (results == null || results.length == 0) { size += Bytes.SIZEOF_INT; return size; } size += Bytes.SIZEOF_INT; // results.length size += Bytes.SIZEOF_INT; // bufLen for (Result result : results) { size += Bytes.SIZEOF_INT; // either 0 or result.size() if (result == null || result.isEmpty()) continue; for (KeyValue kv : result.raw()) { size += Bytes.SIZEOF_INT; // kv.getLength(); size += kv.getLength(); } } return size; } public static void writeArray(final DataOutput out, Result [] results) throws IOException { // Write version when writing array form. // This assumes that results are sent to the client as Result[], so we // have an opportunity to handle version differences without affecting // efficiency. out.writeByte(RESULT_VERSION); if(results == null || results.length == 0) { out.writeInt(0); return; } out.writeInt(results.length); int bufLen = 0; for(Result result : results) { bufLen += Bytes.SIZEOF_INT; if(result == null || result.isEmpty()) { continue; } for(KeyValue key : result.raw()) { bufLen += key.getLength() + Bytes.SIZEOF_INT; } } out.writeInt(bufLen); for(Result result : results) { if(result == null || result.isEmpty()) { out.writeInt(0); continue; } out.writeInt(result.size()); for(KeyValue kv : result.raw()) { out.writeInt(kv.getLength()); out.write(kv.getBuffer(), kv.getOffset(), kv.getLength()); } } } public static Result [] readArray(final DataInput in) throws IOException { // Read version for array form. // This assumes that results are sent to the client as Result[], so we // have an opportunity to handle version differences without affecting // efficiency. 
int version = in.readByte(); if (version > RESULT_VERSION) { throw new IOException("version not supported"); } int numResults = in.readInt(); if(numResults == 0) { return new Result[0]; } Result [] results = new Result[numResults]; int bufSize = in.readInt(); byte [] buf = new byte[bufSize]; int offset = 0; for(int i=0;i<numResults;i++) { int numKeys = in.readInt(); offset += Bytes.SIZEOF_INT; if(numKeys == 0) { results[i] = new Result((ImmutableBytesWritable)null); continue; } int initialOffset = offset; for(int j=0;j<numKeys;j++) { int keyLen = in.readInt(); Bytes.putInt(buf, offset, keyLen); offset += Bytes.SIZEOF_INT; in.readFully(buf, offset, keyLen); offset += keyLen; } int totalLength = offset - initialOffset; results[i] = new Result(new ImmutableBytesWritable(buf, initialOffset, totalLength)); } return results; } /** * Does a deep comparison of two Results, down to the byte arrays. * @param res1 first result to compare * @param res2 second result to compare * @throws Exception Every difference is throwing an exception */ public static void compareResults(Result res1, Result res2) throws Exception { if (res2 == null) { throw new Exception("There wasn't enough rows, we stopped at " + Bytes.toString(res1.getRow())); } if (res1.size() != res2.size()) { throw new Exception("This row doesn't have the same number of KVs: " + res1.toString() + " compared to " + res2.toString()); } KeyValue[] ourKVs = res1.sorted(); KeyValue[] replicatedKVs = res2.sorted(); for (int i = 0; i < res1.size(); i++) { if (!ourKVs[i].equals(replicatedKVs[i]) && !Bytes.equals(ourKVs[i].getValue(), replicatedKVs[i].getValue())) { throw new Exception("This result was different: " + res1.toString() + " compared to " + res2.toString()); } } } }
src/main/java/org/apache/hadoop/hbase/client/Result.java
/* * Copyright 2010 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import com.google.common.collect.Ordering; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.SplitKeyValue; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.WritableWithSize; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Writable; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.TreeMap; /** * Single row result of a {@link Get} or {@link Scan} query.<p> * * This class is NOT THREAD SAFE.<p> * * Convenience methods are available that return various {@link Map} * structures and values directly.<p> * * To get a complete mapping of all cells in the Result, which can include * multiple families and multiple versions, use {@link #getMap()}.<p> * * To get a mapping of each family to its columns (qualifiers and values), * including only the latest version of each, use {@link #getNoVersionMap()}. * * To get a mapping of qualifiers to latest values for an individual family use * {@link #getFamilyMap(byte[])}.<p> * * To get the latest value for a specific family and qualifier use {@link #getValue(byte[], byte[])}. * * A Result is backed by an array of {@link KeyValue} objects, each representing * an HBase cell defined by the row, family, qualifier, timestamp, and value.<p> * * The underlying {@link KeyValue} objects can be accessed through the methods * {@link #sorted()} and {@link #list()}. Each KeyValue can then be accessed * through {@link KeyValue#getRow()}, {@link KeyValue#getFamily()}, {@link KeyValue#getQualifier()}, * {@link KeyValue#getTimestamp()}, and {@link KeyValue#getValue()}. */ public class Result implements Writable, WritableWithSize { private static final byte RESULT_VERSION = (byte)1; private KeyValue [] kvs = null; private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> familyMap = null; // We're not using java serialization. Transient here is just a marker to say // that this is where we cache row if we're ever asked for it. private transient byte [] row = null; private ImmutableBytesWritable bytes = null; /** * Constructor used for Writable. */ public Result() {} /** * Instantiate a Result with the specified array of KeyValues. * @param kvs array of KeyValues */ public Result(KeyValue [] kvs) { if(kvs != null && kvs.length > 0) { this.kvs = kvs; } } /** * Instantiate a Result with the specified List of KeyValues. 
* @param kvs List of KeyValues */ public Result(List<KeyValue> kvs) { this(kvs.toArray(new KeyValue[0])); } /** * Instantiate a Result from the specified raw binary format. * @param bytes raw binary format of Result */ public Result(ImmutableBytesWritable bytes) { this.bytes = bytes; } /** * Method for retrieving the row that this result is for * @return row */ public byte [] getRow() { if (this.row == null) { if(this.kvs == null) { readFields(); } this.row = this.kvs.length == 0? null: this.kvs[0].getRow(); } return this.row; } /** * Return the array of KeyValues backing this Result instance. * * The array is sorted from smallest -> largest using the * {@link KeyValue#COMPARATOR}. * * The array only contains what your Get or Scan specifies and no more. * For example if you request column "A" 1 version you will have at most 1 * KeyValue in the array. If you request column "A" with 2 version you will * have at most 2 KeyValues, with the first one being the newer timestamp and * the second being the older timestamp (this is the sort order defined by * {@link KeyValue#COMPARATOR}). If columns don't exist, they won't be * present in the result. Therefore if you ask for 1 version all columns, * it is safe to iterate over this array and expect to see 1 KeyValue for * each column and no more. * * This API is faster than using getFamilyMap() and getMap() * * @return array of KeyValues */ public KeyValue[] raw() { if(this.kvs == null) { readFields(); } return kvs; } /** * Create a sorted list of the KeyValue's in this result. * * Since HBase 0.20.5 this is equivalent to raw(). * * @return The sorted list of KeyValue's. */ public List<KeyValue> list() { if(this.kvs == null) { readFields(); } return isEmpty()? null: Arrays.asList(raw()); } /** * Returns a sorted array of KeyValues in this Result. * <p> * Since HBase 0.20.5 this is equivalent to {@link #raw}. Use * {@link #raw} instead. * * @return sorted array of KeyValues * @deprecated */ public KeyValue[] sorted() { return raw(); // side effect of loading this.kvs } /** * Return the KeyValues for the specific column. The KeyValues are sorted in * the {@link KeyValue#COMPARATOR} order. That implies the first entry in * the list is the most recent column. If the query (Scan or Get) only * requested 1 version the list will contain at most 1 entry. If the column * did not exist in the result set (either the column does not exist * or the column was not selected in the query) the list will be empty. 
* * Also see getColumnLatest which returns just a KeyValue * * @param family the family * @param qualifier * @return a list of KeyValues for this column or empty list if the column * did not exist in the result set */ public List<KeyValue> getColumn(byte [] family, byte [] qualifier) { List<KeyValue> result = new ArrayList<KeyValue>(); KeyValue [] kvs = raw(); if (kvs == null || kvs.length == 0) { return result; } int pos = binarySearch(kvs, family, qualifier); if (pos == -1) { return result; // cant find it } for (int i = pos ; i < kvs.length ; i++ ) { KeyValue kv = kvs[i]; if (kv.matchingColumn(family,qualifier)) { result.add(kv); } else { break; } } return result; } protected int binarySearch(final KeyValue [] kvs, final byte [] family, final byte [] qualifier) { KeyValue searchTerm = KeyValue.createFirstOnRow(kvs[0].getRow(), family, qualifier); // pos === ( -(insertion point) - 1) int pos = Arrays.binarySearch(kvs, searchTerm, KeyValue.COMPARATOR); // never will exact match if (pos < 0) { pos = (pos+1) * -1; // pos is now insertion point } if (pos == kvs.length) { return -1; // doesn't exist } return pos; } /** * The KeyValue for the most recent for a given column. If the column does * not exist in the result set - if it wasn't selected in the query (Get/Scan) * or just does not exist in the row the return value is null. * * @param family * @param qualifier * @return KeyValue for the column or null */ public KeyValue getColumnLatest(byte [] family, byte [] qualifier) { KeyValue [] kvs = raw(); // side effect possibly. if (kvs == null || kvs.length == 0) { return null; } int pos = binarySearch(kvs, family, qualifier); if (pos == -1) { return null; } KeyValue kv = kvs[pos]; if (kv.matchingColumn(family, qualifier)) { return kv; } return null; } /** * Get the latest version of the specified column. * @param family family name * @param qualifier column qualifier * @return value of latest version of column, null if none found */ public byte[] getValue(byte [] family, byte [] qualifier) { KeyValue kv = getColumnLatest(family, qualifier); if (kv == null) { return null; } return kv.getValue(); } /** * Checks for existence of the specified column. * @param family family name * @param qualifier column qualifier * @return true if at least one value exists in the result, false if not */ public boolean containsColumn(byte [] family, byte [] qualifier) { KeyValue kv = getColumnLatest(family, qualifier); return kv != null; } /** * Map of families to all versions of its qualifiers and values. * <p> * Returns a three level Map of the form: * <code>Map<family,Map&lt;qualifier,Map&lt;timestamp,value>>></code> * <p> * Note: All other map returning methods make use of this map internally. 
* @return map from families to qualifiers to versions */ public NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> getMap() { if(this.familyMap != null) { return this.familyMap; } if(isEmpty()) { return null; } this.familyMap = new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> (Bytes.BYTES_COMPARATOR); for(KeyValue kv : this.kvs) { SplitKeyValue splitKV = kv.split(); byte [] family = splitKV.getFamily(); NavigableMap<byte[], NavigableMap<Long, byte[]>> columnMap = familyMap.get(family); if(columnMap == null) { columnMap = new TreeMap<byte[], NavigableMap<Long, byte[]>> (Bytes.BYTES_COMPARATOR); familyMap.put(family, columnMap); } byte [] qualifier = splitKV.getQualifier(); NavigableMap<Long, byte[]> versionMap = columnMap.get(qualifier); if(versionMap == null) { versionMap = new TreeMap<Long, byte[]>(new Comparator<Long>() { public int compare(Long l1, Long l2) { return l2.compareTo(l1); } }); columnMap.put(qualifier, versionMap); } Long timestamp = Bytes.toLong(splitKV.getTimestamp()); byte [] value = splitKV.getValue(); versionMap.put(timestamp, value); } return this.familyMap; } /** * Map of families to their most recent qualifiers and values. * <p> * Returns a two level Map of the form: <code>Map<family,Map&lt;qualifier,value>></code> * <p> * The most recent version of each qualifier will be used. * @return map from families to qualifiers and value */ public NavigableMap<byte[], NavigableMap<byte[], byte[]>> getNoVersionMap() { if(this.familyMap == null) { getMap(); } if(isEmpty()) { return null; } NavigableMap<byte[], NavigableMap<byte[], byte[]>> returnMap = new TreeMap<byte[], NavigableMap<byte[], byte[]>>(Bytes.BYTES_COMPARATOR); for(Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> familyEntry : familyMap.entrySet()) { NavigableMap<byte[], byte[]> qualifierMap = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR); for(Map.Entry<byte[], NavigableMap<Long, byte[]>> qualifierEntry : familyEntry.getValue().entrySet()) { byte [] value = qualifierEntry.getValue().get(qualifierEntry.getValue().firstKey()); qualifierMap.put(qualifierEntry.getKey(), value); } returnMap.put(familyEntry.getKey(), qualifierMap); } return returnMap; } /** * Map of qualifiers to values. 
* <p> * Returns a Map of the form: <code>Map&lt;qualifier,value></code> * @param family column family to get * @return map of qualifiers to values */ public NavigableMap<byte[], byte[]> getFamilyMap(byte [] family) { if(this.familyMap == null) { getMap(); } if(isEmpty()) { return null; } NavigableMap<byte[], byte[]> returnMap = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR); NavigableMap<byte[], NavigableMap<Long, byte[]>> qualifierMap = familyMap.get(family); if(qualifierMap == null) { return returnMap; } for(Map.Entry<byte[], NavigableMap<Long, byte[]>> entry : qualifierMap.entrySet()) { byte [] value = entry.getValue().get(entry.getValue().firstKey()); returnMap.put(entry.getKey(), value); } return returnMap; } private Map.Entry<Long,byte[]> getKeyValue(byte[] family, byte[] qualifier) { if(this.familyMap == null) { getMap(); } if(isEmpty()) { return null; } NavigableMap<byte [], NavigableMap<Long, byte[]>> qualifierMap = familyMap.get(family); if(qualifierMap == null) { return null; } NavigableMap<Long, byte[]> versionMap = getVersionMap(qualifierMap, qualifier); if(versionMap == null) { return null; } return versionMap.firstEntry(); } private NavigableMap<Long, byte[]> getVersionMap( NavigableMap<byte [], NavigableMap<Long, byte[]>> qualifierMap, byte [] qualifier) { return qualifier != null? qualifierMap.get(qualifier): qualifierMap.get(new byte[0]); } /** * Returns the value of the first column in the Result. * @return value of the first column */ public byte [] value() { if (isEmpty()) { return null; } return kvs[0].getValue(); } /** * Returns the raw binary encoding of this Result.<p> * * Please note, there may be an offset into the underlying byte array of the * returned ImmutableBytesWritable. Be sure to use both * {@link ImmutableBytesWritable#get()} and {@link ImmutableBytesWritable#getOffset()} * @return pointer to raw binary of Result */ public ImmutableBytesWritable getBytes() { return this.bytes; } /** * Check if the underlying KeyValue [] is empty or not * @return true if empty */ public boolean isEmpty() { if(this.kvs == null) { readFields(); } return this.kvs == null || this.kvs.length == 0; } /** * @return the size of the underlying KeyValue [] */ public int size() { if(this.kvs == null) { readFields(); } return this.kvs == null? 
0: this.kvs.length; } /** * @return String */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("keyvalues="); if(isEmpty()) { sb.append("NONE"); return sb.toString(); } sb.append("{"); boolean moreThanOne = false; for(KeyValue kv : this.kvs) { if(moreThanOne) { sb.append(", "); } else { moreThanOne = true; } sb.append(kv.toString()); } sb.append("}"); return sb.toString(); } //Writable public void readFields(final DataInput in) throws IOException { familyMap = null; row = null; kvs = null; int totalBuffer = in.readInt(); if(totalBuffer == 0) { bytes = null; return; } byte [] raw = new byte[totalBuffer]; in.readFully(raw, 0, totalBuffer); bytes = new ImmutableBytesWritable(raw, 0, totalBuffer); } //Create KeyValue[] when needed private void readFields() { if (bytes == null) { this.kvs = new KeyValue[0]; return; } byte [] buf = bytes.get(); int offset = bytes.getOffset(); int finalOffset = bytes.getSize() + offset; List<KeyValue> kvs = new ArrayList<KeyValue>(); while(offset < finalOffset) { int keyLength = Bytes.toInt(buf, offset); offset += Bytes.SIZEOF_INT; kvs.add(new KeyValue(buf, offset, keyLength)); offset += keyLength; } this.kvs = kvs.toArray(new KeyValue[kvs.size()]); } public long getWritableSize() { if (isEmpty()) return Bytes.SIZEOF_INT; // int size = 0 long size = Bytes.SIZEOF_INT; // totalLen for (KeyValue kv : kvs) { size += kv.getLength(); size += Bytes.SIZEOF_INT; // kv.getLength } return size; } public void write(final DataOutput out) throws IOException { if(isEmpty()) { out.writeInt(0); } else { int totalLen = 0; for(KeyValue kv : kvs) { totalLen += kv.getLength() + Bytes.SIZEOF_INT; } out.writeInt(totalLen); for(KeyValue kv : kvs) { out.writeInt(kv.getLength()); out.write(kv.getBuffer(), kv.getOffset(), kv.getLength()); } } } public static long getWriteArraySize(Result [] results) { long size = Bytes.SIZEOF_BYTE; // RESULT_VERSION if (results == null || results.length == 0) { size += Bytes.SIZEOF_INT; return size; } size += Bytes.SIZEOF_INT; // results.length size += Bytes.SIZEOF_INT; // bufLen for (Result result : results) { size += Bytes.SIZEOF_INT; // either 0 or result.size() if (result == null || result.isEmpty()) continue; for (KeyValue kv : result.raw()) { size += Bytes.SIZEOF_INT; // kv.getLength(); size += kv.getLength(); } } return size; } public static void writeArray(final DataOutput out, Result [] results) throws IOException { // Write version when writing array form. // This assumes that results are sent to the client as Result[], so we // have an opportunity to handle version differences without affecting // efficiency. out.writeByte(RESULT_VERSION); if(results == null || results.length == 0) { out.writeInt(0); return; } out.writeInt(results.length); int bufLen = 0; for(Result result : results) { bufLen += Bytes.SIZEOF_INT; if(result == null || result.isEmpty()) { continue; } for(KeyValue key : result.raw()) { bufLen += key.getLength() + Bytes.SIZEOF_INT; } } out.writeInt(bufLen); for(Result result : results) { if(result == null || result.isEmpty()) { out.writeInt(0); continue; } out.writeInt(result.size()); for(KeyValue kv : result.raw()) { out.writeInt(kv.getLength()); out.write(kv.getBuffer(), kv.getOffset(), kv.getLength()); } } } public static Result [] readArray(final DataInput in) throws IOException { // Read version for array form. // This assumes that results are sent to the client as Result[], so we // have an opportunity to handle version differences without affecting // efficiency. 
int version = in.readByte(); if (version > RESULT_VERSION) { throw new IOException("version not supported"); } int numResults = in.readInt(); if(numResults == 0) { return new Result[0]; } Result [] results = new Result[numResults]; int bufSize = in.readInt(); byte [] buf = new byte[bufSize]; int offset = 0; for(int i=0;i<numResults;i++) { int numKeys = in.readInt(); offset += Bytes.SIZEOF_INT; if(numKeys == 0) { results[i] = new Result((ImmutableBytesWritable)null); continue; } int initialOffset = offset; for(int j=0;j<numKeys;j++) { int keyLen = in.readInt(); Bytes.putInt(buf, offset, keyLen); offset += Bytes.SIZEOF_INT; in.readFully(buf, offset, keyLen); offset += keyLen; } int totalLength = offset - initialOffset; results[i] = new Result(new ImmutableBytesWritable(buf, initialOffset, totalLength)); } return results; } /** * Does a deep comparison of two Results, down to the byte arrays. * @param res1 first result to compare * @param res2 second result to compare * @throws Exception Every difference is throwing an exception */ public static void compareResults(Result res1, Result res2) throws Exception { if (res2 == null) { throw new Exception("There wasn't enough rows, we stopped at " + Bytes.toString(res1.getRow())); } if (res1.size() != res2.size()) { throw new Exception("This row doesn't have the same number of KVs: " + res1.toString() + " compared to " + res2.toString()); } KeyValue[] ourKVs = res1.sorted(); KeyValue[] replicatedKVs = res2.sorted(); for (int i = 0; i < res1.size(); i++) { if (!ourKVs[i].equals(replicatedKVs[i]) && !Bytes.equals(ourKVs[i].getValue(), replicatedKVs[i].getValue())) { throw new Exception("This result was different: " + res1.toString() + " compared to " + res2.toString()); } } } }
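A short usage sketch of the Result accessors documented in the javadoc above may help readers of this record. It is illustrative only, not part of the commit: the table name "t", row key "row1", family "cf", and qualifier "q" are hypothetical placeholders, and it assumes the 0.20-era HTable/Get client classes that this file pairs with.

import java.util.Map;
import java.util.NavigableMap;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

// Hedged sketch of reading a single row and inspecting the Result (not from the commit).
public class ResultUsageSketch {
  public static void main(String[] args) throws Exception {
    HTable table = new HTable("t");                              // hypothetical table name
    Result result = table.get(new Get(Bytes.toBytes("row1")));   // single-row read

    // Latest value of one cell (family "cf", qualifier "q").
    byte[] latest = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"));
    System.out.println(latest == null ? "<missing>" : Bytes.toString(latest));

    // Latest value of every qualifier in one family (getFamilyMap returns null for an empty row).
    NavigableMap<byte[], byte[]> cf = result.getFamilyMap(Bytes.toBytes("cf"));
    if (cf != null) {
      for (Map.Entry<byte[], byte[]> e : cf.entrySet()) {
        System.out.println(Bytes.toString(e.getKey()) + " = " + Bytes.toString(e.getValue()));
      }
    }

    // All cells in KeyValue.COMPARATOR order (newest version of each column first).
    for (KeyValue kv : result.raw()) {
      System.out.println(kv);
    }
  }
}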
fixup to javadoc on Result by Leif Wickland from the dev mailing list git-svn-id: 949c06ec81f1cb709fd2be51dd530a930344d7b3@1096633 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/hadoop/hbase/client/Result.java
fixup to javadoc on Result by Leif Wickland from the dev mailing list
Java
apache-2.0
206f78fcdb102d4c1c285a5c8c48f788fab925b6
0
epfldata/squall,mt0803/squall,khuevu/squall,avitorovic/squall,akathorn/squall
package main; import java.io.StringReader; import java.util.List; import java.util.Map; import net.sf.jsqlparser.JSQLParserException; import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.parser.CCJSqlParserManager; import net.sf.jsqlparser.schema.Table; import net.sf.jsqlparser.statement.Statement; import net.sf.jsqlparser.statement.select.Join; import net.sf.jsqlparser.statement.select.Select; import net.sf.jsqlparser.statement.select.SelectItem; import optimizers.ComponentGenerator; import optimizers.ruleBased.RuleBasedOpt; import optimizers.Optimizer; import optimizers.OptimizerTranslator; import optimizers.SimpleOpt; import optimizers.ruleBased.RuleTranslator; import queryPlans.QueryPlan; import schema.Schema; import schema.TPCH_Schema; import util.ParserUtil; import util.TableAliasName; import utilities.SystemParameters; import visitors.jsql.SQLVisitor; public class ParserMain{ private final int CLUSTER_WORKERS = 176; private int CLUSTER_ACKERS = 17; private final int LOCAL_WORKERS = 5; private int LOCAL_ACKERS = 1; private final String sqlExtension = ".sql"; public static void main(String[] args){ String parserConfPath = args[0]; new ParserMain(parserConfPath); } //String[] sizes: {"1G", "2G", "4G", ...} public ParserMain(String parserConfPath){ Map map = SystemParameters.fileToMap(parserConfPath); if(!SystemParameters.getBoolean(map, "DIP_ACK_EVERY_TUPLE")){ //we don't ack after each tuple is sent, // so we don't need any node to be dedicated for acking CLUSTER_ACKERS = 0; LOCAL_ACKERS = 0; } String mode = ""; if (SystemParameters.getBoolean(map, "DIP_DISTRIBUTED")){ mode = "parallel"; SystemParameters.putInMap(map, "DIP_NUM_PARALLELISM", CLUSTER_WORKERS); SystemParameters.putInMap(map, "DIP_NUM_ACKERS", CLUSTER_ACKERS); }else{ mode = "serial"; SystemParameters.putInMap(map, "DIP_NUM_PARALLELISM", LOCAL_WORKERS); SystemParameters.putInMap(map, "DIP_NUM_ACKERS", LOCAL_ACKERS); } String queryName = SystemParameters.getString(map, "DIP_QUERY_NAME"); String sqlPath = SystemParameters.getString(map, "DIP_SQL_ROOT") + queryName + sqlExtension; String sqlstring = ParserUtil.readStringFromFile(sqlPath); String dbSize = SystemParameters.getString(map, "DIP_DB_SIZE") + "G"; String srcParallelism = SystemParameters.getString(map, "DIP_MAX_SRC_PAR"); String dataRoot = SystemParameters.getString(map, "DIP_DATA_ROOT"); String dataPath = dataRoot + "/" + dbSize + "/"; SystemParameters.putInMap(map, "DIP_DATA_PATH" , dataPath); String topologyName = dbSize + "_" + queryName + "_" + mode + "_" + srcParallelism; SystemParameters.putInMap(map, "DIP_TOPOLOGY_NAME", topologyName); String extension = SystemParameters.getString(map, "DIP_EXTENSION"); QueryPlan plan = sqlToPlan(sqlstring, dataPath, extension, map); new Main(plan, map); } private QueryPlan sqlToPlan(String sql, String dataPath, String extension, Map map){ CCJSqlParserManager pm = new CCJSqlParserManager(); Statement statement=null; try { statement = pm.parse(new StringReader(sql)); } catch (JSQLParserException ex) { System.out.println("JSQLParserException"); } if (statement instanceof Select) { Select selectStatement = (Select) statement; SQLVisitor parser = new SQLVisitor(); //visit whole SELECT statement parser.visit(selectStatement); // print out all the tables List<Table> tableList = parser.getTableList(); for(Table table: tableList){ String tableStr = ParserUtil.toString(table); System.out.println(tableStr); } //print all the joins List<Join> joinList = parser.getJoinList(); for(Join join: joinList){ String joinStr 
= ParserUtil.toString(join); System.out.println(joinStr); } List<SelectItem> selectItems = parser.getSelectItems(); Expression whereExpr = parser.getWhereExpr(); double scallingFactor = SystemParameters.getDouble(map, "DIP_DB_SIZE"); return generatePlan(tableList, joinList, selectItems, whereExpr, new TPCH_Schema(scallingFactor), dataPath, extension, map); } throw new RuntimeException("Please provide SELECT statement!"); } private final QueryPlan generatePlan(List<Table> tableList, List<Join> joinList, List<SelectItem> selectItems, Expression whereExpr, Schema schema, String dataPath, String extension, Map map){ TableAliasName tan = new TableAliasName(tableList); //works both for simple and rule-based optimizer OptimizerTranslator ot = new RuleTranslator(schema, tan); //Simple optimizer provides lefty plans //Optimizer opt = new SimpleOpt(schema, tan, dataPath, extension, ot, map); //Dynamic programming query plan Optimizer opt = new RuleBasedOpt(schema, tan, dataPath, extension, ot, map); ComponentGenerator cg = opt.generate(tableList, joinList, selectItems, whereExpr); return cg.getQueryPlan(); } }
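To make the parse-and-visit flow inside sqlToPlan above easier to follow in isolation, here is a hedged sketch of the same pattern. It reuses only classes already referenced in this file (CCJSqlParserManager, Select, the project's SQLVisitor and ParserUtil); the query text is a hypothetical placeholder, not one of the project's TPC-H queries.

import java.io.StringReader;
import java.util.List;
import net.sf.jsqlparser.parser.CCJSqlParserManager;
import net.sf.jsqlparser.schema.Table;
import net.sf.jsqlparser.statement.Statement;
import net.sf.jsqlparser.statement.select.Join;
import net.sf.jsqlparser.statement.select.Select;
import util.ParserUtil;
import visitors.jsql.SQLVisitor;

// Hedged sketch of the parse-and-visit step used by ParserMain.sqlToPlan (not from the commit).
public class SqlToPlanSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical query text; any SELECT statement with a join would do.
    String sql = "SELECT * FROM LINEITEM L JOIN ORDERS O ON L.ORDERKEY = O.ORDERKEY";

    CCJSqlParserManager pm = new CCJSqlParserManager();
    Statement statement = pm.parse(new StringReader(sql));
    if (!(statement instanceof Select)) {
      throw new RuntimeException("Please provide SELECT statement!");
    }

    // The project's visitor collects tables, joins, select items and the WHERE expression.
    SQLVisitor parser = new SQLVisitor();
    parser.visit((Select) statement);

    for (Table table : (List<Table>) parser.getTableList()) {
      System.out.println(ParserUtil.toString(table));
    }
    for (Join join : (List<Join>) parser.getJoinList()) {
      System.out.println(ParserUtil.toString(join));
    }
  }
}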
src/squall/src/main/ParserMain.java
package main; import java.io.StringReader; import java.util.List; import java.util.Map; import net.sf.jsqlparser.JSQLParserException; import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.parser.CCJSqlParserManager; import net.sf.jsqlparser.schema.Table; import net.sf.jsqlparser.statement.Statement; import net.sf.jsqlparser.statement.select.Join; import net.sf.jsqlparser.statement.select.Select; import net.sf.jsqlparser.statement.select.SelectItem; import optimizers.ComponentGenerator; import optimizers.ruleBased.RuleBasedOpt; import optimizers.Optimizer; import optimizers.OptimizerTranslator; import optimizers.SimpleOpt; import optimizers.ruleBased.RuleTranslator; import queryPlans.QueryPlan; import schema.Schema; import schema.TPCH_Schema; import util.ParserUtil; import util.TableAliasName; import utilities.SystemParameters; import visitors.jsql.SQLVisitor; public class ParserMain{ private final int CLUSTER_WORKERS = 176; private int CLUSTER_ACKERS = 17; private final int LOCAL_WORKERS = 5; private int LOCAL_ACKERS = 1; private final String sqlExtension = ".sql"; public static void main(String[] args){ String parserConfPath = args[0]; new ParserMain(parserConfPath); } //String[] sizes: {"1G", "2G", "4G", ...} public ParserMain(String parserConfPath){ Map map = SystemParameters.fileToMap(parserConfPath); if(!SystemParameters.getBoolean(map, "DIP_ACK_EVERY_TUPLE")){ //we don't ack after each tuple is sent, // so we don't need any node to be dedicated for acking CLUSTER_ACKERS = 0; LOCAL_ACKERS = 0; } String mode = ""; if (SystemParameters.getBoolean(map, "DIP_DISTRIBUTED")){ mode = "parallel"; SystemParameters.putInMap(map, "DIP_NUM_PARALLELISM", CLUSTER_WORKERS); SystemParameters.putInMap(map, "DIP_NUM_ACKERS", CLUSTER_ACKERS); }else{ mode = "serial"; SystemParameters.putInMap(map, "DIP_NUM_PARALLELISM", LOCAL_WORKERS); SystemParameters.putInMap(map, "DIP_NUM_ACKERS", LOCAL_ACKERS); } String queryName = SystemParameters.getString(map, "DIP_QUERY_NAME"); String sqlPath = SystemParameters.getString(map, "DIP_SQL_ROOT") + queryName + sqlExtension; String sqlstring = ParserUtil.readStringFromFile(sqlPath); String dbSize = SystemParameters.getString(map, "DIP_DB_SIZE") + "G"; String srcParallelism = SystemParameters.getString(map, "DIP_MAX_SRC_PAR"); String dataRoot = SystemParameters.getString(map, "DIP_DATA_ROOT"); String dataPath = dataRoot + "/" + dbSize + "/"; SystemParameters.putInMap(map, "DIP_DATA_PATH" , dataPath); String topologyName = dbSize + "_" + queryName + "_" + mode + "_" + srcParallelism; SystemParameters.putInMap(map, "DIP_TOPOLOGY_NAME", topologyName); String extension = SystemParameters.getString(map, "DIP_EXTENSION"); QueryPlan plan = sqlToPlan(sqlstring, dataPath, extension, map); new Main(plan, map); } private QueryPlan sqlToPlan(String sql, String dataPath, String extension, Map map){ CCJSqlParserManager pm = new CCJSqlParserManager(); Statement statement=null; try { statement = pm.parse(new StringReader(sql)); } catch (JSQLParserException ex) { System.out.println("JSQLParserException"); } if (statement instanceof Select) { Select selectStatement = (Select) statement; SQLVisitor parser = new SQLVisitor(); //visit whole SELECT statement parser.visit(selectStatement); // print out all the tables List<Table> tableList = parser.getTableList(); for(Table table: tableList){ String tableStr = ParserUtil.toString(table); System.out.println(tableStr); } //print all the joins List<Join> joinList = parser.getJoinList(); for(Join join: joinList){ String joinStr 
= ParserUtil.toString(join); System.out.println(joinStr); } List<SelectItem> selectItems = parser.getSelectItems(); Expression whereExpr = parser.getWhereExpr(); double scallingFactor = SystemParameters.getDouble(map, "DIP_DB_SIZE"); return generatePlan(tableList, joinList, selectItems, whereExpr, new TPCH_Schema(scallingFactor), dataPath, extension, map); } throw new RuntimeException("Please provide SELECT statement!"); } private final QueryPlan generatePlan(List<Table> tableList, List<Join> joinList, List<SelectItem> selectItems, Expression whereExpr, Schema schema, String dataPath, String extension, Map map){ TableAliasName tan = new TableAliasName(tableList); //works both for simple and rule-based optimizer OptimizerTranslator ot = new RuleTranslator(schema, tan); //Simple optimizer provides lefty plans Optimizer opt = new SimpleOpt(schema, tan, dataPath, extension, ot, map); //Dynamic programming query plan //Optimizer opt = new RuleBasedOpt(schema, tan, dataPath, extension, ot, map); ComponentGenerator cg = opt.generate(tableList, joinList, selectItems, whereExpr); return cg.getQueryPlan(); } }
Rule-based optimizer is the default one.
src/squall/src/main/ParserMain.java
Rule-based optimizer is the default one.
Java
apache-2.0
89d5190dde9f0406a88230b8cfa90e9d30aa2c68
0
allotria/intellij-community
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.pyqt; import com.intellij.lang.xml.XMLLanguage; import com.intellij.openapi.fileTypes.INativeFileType; import com.intellij.openapi.fileTypes.LanguageFileType; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.NlsContexts.Label; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiElement; import com.intellij.psi.util.QualifiedName; import com.jetbrains.python.PyBundle; import com.jetbrains.python.psi.resolve.PyResolveImportUtil; import com.jetbrains.python.sdk.PythonSdkUtil; import one.util.streamex.StreamEx; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.util.List; /** * @author yole */ public abstract class QtFileType extends LanguageFileType implements INativeFileType { private final String myName; private final @Label String myDescription; private final String myDefaultExtension; QtFileType(@NonNls String name, @Label String description, String defaultExtension) { super(XMLLanguage.INSTANCE, true); myName = name; myDescription = description; myDefaultExtension = defaultExtension; } @NotNull @NonNls @Override public String getName() { return myName; } @NotNull @Label @Override public String getDescription() { return myDescription; } @NotNull @Override public String getDefaultExtension() { return myDefaultExtension; } @Override public boolean isReadOnly() { return false; } @Override public boolean openFileInAssociatedApplication(Project project, @NotNull VirtualFile file) { String qtTool = findQtTool(ModuleUtilCore.findModuleForFile(file, project), getToolName()); if (qtTool == null) { return false; } try { Runtime.getRuntime().exec(new String[]{qtTool, file.getPath()}); } catch (IOException e) { Messages.showErrorDialog(project, PyBundle.message("qt.error.failed.run.qt.designer", e.getMessage()), PyBundle.message("qt.run.designer.error")); } return true; } public static String findQtTool(Module module, String toolName) { if (SystemInfo.isWindows) { if (module == null) { return null; } Sdk sdk = PythonSdkUtil.findPythonSdk(module); if (sdk == null) { return null; } String tool = findToolInPackage(toolName, module, "PyQt4"); if (tool != null) { return tool; } return findToolInPackage(toolName, module, "PySide"); } // TODO return null; } @Nullable private static String findToolInPackage(String toolName, Module module, String name) { final List<PsiElement> results = PyResolveImportUtil.resolveQualifiedName(QualifiedName.fromDottedString(name), PyResolveImportUtil.fromModule(module)); return StreamEx.of(results).select(PsiDirectory.class) .map(directory -> directory.getVirtualFile().findChild(toolName + ".exe")) .nonNull() .map(VirtualFile::getPath) .findFirst() .orElse(null); } protected abstract String getToolName(); }
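Since QtFileType is abstract, a concrete file type has to supply the display name, default extension, and the Qt tool to launch. The subclass below is a hypothetical sketch for illustration only; the class name, the display strings, and the "designer" tool name are placeholders and are not taken from the actual plugin registrations.

// Hypothetical subclass, for illustration only -- not part of the IntelliJ sources.
// It lives in the same package because the QtFileType constructor is package-private.
package com.jetbrains.pyqt;

public class UiFormFileTypeSketch extends QtFileType {
  public static final UiFormFileTypeSketch INSTANCE = new UiFormFileTypeSketch();

  private UiFormFileTypeSketch() {
    // name, user-visible description, default extension (all placeholder values)
    super("Qt UI file (sketch)", "Qt Designer form (sketch)", "ui");
  }

  @Override
  protected String getToolName() {
    // Executable name resolved next to the PyQt4/PySide package by findToolInPackage above,
    // e.g. designer.exe on Windows.
    return "designer";
  }
}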
python/src/com/jetbrains/pyqt/QtFileType.java
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.pyqt; import com.intellij.lang.xml.XMLLanguage; import com.intellij.openapi.fileTypes.INativeFileType; import com.intellij.openapi.fileTypes.LanguageFileType; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.NlsContexts.Label; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiElement; import com.intellij.psi.util.QualifiedName; import com.jetbrains.python.PyBundle; import com.jetbrains.python.psi.resolve.PyResolveImportUtil; import com.jetbrains.python.sdk.PythonSdkUtil; import one.util.streamex.StreamEx; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.util.List; /** * @author yole */ public abstract class QtFileType extends LanguageFileType implements INativeFileType { private final String myName; private final @Label String myDescription; private final String myDefaultExtension; QtFileType(@NonNls String name, @Label String description, String defaultExtension) { super(XMLLanguage.INSTANCE, true); myName = name; myDescription = description; myDefaultExtension = defaultExtension; } @NotNull @NonNls @Override public String getName() { return myName; } @NotNull @Label @Override public String getDescription() { return myDescription; } @NotNull @Override public String getDefaultExtension() { return myDefaultExtension; } @Override public boolean isReadOnly() { return false; } @Override public boolean openFileInAssociatedApplication(Project project, @NotNull VirtualFile file) { String qtTool = findQtTool(ModuleUtilCore.findModuleForFile(file, project), getToolName()); if (qtTool == null) { return false; } try { Runtime.getRuntime().exec(new String[] { qtTool, file.getPath() } ); } catch (IOException e) { Messages.showErrorDialog(project, PyBundle.message("qt.error.failed.run.qt.designer", e.getMessage()), PyBundle.message("qt.run.designer.error")); } return true; } public static String findQtTool(Module module, String toolName) { if (SystemInfo.isWindows) { if (module == null) { return null; } Sdk sdk = PythonSdkUtil.findPythonSdk(module); if (sdk == null) { return null; } String tool = findToolInPackage(toolName, module, "PyQt4"); if (tool != null) { return tool; } return findToolInPackage(toolName, module, "PySide"); } // TODO return null; } @Nullable private static String findToolInPackage(String toolName, Module module, String name) { final List<PsiElement> results = PyResolveImportUtil.resolveQualifiedName(QualifiedName.fromDottedString(name), PyResolveImportUtil.fromModule(module)); return StreamEx.of(results).select(PsiDirectory.class) .map(directory -> directory.getVirtualFile().findChild(toolName + ".exe")) .nonNull() .map(VirtualFile::getPath) .findFirst() .orElse(null); } protected abstract String getToolName(); }
Reformat QtFileType.java GitOrigin-RevId: 80b8a6eb3324b0cfbafc874075fad5070b679510
python/src/com/jetbrains/pyqt/QtFileType.java
Reformat QtFileType.java
Java
apache-2.0
081baf386a49ee5511dde18ebeae7428596ee137
0
bonigarcia/webdrivermanager
/* * (C) Copyright 2015 Boni Garcia (http://bonigarcia.github.io/) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package io.github.bonigarcia.wdm; import static io.github.bonigarcia.wdm.Architecture.X32; import static io.github.bonigarcia.wdm.Architecture.X64; import static io.github.bonigarcia.wdm.Config.isNullOrEmpty; import static io.github.bonigarcia.wdm.DriverManagerType.CHROME; import static io.github.bonigarcia.wdm.DriverManagerType.EDGE; import static io.github.bonigarcia.wdm.DriverManagerType.FIREFOX; import static io.github.bonigarcia.wdm.DriverManagerType.IEXPLORER; import static io.github.bonigarcia.wdm.DriverManagerType.OPERA; import static io.github.bonigarcia.wdm.DriverManagerType.PHANTOMJS; import static io.github.bonigarcia.wdm.DriverManagerType.SELENIUM_SERVER_STANDALONE; import static io.github.bonigarcia.wdm.OperatingSystem.WIN; import static io.github.bonigarcia.wdm.Shell.getVersionFromPosixOutput; import static io.github.bonigarcia.wdm.Shell.getVersionFromWmicOutput; import static io.github.bonigarcia.wdm.Shell.runAndWait; import static java.lang.Integer.parseInt; import static java.lang.Integer.signum; import static java.lang.Integer.valueOf; import static java.lang.invoke.MethodHandles.lookup; import static java.util.Collections.sort; import static java.util.Optional.empty; import static javax.xml.xpath.XPathConstants.NODESET; import static javax.xml.xpath.XPathFactory.newInstance; import static org.apache.commons.io.FileUtils.deleteDirectory; import static org.apache.commons.io.FileUtils.listFiles; import static org.apache.commons.io.FilenameUtils.removeExtension; import static org.apache.commons.lang3.StringUtils.isNumeric; import static org.apache.commons.lang3.SystemUtils.IS_OS_LINUX; import static org.apache.commons.lang3.SystemUtils.IS_OS_MAC; import static org.apache.commons.lang3.SystemUtils.IS_OS_WINDOWS; import static org.slf4j.LoggerFactory.getLogger; import java.io.BufferedReader; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.net.URL; import java.util.ArrayList; import java.util.EnumMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.commons.codec.binary.Base64; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.jsoup.Jsoup; import org.slf4j.Logger; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.internal.LinkedTreeMap; /** * Parent driver manager. 
* * @author Boni Garcia ([email protected]) * @since 2.1.0 */ public abstract class WebDriverManager { static final Logger log = getLogger(lookup().lookupClass()); protected static final String SLASH = "/"; protected static final String INSIDERS = "insiders"; protected static final String BETA = "beta"; protected static final String ONLINE = "online"; protected static final String LOCAL = "local"; protected abstract List<URL> getDrivers() throws IOException; protected abstract Optional<String> getBrowserVersion(); protected abstract DriverManagerType getDriverManagerType(); protected abstract String getDriverName(); protected abstract void setDriverVersion(String version); protected abstract String getDriverVersion(); protected abstract void setDriverUrl(URL url); protected abstract URL getDriverUrl(); protected abstract Optional<URL> getMirrorUrl(); protected abstract Optional<String> getExportParameter(); protected static Map<DriverManagerType, WebDriverManager> instanceMap = new EnumMap<>( DriverManagerType.class); protected HttpClient httpClient; protected Downloader downloader; protected UrlFilter urlFilter; protected String versionToDownload; protected String downloadedVersion; protected String latestVersion; protected String binaryPath; protected boolean mirrorLog; protected List<String> listVersions; protected boolean forcedArch; protected boolean forcedOs; protected boolean isLatest; protected boolean retry = true; protected Config config = new Config(); protected Preferences preferences = new Preferences(config); protected String preferenceKey; protected Properties versionsProperties; public static Config globalConfig() { Config global = new Config(); global.setAvoidAutoReset(true); for (DriverManagerType type : DriverManagerType.values()) { WebDriverManager.getInstance(type).setConfig(global); } return global; } public Config config() { return config; } public static synchronized WebDriverManager chromedriver() { if (!instanceMap.containsKey(CHROME)) { instanceMap.put(CHROME, new ChromeDriverManager()); } return instanceMap.get(CHROME); } public static synchronized WebDriverManager firefoxdriver() { if (!instanceMap.containsKey(FIREFOX)) { instanceMap.put(FIREFOX, new FirefoxDriverManager()); } return instanceMap.get(FIREFOX); } public static synchronized WebDriverManager operadriver() { if (!instanceMap.containsKey(OPERA)) { instanceMap.put(OPERA, new OperaDriverManager()); } return instanceMap.get(OPERA); } public static synchronized WebDriverManager edgedriver() { if (!instanceMap.containsKey(EDGE)) { instanceMap.put(EDGE, new EdgeDriverManager()); } return instanceMap.get(EDGE); } public static synchronized WebDriverManager iedriver() { if (!instanceMap.containsKey(IEXPLORER)) { instanceMap.put(IEXPLORER, new InternetExplorerDriverManager()); } return instanceMap.get(IEXPLORER); } public static synchronized WebDriverManager phantomjs() { if (!instanceMap.containsKey(PHANTOMJS)) { instanceMap.put(PHANTOMJS, new PhantomJsDriverManager()); } return instanceMap.get(PHANTOMJS); } public static synchronized WebDriverManager seleniumServerStandalone() { if (!instanceMap.containsKey(SELENIUM_SERVER_STANDALONE)) { instanceMap.put(SELENIUM_SERVER_STANDALONE, new SeleniumServerStandaloneManager()); } return instanceMap.get(SELENIUM_SERVER_STANDALONE); } protected static synchronized WebDriverManager voiddriver() { return new VoidDriverManager(); } public static synchronized WebDriverManager getInstance( DriverManagerType driverManagerType) { if (driverManagerType == null) { return 
voiddriver(); } switch (driverManagerType) { case CHROME: return chromedriver(); case FIREFOX: return firefoxdriver(); case OPERA: return operadriver(); case IEXPLORER: return iedriver(); case EDGE: return edgedriver(); case PHANTOMJS: return phantomjs(); case SELENIUM_SERVER_STANDALONE: return seleniumServerStandalone(); default: return voiddriver(); } } public static synchronized WebDriverManager getInstance( Class<?> webDriverClass) { switch (webDriverClass.getName()) { case "org.openqa.selenium.chrome.ChromeDriver": return chromedriver(); case "org.openqa.selenium.firefox.FirefoxDriver": return firefoxdriver(); case "org.openqa.selenium.opera.OperaDriver": return operadriver(); case "org.openqa.selenium.ie.InternetExplorerDriver": return iedriver(); case "org.openqa.selenium.edge.EdgeDriver": return edgedriver(); case "org.openqa.selenium.phantomjs.PhantomJSDriver": return phantomjs(); default: return voiddriver(); } } public synchronized void setup() { if (getDriverManagerType() != null) { try { Architecture architecture = config().getArchitecture(); String driverVersion = getDriverVersion(); isLatest = isVersionLatest(driverVersion); manage(architecture, driverVersion); } finally { if (!config().isAvoidAutoReset()) { reset(); } } } } public WebDriverManager version(String version) { setDriverVersion(version); return instanceMap.get(getDriverManagerType()); } public WebDriverManager architecture(Architecture architecture) { config().setArchitecture(architecture); forcedArch = true; return instanceMap.get(getDriverManagerType()); } public WebDriverManager arch32() { architecture(X32); return instanceMap.get(getDriverManagerType()); } public WebDriverManager arch64() { architecture(X64); return instanceMap.get(getDriverManagerType()); } public WebDriverManager operatingSystem(OperatingSystem os) { config().setOs(os.name()); forcedOs = true; return instanceMap.get(getDriverManagerType()); } public WebDriverManager forceCache() { config().setForceCache(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager forceDownload() { config().setOverride(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager driverRepositoryUrl(URL url) { setDriverUrl(url); return instanceMap.get(getDriverManagerType()); } public WebDriverManager useMirror() { Optional<URL> mirrorUrl = getMirrorUrl(); if (!mirrorUrl.isPresent()) { throw new WebDriverManagerException("Mirror URL not available"); } config().setUseMirror(true); setDriverUrl(mirrorUrl.get()); return instanceMap.get(getDriverManagerType()); } public WebDriverManager proxy(String proxy) { config().setProxy(proxy); return instanceMap.get(getDriverManagerType()); } public WebDriverManager proxyUser(String proxyUser) { config().setProxyUser(proxyUser); return instanceMap.get(getDriverManagerType()); } public WebDriverManager proxyPass(String proxyPass) { config().setProxyPass(proxyPass); return instanceMap.get(getDriverManagerType()); } public WebDriverManager useBetaVersions() { config().setUseBetaVersions(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager ignoreVersions(String... 
versions) { config().setIgnoreVersions(versions); return instanceMap.get(getDriverManagerType()); } public WebDriverManager gitHubTokenName(String gitHubTokenName) { config().setGitHubTokenName(gitHubTokenName); return instanceMap.get(getDriverManagerType()); } public WebDriverManager gitHubTokenSecret(String gitHubTokenSecret) { config().setGitHubTokenSecret(gitHubTokenSecret); return instanceMap.get(getDriverManagerType()); } public WebDriverManager timeout(int timeout) { config().setTimeout(timeout); return instanceMap.get(getDriverManagerType()); } public WebDriverManager properties(String properties) { config().setProperties(properties); return instanceMap.get(getDriverManagerType()); } public WebDriverManager targetPath(String targetPath) { config().setTargetPath(targetPath); return instanceMap.get(getDriverManagerType()); } public WebDriverManager avoidExport() { config().setAvoidExport(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager avoidOutputTree() { config().setAvoidOutputTree(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager avoidAutoVersion() { config().setAvoidAutoVersion(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager avoidPreferences() { config().setAvoidPreferences(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager ttl(int seconds) { config().setTtl(seconds); return instanceMap.get(getDriverManagerType()); } public WebDriverManager browserPath(String browserPath) { config().setBinaryPath(browserPath); return instanceMap.get(getDriverManagerType()); } // ------------ public String getBinaryPath() { return instanceMap.get(getDriverManagerType()).binaryPath; } public String getDownloadedVersion() { return instanceMap.get(getDriverManagerType()).downloadedVersion; } public List<String> getVersions() { httpClient = new HttpClient(config()); try { List<URL> drivers = getDrivers(); List<String> versions = new ArrayList<>(); for (URL url : drivers) { String version = getCurrentVersion(url, getDriverName()); if (version.isEmpty() || version.equalsIgnoreCase("icons")) { continue; } if (version.startsWith(".")) { version = version.substring(1); } if (!versions.contains(version)) { versions.add(version); } } log.trace("Version list before sorting {}", versions); sort(versions, new VersionComparator()); return versions; } catch (IOException e) { throw new WebDriverManagerException(e); } } public void clearPreferences() { instanceMap.get(getDriverManagerType()).preferences.clear(); } public void clearCache() { String targetPath = config().getTargetPath(); try { log.debug("Clearing cache at {}", targetPath); deleteDirectory(new File(targetPath)); } catch (Exception e) { log.warn("Exception deleting cache at {}", targetPath, e); } } // ------------ protected String preDownload(String target, String version) { log.trace("Pre-download. 
target={}, version={}", target, version); return target; } protected File postDownload(File archive) { File parentFolder = archive.getParentFile(); File[] ls = parentFolder.listFiles(); for (File f : ls) { if (getDriverName().contains(removeExtension(f.getName()))) { log.trace("Found binary in post-download: {}", f); return f; } } throw new WebDriverManagerException("Driver " + getDriverName() + " not found (using temporal folder " + parentFolder + ")"); } protected String getCurrentVersion(URL url, String driverName) { String currentVersion = ""; try { currentVersion = url.getFile().substring( url.getFile().indexOf(SLASH) + 1, url.getFile().lastIndexOf(SLASH)); } catch (StringIndexOutOfBoundsException e) { log.trace("Exception getting version of URL {} ({})", url, e.getMessage()); } return currentVersion; } protected void manage(Architecture arch, String version) { httpClient = new HttpClient(config()); try (HttpClient wdmHttpClient = httpClient) { downloader = new Downloader(getDriverManagerType()); urlFilter = new UrlFilter(); boolean getLatest = isVersionLatest(version); boolean cache = config().isForceCache(); if (getLatest) { version = detectDriverVersionFromBrowser(); } getLatest = isNullOrEmpty(version); // For Edge if (checkInsiderVersion(version)) { return; } String os = config().getOs(); log.trace("Managing {} arch={} version={} getLatest={} cache={}", getDriverName(), arch, version, getLatest, cache); if (getLatest && latestVersion != null) { log.debug("Latest version of {} is {} (recently resolved)", getDriverName(), latestVersion); version = latestVersion; cache = true; } Optional<String> driverInCache = handleCache(arch, version, os, getLatest, cache); String versionStr = getLatest ? "(latest version)" : version; if (driverInCache.isPresent() && !config().isOverride()) { storeVersionToDownload(version); downloadedVersion = version; log.debug("Driver {} {} found in cache", getDriverName(), versionStr); exportDriver(driverInCache.get()); } else { List<URL> candidateUrls = filterCandidateUrls(arch, version, getLatest); if (candidateUrls.isEmpty()) { String errorMessage = getDriverName() + " " + versionStr + " for " + os + arch.toString() + " not found in " + getDriverUrl(); log.error(errorMessage); throw new WebDriverManagerException(errorMessage); } downloadCandidateUrls(candidateUrls); } } catch (Exception e) { handleException(e, arch, version); } } private String detectDriverVersionFromBrowser() { String version = ""; if (config().isAvoidAutoVersion()) { return version; } Optional<String> optionalBrowserVersion = getBrowserVersion(); if (optionalBrowserVersion.isPresent()) { String browserVersion = optionalBrowserVersion.get(); log.trace("Detected {} version {}", getDriverManagerType(), browserVersion); preferenceKey = getDriverManagerType().name().toLowerCase() + browserVersion; if (usePreferences() && preferences.checkKeyInPreferences(preferenceKey)) { // Get driver version from preferences version = preferences.getValueFromPreferences(preferenceKey); } else { // Get driver version from properties version = getVersionForInstalledBrowser(browserVersion); } if (!isNullOrEmpty(version)) { log.info( "Using {} {} (since {} {} is installed in your machine)", getDriverName(), version, getDriverManagerType(), browserVersion); } } else { log.debug( "The proper {} version for your {} is unknown ... 
trying with the latest", getDriverName(), getDriverManagerType()); } return version; } private boolean usePreferences() { boolean usePrefs = !config().isAvoidPreferences() && !config().isOverride() && !forcedArch && !forcedOs; log.trace("Using preferences {}", usePrefs); return usePrefs; } private boolean checkInsiderVersion(String version) { if (version.equals(INSIDERS)) { String systemRoot = System.getenv("SystemRoot"); File microsoftWebDriverFile = new File(systemRoot, "System32" + File.separator + "MicrosoftWebDriver.exe"); if (microsoftWebDriverFile.exists()) { downloadedVersion = INSIDERS; exportDriver(microsoftWebDriverFile.toString()); return true; } else { retry = false; throw new WebDriverManagerException( "MicrosoftWebDriver.exe should be installed in an elevated command prompt executing: " + "dism /Online /Add-Capability /CapabilityName:Microsoft.WebDriver~~~~0.0.1.0"); } } return false; } private boolean isVersionLatest(String version) { return isNullOrEmpty(version) || version.equalsIgnoreCase("latest"); } private String getVersionForInstalledBrowser(String browserVersion) { String driverVersion = ""; DriverManagerType driverManagerType = getDriverManagerType(); String driverLowerCase = driverManagerType.name().toLowerCase(); Optional<String> driverVersionForBrowser = getDriverVersionForBrowserFromProperties( driverLowerCase + browserVersion, false); if (driverVersionForBrowser.isPresent()) { driverVersion = driverVersionForBrowser.get(); } else { log.debug( "The driver version for {} {} is unknown ... trying with latest", driverManagerType, browserVersion); } return driverVersion; } private Optional<String> getDriverVersionForBrowserFromProperties( String key, boolean online) { String onlineMessage = online ? ONLINE : LOCAL; log.trace("Getting driver version from {} properties for {}", onlineMessage, key); String value = getVersionFromProperties(online).getProperty(key); if (value == null) { log.debug("Driver for {} not found in {} properties", key, onlineMessage); versionsProperties = null; value = getVersionFromProperties(!online).getProperty(key); } return value == null ? empty() : Optional.of(value); } private Properties getVersionFromProperties(boolean online) { if (versionsProperties != null) { log.trace("Already created versions.properties"); return versionsProperties; } else { try { InputStream inputStream = getVersionsInputStream(online); versionsProperties = new Properties(); versionsProperties.load(inputStream); inputStream.close(); } catch (Exception e) { versionsProperties = null; throw new IllegalStateException( "Cannot read versions.properties", e); } return versionsProperties; } } private InputStream getVersionsInputStream(boolean online) throws IOException { String onlineMessage = online ? ONLINE : LOCAL; log.trace("Reading {} version.properties to find out driver version", onlineMessage); InputStream inputStream; try { if (online) { inputStream = getOnlineVersionsInputStream(); } else { inputStream = getLocalVersionsInputStream(); } } catch (Exception e) { String exceptionMessage = online ? 
LOCAL : ONLINE; log.warn("Error reading version.properties, using {} instead", exceptionMessage); if (online) { inputStream = getLocalVersionsInputStream(); } else { inputStream = getOnlineVersionsInputStream(); } } return inputStream; } private InputStream getLocalVersionsInputStream() { InputStream inputStream; inputStream = Config.class.getResourceAsStream("/versions.properties"); return inputStream; } private InputStream getOnlineVersionsInputStream() throws IOException { return httpClient .execute(httpClient .createHttpGet(config().getVersionsPropertiesUrl())) .getEntity().getContent(); } protected void handleException(Exception e, Architecture arch, String version) { String versionStr = isNullOrEmpty(version) ? "(latest version)" : version; String errorMessage = String.format( "There was an error managing %s %s (%s)", getDriverName(), versionStr, e.getMessage()); if (!config().isForceCache() && retry) { config().setForceCache(true); config().setUseMirror(true); retry = false; log.warn("{} ... trying again using cache and mirror", errorMessage); manage(arch, version); } else { log.error("{}", errorMessage, e); throw new WebDriverManagerException(e); } } protected void downloadCandidateUrls(List<URL> candidateUrls) throws IOException, InterruptedException { URL url = candidateUrls.iterator().next(); String exportValue = downloader.download(url, versionToDownload, getDriverName()); exportDriver(exportValue); downloadedVersion = versionToDownload; } protected List<URL> filterCandidateUrls(Architecture arch, String version, boolean getLatest) throws IOException { List<URL> urls = getDrivers(); List<URL> candidateUrls; log.trace("All URLs: {}", urls); boolean continueSearchingVersion; do { // Get the latest or concrete version candidateUrls = getLatest ? checkLatest(urls, getDriverName()) : getVersion(urls, getDriverName(), version); log.trace("Candidate URLs: {}", candidateUrls); if (versionToDownload == null || this.getClass().equals(EdgeDriverManager.class)) { break; } // Filter by OS if (!getDriverName().equalsIgnoreCase("IEDriverServer") && !getDriverName() .equalsIgnoreCase("selenium-server-standalone")) { candidateUrls = urlFilter.filterByOs(candidateUrls, config().getOs()); } // Filter by architecture candidateUrls = urlFilter.filterByArch(candidateUrls, arch, forcedArch); // Filter by distro candidateUrls = filterByDistro(candidateUrls); // Filter by ignored versions candidateUrls = filterByIgnoredVersions(candidateUrls); // Find out if driver version has been found or not continueSearchingVersion = candidateUrls.isEmpty() && getLatest; if (continueSearchingVersion) { log.info( "No binary found for {} {} ... 
seeking another version", getDriverName(), versionToDownload); urls = removeFromList(urls, versionToDownload); versionToDownload = null; } } while (continueSearchingVersion); return candidateUrls; } protected List<URL> filterByIgnoredVersions(List<URL> candidateUrls) { if (config().getIgnoreVersions() != null && !candidateUrls.isEmpty()) { candidateUrls = urlFilter.filterByIgnoredVersions(candidateUrls, config().getIgnoreVersions()); } return candidateUrls; } protected List<URL> filterByDistro(List<URL> candidateUrls) throws IOException { // Filter phantomjs 2.5.0 in Linux if (config().getOs().equalsIgnoreCase("linux") && getDriverName().contains("phantomjs")) { candidateUrls = urlFilter.filterByDistro(candidateUrls, "2.5.0"); } return candidateUrls; } protected Optional<String> handleCache(Architecture arch, String version, String os, boolean getLatest, boolean cache) { Optional<String> driverInCache = empty(); if (cache || !getLatest) { driverInCache = getDriverFromCache(version, arch, os); } storeVersionToDownload(version); return driverInCache; } protected Optional<String> getDriverFromCache(String driverVersion, Architecture arch, String os) { log.trace("Checking if {} exists in cache", getDriverName()); List<File> filesInCache = getFilesInCache(); if (!filesInCache.isEmpty()) { // Filter by name filesInCache = filterCacheBy(filesInCache, getDriverName()); // Filter by version filesInCache = filterCacheBy(filesInCache, driverVersion); // Filter by OS if (!getDriverName().equals("MicrosoftWebDriver")) { filesInCache = filterCacheBy(filesInCache, os.toLowerCase()); } if (filesInCache.size() == 1) { return Optional.of(filesInCache.get(0).toString()); } // Filter by arch filesInCache = filterCacheBy(filesInCache, arch.toString()); if (!filesInCache.isEmpty()) { return Optional.of( filesInCache.get(filesInCache.size() - 1).toString()); } } log.trace("{} not found in cache", getDriverName()); return empty(); } protected List<File> filterCacheBy(List<File> input, String key) { List<File> output = new ArrayList<>(input); if (!key.isEmpty() && !input.isEmpty()) { for (File f : input) { if (!f.toString().contains(key)) { output.remove(f); } } } log.trace("Filter cache by {} -- input list {} -- output list {} ", key, input, output); return output; } protected List<File> getFilesInCache() { return (List<File>) listFiles(new File(downloader.getTargetPath()), null, true); } protected List<URL> removeFromList(List<URL> list, String version) { List<URL> out = new ArrayList<>(list); for (URL url : list) { if (url.getFile().contains(version)) { out.remove(url); } } return out; } protected List<URL> getVersion(List<URL> list, String driver, String version) { List<URL> out = new ArrayList<>(); if (getDriverName().contains("MicrosoftWebDriver")) { int i = listVersions.indexOf(version); if (i != -1) { out.add(list.get(i)); } } for (URL url : list) { if (url.getFile().contains(driver) && url.getFile().contains(version) && !url.getFile().contains("-symbols")) { out.add(url); } } if (versionToDownload != null && !versionToDownload.equals(version)) { versionToDownload = version; log.info("Using {} {}", driver, version); } return out; } protected List<URL> checkLatest(List<URL> list, String driver) { log.trace("Checking the lastest version of {} with URL list {}", driver, list); List<URL> out = new ArrayList<>(); List<URL> copyOfList = new ArrayList<>(list); for (URL url : copyOfList) { try { handleDriver(url, driver, out); } catch (Exception e) { log.trace("There was a problem with URL {} : {}", url, 
e.getMessage()); list.remove(url); } } storeVersionToDownload(versionToDownload); latestVersion = versionToDownload; log.info("Latest version of {} is {}", driver, versionToDownload); return out; } protected void handleDriver(URL url, String driver, List<URL> out) { if (!config().isUseBetaVersions() && (url.getFile().toLowerCase().contains("beta"))) { return; } if (url.getFile().contains(driver)) { String currentVersion = getCurrentVersion(url, driver); if (currentVersion.equalsIgnoreCase(driver)) { return; } if (versionToDownload == null) { versionToDownload = currentVersion; } if (versionCompare(currentVersion, versionToDownload) > 0) { versionToDownload = currentVersion; out.clear(); } if (url.getFile().contains(versionToDownload)) { out.add(url); } } } protected boolean isUsingTaobaoMirror() { return getDriverUrl().getHost().equalsIgnoreCase("npm.taobao.org"); } protected Integer versionCompare(String str1, String str2) { String[] vals1 = str1.replaceAll("v", "").split("\\."); String[] vals2 = str2.replaceAll("v", "").split("\\."); if (vals1[0].equals("")) { vals1[0] = "0"; } if (vals2[0].equals("")) { vals2[0] = "0"; } int i = 0; while (i < vals1.length && i < vals2.length && vals1[i].equals(vals2[i])) { i++; } if (i < vals1.length && i < vals2.length) { return signum(valueOf(vals1[i]).compareTo(valueOf(vals2[i]))); } else { return signum(vals1.length - vals2.length); } } /** * This method works also for http://npm.taobao.org/ and * https://bitbucket.org/ mirrors. */ protected List<URL> getDriversFromMirror(URL driverUrl) throws IOException { if (mirrorLog) { log.info("Crawling driver list from mirror {}", driverUrl); mirrorLog = true; } else { log.trace("[Recursive call] Crawling driver list from mirror {}", driverUrl); } String driverStr = driverUrl.toString(); String driverUrlContent = driverUrl.getPath(); HttpResponse response = httpClient .execute(httpClient.createHttpGet(driverUrl)); try (InputStream in = response.getEntity().getContent()) { org.jsoup.nodes.Document doc = Jsoup.parse(in, null, ""); Iterator<org.jsoup.nodes.Element> iterator = doc.select("a") .iterator(); List<URL> urlList = new ArrayList<>(); while (iterator.hasNext()) { String link = iterator.next().attr("href"); if (link.contains("mirror") && link.endsWith(SLASH)) { urlList.addAll(getDriversFromMirror(new URL( driverStr + link.replace(driverUrlContent, "")))); } else if (link.startsWith(driverUrlContent) && !link.contains("icons")) { urlList.add(new URL( driverStr + link.replace(driverUrlContent, ""))); } } return urlList; } } protected List<URL> getDriversFromXml(URL driverUrl) throws IOException { log.info("Reading {} to seek {}", driverUrl, getDriverName()); List<URL> urls = new ArrayList<>(); HttpResponse response = httpClient .execute(httpClient.createHttpGet(driverUrl)); try { try (BufferedReader reader = new BufferedReader( new InputStreamReader(response.getEntity().getContent()))) { Document xml = loadXML(reader); NodeList nodes = (NodeList) newInstance().newXPath().evaluate( "//Contents/Key", xml.getDocumentElement(), NODESET); for (int i = 0; i < nodes.getLength(); ++i) { Element e = (Element) nodes.item(i); urls.add(new URL(driverUrl.toURI().resolve(".") + e.getChildNodes().item(0).getNodeValue())); } } } catch (Exception e) { throw new WebDriverManagerException(e); } return urls; } protected Document loadXML(Reader reader) throws SAXException, IOException, ParserConfigurationException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = 
factory.newDocumentBuilder(); InputSource is = new InputSource(reader); return builder.parse(is); } protected void exportDriver(String variableValue) { binaryPath = variableValue; Optional<String> exportParameter = getExportParameter(); if (!config.isAvoidExport() && exportParameter.isPresent()) { String variableName = exportParameter.get(); log.info("Exporting {} as {}", variableName, variableValue); System.setProperty(variableName, variableValue); } else { log.info("Resulting binary {}", variableValue); } } protected InputStream openGitHubConnection(URL driverUrl) throws IOException { HttpGet get = httpClient.createHttpGet(driverUrl); String gitHubTokenName = config().getGitHubTokenName(); String gitHubTokenSecret = config().getGitHubTokenSecret(); if (!isNullOrEmpty(gitHubTokenName) && !isNullOrEmpty(gitHubTokenSecret)) { String userpass = gitHubTokenName + ":" + gitHubTokenSecret; String basicAuth = "Basic " + new String(new Base64().encode(userpass.getBytes())); get.addHeader("Authorization", basicAuth); } return httpClient.execute(get).getEntity().getContent(); } protected List<URL> getDriversFromGitHub() throws IOException { List<URL> urls; URL driverUrl = getDriverUrl(); log.info("Reading {} to seek {}", driverUrl, getDriverName()); if (isUsingTaobaoMirror()) { urls = getDriversFromMirror(driverUrl); } else { String driverVersion = versionToDownload; try (BufferedReader reader = new BufferedReader( new InputStreamReader(openGitHubConnection(driverUrl)))) { GsonBuilder gsonBuilder = new GsonBuilder(); Gson gson = gsonBuilder.create(); GitHubApi[] releaseArray = gson.fromJson(reader, GitHubApi[].class); if (driverVersion != null) { releaseArray = new GitHubApi[] { getVersion(releaseArray, driverVersion) }; } urls = new ArrayList<>(); for (GitHubApi release : releaseArray) { if (release != null) { List<LinkedTreeMap<String, Object>> assets = release .getAssets(); for (LinkedTreeMap<String, Object> asset : assets) { urls.add(new URL(asset.get("browser_download_url") .toString())); } } } } } return urls; } protected GitHubApi getVersion(GitHubApi[] releaseArray, String version) { GitHubApi out = null; for (GitHubApi release : releaseArray) { log.trace("Get version {} of {}", version, release); if ((release.getName() != null && release.getName().contains(version)) || (release.getTagName() != null && release.getTagName().contains(version))) { out = release; break; } } return out; } protected HttpClient getHttpClient() { return httpClient; } protected FilenameFilter getFolderFilter() { return (dir, name) -> dir.isDirectory() && name.toLowerCase().contains(getDriverName()); } protected Optional<String> getDefaultBrowserVersion(String programFilesEnv, String winBrowserName, String linuxBrowserName, String macBrowserName, String versionFlag, String browserNameInOutput) { String browserBinaryPath = config().getBinaryPath(); if (IS_OS_WINDOWS) { String programFiles = System.getenv(programFilesEnv) .replaceAll("\\\\", "\\\\\\\\"); String browserPath = isNullOrEmpty(browserBinaryPath) ? programFiles + winBrowserName : browserBinaryPath; String browserVersionOutput = runAndWait(getExecFile(), "wmic", "datafile", "where", "name='" + browserPath + "'", "get", "Version", "/value"); if (!isNullOrEmpty(browserVersionOutput)) { return Optional .of(getVersionFromWmicOutput(browserVersionOutput)); } } else if (IS_OS_LINUX || IS_OS_MAC) { String browserPath; if (!isNullOrEmpty(browserBinaryPath)) { browserPath = browserBinaryPath; } else { browserPath = IS_OS_LINUX ? 
linuxBrowserName : macBrowserName; } String browserVersionOutput = runAndWait(browserPath, versionFlag); if (!isNullOrEmpty(browserVersionOutput)) { return Optional.of(getVersionFromPosixOutput( browserVersionOutput, browserNameInOutput)); } } return empty(); } protected File getExecFile() { String systemRoot = System.getenv("SystemRoot"); File system32 = new File(systemRoot, "System32"); if (IS_OS_WINDOWS && system32.exists() && system32.isDirectory()) { return system32; } return new File("."); } protected void reset() { config().reset(); mirrorLog = false; listVersions = null; versionToDownload = null; forcedArch = false; forcedOs = false; retry = true; isLatest = true; } protected String getProgramFilesEnv() { return System.getProperty("os.arch").contains("64") ? "PROGRAMFILES(X86)" : "PROGRAMFILES"; } protected URL getDriverUrlCkeckingMirror(URL url) { if (config().isUseMirror()) { Optional<URL> mirrorUrl = getMirrorUrl(); if (mirrorUrl.isPresent()) { return mirrorUrl.get(); } } return url; } public static void main(String[] args) { String validBrowsers = "chrome|firefox|opera|edge|phantomjs|iexplorer|selenium_server_standalone"; if (args.length <= 0) { logCliError(validBrowsers); } else { String arg = args[0]; if (arg.equalsIgnoreCase("server")) { startServer(args); } else if (arg.equalsIgnoreCase("clear-preferences")) { new Preferences(new Config()).clear(); } else { resolveLocal(validBrowsers, arg); } } } private static void resolveLocal(String validBrowsers, String arg) { log.info("Using WebDriverManager to resolve {}", arg); try { DriverManagerType driverManagerType = DriverManagerType .valueOf(arg.toUpperCase()); WebDriverManager wdm = WebDriverManager .getInstance(driverManagerType).avoidExport() .targetPath(".").forceDownload(); if (arg.equalsIgnoreCase("edge") || arg.equalsIgnoreCase("iexplorer")) { wdm.operatingSystem(WIN); } wdm.avoidOutputTree().setup(); } catch (Exception e) { log.error("Driver for {} not found (valid browsers {})", arg, validBrowsers); } } private static void startServer(String[] args) { int port = new Config().getServerPort(); if (args.length > 1 && isNumeric(args[1])) { port = parseInt(args[1]); } new Server(port); } private static void logCliError(String validBrowsers) { log.error("There are 3 options to run WebDriverManager CLI"); log.error( "1. WebDriverManager used to resolve binary drivers locally:"); log.error("\tWebDriverManager browserName"); log.error("\t(where browserName={})", validBrowsers); log.error("2. WebDriverManager as a server:"); log.error("\tWebDriverManager server <port>"); log.error("\t(where default port is 4041)"); log.error( "3. To clear previously resolved driver versions (as Java preferences):"); log.error("\tWebDriverManager clear-preferences"); } private void storeVersionToDownload(String version) { if (!isNullOrEmpty(version)) { if (version.startsWith(".")) { version = version.substring(1); } versionToDownload = version; if (isLatest && usePreferences() && !isNullOrEmpty(preferenceKey)) { preferences.putValueInPreferencesIfEmpty(preferenceKey, version); } } } private void setConfig(Config config) { this.config = config; } }
src/main/java/io/github/bonigarcia/wdm/WebDriverManager.java
/* * (C) Copyright 2015 Boni Garcia (http://bonigarcia.github.io/) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package io.github.bonigarcia.wdm; import static io.github.bonigarcia.wdm.Architecture.X32; import static io.github.bonigarcia.wdm.Architecture.X64; import static io.github.bonigarcia.wdm.Config.isNullOrEmpty; import static io.github.bonigarcia.wdm.DriverManagerType.CHROME; import static io.github.bonigarcia.wdm.DriverManagerType.EDGE; import static io.github.bonigarcia.wdm.DriverManagerType.FIREFOX; import static io.github.bonigarcia.wdm.DriverManagerType.IEXPLORER; import static io.github.bonigarcia.wdm.DriverManagerType.OPERA; import static io.github.bonigarcia.wdm.DriverManagerType.PHANTOMJS; import static io.github.bonigarcia.wdm.DriverManagerType.SELENIUM_SERVER_STANDALONE; import static io.github.bonigarcia.wdm.OperatingSystem.WIN; import static io.github.bonigarcia.wdm.Shell.getVersionFromPosixOutput; import static io.github.bonigarcia.wdm.Shell.getVersionFromWmicOutput; import static io.github.bonigarcia.wdm.Shell.runAndWait; import static java.lang.Integer.parseInt; import static java.lang.Integer.signum; import static java.lang.Integer.valueOf; import static java.lang.invoke.MethodHandles.lookup; import static java.util.Collections.sort; import static java.util.Optional.empty; import static javax.xml.xpath.XPathConstants.NODESET; import static javax.xml.xpath.XPathFactory.newInstance; import static org.apache.commons.io.FileUtils.deleteDirectory; import static org.apache.commons.io.FileUtils.listFiles; import static org.apache.commons.io.FilenameUtils.removeExtension; import static org.apache.commons.lang3.StringUtils.isNumeric; import static org.apache.commons.lang3.SystemUtils.IS_OS_LINUX; import static org.apache.commons.lang3.SystemUtils.IS_OS_MAC; import static org.apache.commons.lang3.SystemUtils.IS_OS_WINDOWS; import static org.slf4j.LoggerFactory.getLogger; import java.io.BufferedReader; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.net.URL; import java.util.ArrayList; import java.util.EnumMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.commons.codec.binary.Base64; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.jsoup.Jsoup; import org.slf4j.Logger; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.internal.LinkedTreeMap; /** * Parent driver manager. 
* * @author Boni Garcia ([email protected]) * @since 2.1.0 */ public abstract class WebDriverManager { static final Logger log = getLogger(lookup().lookupClass()); protected static final String SLASH = "/"; protected static final String INSIDERS = "insiders"; protected static final String BETA = "beta"; protected static final String ONLINE = "online"; protected static final String LOCAL = "local"; protected abstract List<URL> getDrivers() throws IOException; protected abstract Optional<String> getBrowserVersion(); protected abstract DriverManagerType getDriverManagerType(); protected abstract String getDriverName(); protected abstract void setDriverVersion(String version); protected abstract String getDriverVersion(); protected abstract void setDriverUrl(URL url); protected abstract URL getDriverUrl(); protected abstract Optional<URL> getMirrorUrl(); protected abstract Optional<String> getExportParameter(); protected static Map<DriverManagerType, WebDriverManager> instanceMap = new EnumMap<>( DriverManagerType.class); protected HttpClient httpClient; protected Downloader downloader; protected UrlFilter urlFilter; protected String versionToDownload; protected String downloadedVersion; protected String latestVersion; protected String binaryPath; protected boolean mirrorLog; protected List<String> listVersions; protected boolean forcedArch; protected boolean forcedOs; protected boolean isLatest; protected boolean retry = true; protected Config config = new Config(); protected Preferences preferences = new Preferences(config); protected String preferenceKey; protected Properties versionsProperties; public static Config globalConfig() { Config global = new Config(); global.setAvoidAutoReset(true); for (DriverManagerType type : DriverManagerType.values()) { WebDriverManager.getInstance(type).setConfig(global); } return global; } public Config config() { return config; } public static synchronized WebDriverManager chromedriver() { if (!instanceMap.containsKey(CHROME)) { instanceMap.put(CHROME, new ChromeDriverManager()); } return instanceMap.get(CHROME); } public static synchronized WebDriverManager firefoxdriver() { if (!instanceMap.containsKey(FIREFOX)) { instanceMap.put(FIREFOX, new FirefoxDriverManager()); } return instanceMap.get(FIREFOX); } public static synchronized WebDriverManager operadriver() { if (!instanceMap.containsKey(OPERA)) { instanceMap.put(OPERA, new OperaDriverManager()); } return instanceMap.get(OPERA); } public static synchronized WebDriverManager edgedriver() { if (!instanceMap.containsKey(EDGE)) { instanceMap.put(EDGE, new EdgeDriverManager()); } return instanceMap.get(EDGE); } public static synchronized WebDriverManager iedriver() { if (!instanceMap.containsKey(IEXPLORER)) { instanceMap.put(IEXPLORER, new InternetExplorerDriverManager()); } return instanceMap.get(IEXPLORER); } public static synchronized WebDriverManager phantomjs() { if (!instanceMap.containsKey(PHANTOMJS)) { instanceMap.put(PHANTOMJS, new PhantomJsDriverManager()); } return instanceMap.get(PHANTOMJS); } public static synchronized WebDriverManager seleniumServerStandalone() { if (!instanceMap.containsKey(SELENIUM_SERVER_STANDALONE)) { instanceMap.put(SELENIUM_SERVER_STANDALONE, new SeleniumServerStandaloneManager()); } return instanceMap.get(SELENIUM_SERVER_STANDALONE); } protected static synchronized WebDriverManager voiddriver() { return new VoidDriverManager(); } public static synchronized WebDriverManager getInstance( DriverManagerType driverManagerType) { if (driverManagerType == null) { return 
voiddriver(); } switch (driverManagerType) { case CHROME: return chromedriver(); case FIREFOX: return firefoxdriver(); case OPERA: return operadriver(); case IEXPLORER: return iedriver(); case EDGE: return edgedriver(); case PHANTOMJS: return phantomjs(); case SELENIUM_SERVER_STANDALONE: return seleniumServerStandalone(); default: return voiddriver(); } } public static synchronized WebDriverManager getInstance( Class<?> webDriverClass) { switch (webDriverClass.getName()) { case "org.openqa.selenium.chrome.ChromeDriver": return chromedriver(); case "org.openqa.selenium.firefox.FirefoxDriver": return firefoxdriver(); case "org.openqa.selenium.opera.OperaDriver": return operadriver(); case "org.openqa.selenium.ie.InternetExplorerDriver": return iedriver(); case "org.openqa.selenium.edge.EdgeDriver": return edgedriver(); case "org.openqa.selenium.phantomjs.PhantomJSDriver": return phantomjs(); default: return voiddriver(); } } public synchronized void setup() { if (getDriverManagerType() != null) { try { Architecture architecture = config().getArchitecture(); String driverVersion = getDriverVersion(); isLatest = isVersionLatest(driverVersion); manage(architecture, driverVersion); } finally { if (!config().isAvoidAutoReset()) { reset(); } } } } public WebDriverManager version(String version) { setDriverVersion(version); return instanceMap.get(getDriverManagerType()); } public WebDriverManager architecture(Architecture architecture) { config().setArchitecture(architecture); forcedArch = true; return instanceMap.get(getDriverManagerType()); } public WebDriverManager arch32() { architecture(X32); return instanceMap.get(getDriverManagerType()); } public WebDriverManager arch64() { architecture(X64); return instanceMap.get(getDriverManagerType()); } public WebDriverManager operatingSystem(OperatingSystem os) { config().setOs(os.name()); forcedOs = true; return instanceMap.get(getDriverManagerType()); } public WebDriverManager forceCache() { config().setForceCache(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager forceDownload() { config().setOverride(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager driverRepositoryUrl(URL url) { setDriverUrl(url); return instanceMap.get(getDriverManagerType()); } public WebDriverManager useMirror() { Optional<URL> mirrorUrl = getMirrorUrl(); if (!mirrorUrl.isPresent()) { throw new WebDriverManagerException("Mirror URL not available"); } config().setUseMirror(true); setDriverUrl(mirrorUrl.get()); return instanceMap.get(getDriverManagerType()); } public WebDriverManager proxy(String proxy) { config().setProxy(proxy); return instanceMap.get(getDriverManagerType()); } public WebDriverManager proxyUser(String proxyUser) { config().setProxyUser(proxyUser); return instanceMap.get(getDriverManagerType()); } public WebDriverManager proxyPass(String proxyPass) { config().setProxyPass(proxyPass); return instanceMap.get(getDriverManagerType()); } public WebDriverManager useBetaVersions() { config().setUseBetaVersions(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager ignoreVersions(String... 
versions) { config().setIgnoreVersions(versions); return instanceMap.get(getDriverManagerType()); } public WebDriverManager gitHubTokenName(String gitHubTokenName) { config().setGitHubTokenName(gitHubTokenName); return instanceMap.get(getDriverManagerType()); } public WebDriverManager gitHubTokenSecret(String gitHubTokenSecret) { config().setGitHubTokenSecret(gitHubTokenSecret); return instanceMap.get(getDriverManagerType()); } public WebDriverManager timeout(int timeout) { config().setTimeout(timeout); return instanceMap.get(getDriverManagerType()); } public WebDriverManager properties(String properties) { config().setProperties(properties); return instanceMap.get(getDriverManagerType()); } public WebDriverManager targetPath(String targetPath) { config().setTargetPath(targetPath); return instanceMap.get(getDriverManagerType()); } public WebDriverManager avoidExport() { config().setAvoidExport(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager avoidOutputTree() { config().setAvoidOutputTree(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager avoidAutoVersion() { config().setAvoidAutoVersion(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager avoidPreferences() { config().setAvoidPreferences(true); return instanceMap.get(getDriverManagerType()); } public WebDriverManager ttl(int seconds) { config().setTtl(seconds); return instanceMap.get(getDriverManagerType()); } public WebDriverManager browserPath(String browserPath) { config().setBinaryPath(browserPath); return instanceMap.get(getDriverManagerType()); } // ------------ public String getBinaryPath() { return instanceMap.get(getDriverManagerType()).binaryPath; } public String getDownloadedVersion() { return instanceMap.get(getDriverManagerType()).downloadedVersion; } public List<String> getVersions() { httpClient = new HttpClient(config()); try { List<URL> drivers = getDrivers(); List<String> versions = new ArrayList<>(); for (URL url : drivers) { String version = getCurrentVersion(url, getDriverName()); if (version.isEmpty() || version.equalsIgnoreCase("icons")) { continue; } if (version.startsWith(".")) { version = version.substring(1); } if (!versions.contains(version)) { versions.add(version); } } log.trace("Version list before sorting {}", versions); sort(versions, new VersionComparator()); return versions; } catch (IOException e) { throw new WebDriverManagerException(e); } } public void clearPreferences() { instanceMap.get(getDriverManagerType()).preferences.clear(); } public void clearCache() { String targetPath = config().getTargetPath(); try { log.debug("Clearing cache at {}", targetPath); deleteDirectory(new File(targetPath)); } catch (Exception e) { log.warn("Exception deleting cache at {}", targetPath, e); } } // ------------ protected String preDownload(String target, String version) { log.trace("Pre-download. 
target={}, version={}", target, version); return target; } protected File postDownload(File archive) { File parentFolder = archive.getParentFile(); File[] ls = parentFolder.listFiles(); for (File f : ls) { if (getDriverName().contains(removeExtension(f.getName()))) { log.trace("Found binary in post-download: {}", f); return f; } } throw new WebDriverManagerException("Driver " + getDriverName() + " not found (using temporal folder " + parentFolder + ")"); } protected String getCurrentVersion(URL url, String driverName) { String currentVersion = ""; try { currentVersion = url.getFile().substring( url.getFile().indexOf(SLASH) + 1, url.getFile().lastIndexOf(SLASH)); } catch (StringIndexOutOfBoundsException e) { log.trace("Exception getting version of URL {} ({})", url, e.getMessage()); } return currentVersion; } protected void manage(Architecture arch, String version) { httpClient = new HttpClient(config()); try (HttpClient wdmHttpClient = httpClient) { downloader = new Downloader(getDriverManagerType()); urlFilter = new UrlFilter(); boolean getLatest = isVersionLatest(version); boolean cache = config().isForceCache(); if (getLatest) { version = detectDriverVersionFromBrowser(); } getLatest = isNullOrEmpty(version); // For Edge if (checkInsiderVersion(version)) { return; } String os = config().getOs(); log.trace("Managing {} arch={} version={} getLatest={} cache={}", getDriverName(), arch, version, getLatest, cache); if (getLatest && latestVersion != null) { log.debug("Latest version of {} is {} (recently resolved)", getDriverName(), latestVersion); version = latestVersion; cache = true; } Optional<String> driverInCache = handleCache(arch, version, os, getLatest, cache); String versionStr = getLatest ? "(latest version)" : version; if (driverInCache.isPresent() && !config().isOverride()) { storeVersionToDownload(version); downloadedVersion = version; log.debug("Driver {} {} found in cache", getDriverName(), versionStr); exportDriver(driverInCache.get()); } else { List<URL> candidateUrls = filterCandidateUrls(arch, version, getLatest); if (candidateUrls.isEmpty()) { String errorMessage = getDriverName() + " " + versionStr + " for " + os + arch.toString() + " not found in " + getDriverUrl(); log.error(errorMessage); throw new WebDriverManagerException(errorMessage); } downloadCandidateUrls(candidateUrls); } } catch (Exception e) { handleException(e, arch, version); } } private String detectDriverVersionFromBrowser() { String version = ""; if (config().isAvoidAutoVersion()) { return version; } Optional<String> optionalBrowserVersion = getBrowserVersion(); if (optionalBrowserVersion.isPresent()) { String browserVersion = optionalBrowserVersion.get(); log.trace("Detected {} version {}", getDriverManagerType(), browserVersion); preferenceKey = getDriverManagerType().name().toLowerCase() + browserVersion; if (usePreferences() && preferences.checkKeyInPreferences(preferenceKey)) { // Get driver version from preferences version = preferences.getValueFromPreferences(preferenceKey); } else { // Get driver version from properties version = getVersionForInstalledBrowser(browserVersion); } if (!isNullOrEmpty(version)) { log.info( "Using {} {} (since {} {} is installed in your machine)", getDriverName(), version, getDriverManagerType(), browserVersion); } } else { log.debug( "The proper {} version for your {} is unknown ... 
trying with the latest", getDriverName(), getDriverManagerType()); } return version; } private boolean usePreferences() { boolean usePrefs = !config().isAvoidPreferences() && !config().isOverride() && !forcedArch && !forcedOs; log.trace("Using preferences {}", usePrefs); return usePrefs; } private boolean checkInsiderVersion(String version) { if (version.equals(INSIDERS)) { String systemRoot = System.getenv("SystemRoot"); File microsoftWebDriverFile = new File(systemRoot, "System32" + File.separator + "MicrosoftWebDriver.exe"); if (microsoftWebDriverFile.exists()) { downloadedVersion = INSIDERS; exportDriver(microsoftWebDriverFile.toString()); return true; } else { retry = false; throw new WebDriverManagerException( "MicrosoftWebDriver.exe should be installed in an elevated command prompt executing: " + "dism /Online /Add-Capability /CapabilityName:Microsoft.WebDriver~~~~0.0.1.0"); } } return false; } private boolean isVersionLatest(String version) { return isNullOrEmpty(version) || version.equalsIgnoreCase("latest"); } private String getVersionForInstalledBrowser(String browserVersion) { String driverVersion = ""; DriverManagerType driverManagerType = getDriverManagerType(); String driverLowerCase = driverManagerType.name().toLowerCase(); Optional<String> driverVersionForBrowser = getDriverVersionForBrowserFromProperties( driverLowerCase + browserVersion, false); if (driverVersionForBrowser.isPresent()) { driverVersion = driverVersionForBrowser.get(); } else { log.debug( "The driver version for {} {} is unknown ... trying with latest", driverManagerType, browserVersion); } return driverVersion; } private Optional<String> getDriverVersionForBrowserFromProperties( String key, boolean online) { String onlineMessage = online ? ONLINE : LOCAL; log.trace("Getting driver version from {} properties for {}", onlineMessage, key); String value = getVersionFromProperties(online).getProperty(key); if (value == null) { log.debug("Driver for {} not found in {} properties", key, onlineMessage); versionsProperties = null; value = getVersionFromProperties(!online).getProperty(key); } return value == null ? empty() : Optional.of(value); } private Properties getVersionFromProperties(boolean online) { if (versionsProperties != null) { log.trace("Already created versions.properties"); return versionsProperties; } else { try { InputStream inputStream = getVersionsInputStream(online); versionsProperties = new Properties(); versionsProperties.load(inputStream); inputStream.close(); } catch (Exception e) { versionsProperties = null; throw new IllegalStateException( "Cannot read versions.properties", e); } return versionsProperties; } } private InputStream getVersionsInputStream(boolean online) throws IOException { String onlineMessage = online ? ONLINE : LOCAL; log.trace("Reading {} version.properties to find out driver version", onlineMessage); InputStream inputStream; try { if (online) { inputStream = getOnlineVersionsInputStream(); } else { inputStream = getLocalVersionsInputStream(); } } catch (Exception e) { String exceptionMessage = online ? 
LOCAL : ONLINE; log.warn("Error reading version.properties, using {} instead", exceptionMessage); if (online) { inputStream = getLocalVersionsInputStream(); } else { inputStream = getOnlineVersionsInputStream(); } } return inputStream; } private InputStream getLocalVersionsInputStream() { InputStream inputStream; inputStream = Config.class.getResourceAsStream("/versions.properties"); return inputStream; } private InputStream getOnlineVersionsInputStream() throws IOException { return httpClient .execute(httpClient .createHttpGet(config().getVersionsPropertiesUrl())) .getEntity().getContent(); } protected void handleException(Exception e, Architecture arch, String version) { String versionStr = isNullOrEmpty(version) ? "(latest version)" : version; String errorMessage = String.format( "There was an error managing %s %s (%s)", getDriverName(), versionStr, e.getMessage()); if (!config().isForceCache() && retry) { config().setForceCache(true); config().setUseMirror(true); retry = false; log.warn("{} ... trying again using cache and mirror", errorMessage); manage(arch, version); } else { log.error("{}", errorMessage, e); throw new WebDriverManagerException(e); } } protected void downloadCandidateUrls(List<URL> candidateUrls) throws IOException, InterruptedException { URL url = candidateUrls.iterator().next(); String exportValue = downloader.download(url, versionToDownload, getDriverName()); exportDriver(exportValue); downloadedVersion = versionToDownload; } protected List<URL> filterCandidateUrls(Architecture arch, String version, boolean getLatest) throws IOException { List<URL> urls = getDrivers(); List<URL> candidateUrls; log.trace("All URLs: {}", urls); boolean continueSearchingVersion; do { // Get the latest or concrete version candidateUrls = getLatest ? checkLatest(urls, getDriverName()) : getVersion(urls, getDriverName(), version); log.trace("Candidate URLs: {}", candidateUrls); if (versionToDownload == null || this.getClass().equals(EdgeDriverManager.class)) { break; } // Filter by OS if (!getDriverName().equalsIgnoreCase("IEDriverServer") && !getDriverName() .equalsIgnoreCase("selenium-server-standalone")) { candidateUrls = urlFilter.filterByOs(candidateUrls, config().getOs()); } // Filter by architecture candidateUrls = urlFilter.filterByArch(candidateUrls, arch, forcedArch); // Filter by distro candidateUrls = filterByDistro(candidateUrls); // Filter by ignored versions candidateUrls = filterByIgnoredVersions(candidateUrls); // Find out if driver version has been found or not continueSearchingVersion = candidateUrls.isEmpty() && getLatest; if (continueSearchingVersion) { log.info( "No binary found for {} {} ... 
seeking another version", getDriverName(), versionToDownload); urls = removeFromList(urls, versionToDownload); versionToDownload = null; } } while (continueSearchingVersion); return candidateUrls; } protected List<URL> filterByIgnoredVersions(List<URL> candidateUrls) { if (config().getIgnoreVersions() != null && !candidateUrls.isEmpty()) { candidateUrls = urlFilter.filterByIgnoredVersions(candidateUrls, config().getIgnoreVersions()); } return candidateUrls; } protected List<URL> filterByDistro(List<URL> candidateUrls) throws IOException { // Filter phantomjs 2.5.0 in Linux if (config().getOs().equalsIgnoreCase("linux") && getDriverName().contains("phantomjs")) { candidateUrls = urlFilter.filterByDistro(candidateUrls, "2.5.0"); } return candidateUrls; } protected Optional<String> handleCache(Architecture arch, String version, String os, boolean getLatest, boolean cache) { Optional<String> driverInCache = empty(); if (cache || !getLatest) { driverInCache = getDriverFromCache(version, arch, os); } storeVersionToDownload(version); return driverInCache; } protected Optional<String> getDriverFromCache(String driverVersion, Architecture arch, String os) { log.trace("Checking if {} exists in cache", getDriverName()); List<File> filesInCache = getFilesInCache(); if (!filesInCache.isEmpty()) { // Filter by name filesInCache = filterCacheBy(filesInCache, getDriverName()); // Filter by version filesInCache = filterCacheBy(filesInCache, driverVersion); // Filter by OS if (!getDriverName().equals("MicrosoftWebDriver")) { filesInCache = filterCacheBy(filesInCache, os.toLowerCase()); } if (filesInCache.size() == 1) { return Optional.of(filesInCache.get(0).toString()); } // Filter by arch filesInCache = filterCacheBy(filesInCache, arch.toString()); if (!filesInCache.isEmpty()) { return Optional.of( filesInCache.get(filesInCache.size() - 1).toString()); } } log.trace("{} not found in cache", getDriverName()); return empty(); } protected List<File> filterCacheBy(List<File> input, String key) { List<File> output = new ArrayList<>(input); if (!key.isEmpty() && !input.isEmpty()) { for (File f : input) { if (!f.toString().contains(key)) { output.remove(f); } } } log.trace("Filter cache by {} -- input list {} -- output list {} ", key, input, output); return output; } protected List<File> getFilesInCache() { return (List<File>) listFiles(new File(downloader.getTargetPath()), null, true); } protected List<URL> removeFromList(List<URL> list, String version) { List<URL> out = new ArrayList<>(list); for (URL url : list) { if (url.getFile().contains(version)) { out.remove(url); } } return out; } protected List<URL> getVersion(List<URL> list, String driver, String version) { List<URL> out = new ArrayList<>(); if (getDriverName().contains("MicrosoftWebDriver")) { int i = listVersions.indexOf(version); if (i != -1) { out.add(list.get(i)); } } for (URL url : list) { if (url.getFile().contains(driver) && url.getFile().contains(version) && !url.getFile().contains("-symbols")) { out.add(url); } } if (versionToDownload != null && !versionToDownload.equals(version)) { versionToDownload = version; log.info("Using {} {}", driver, version); } return out; } protected List<URL> checkLatest(List<URL> list, String driver) { log.trace("Checking the lastest version of {} with URL list {}", driver, list); List<URL> out = new ArrayList<>(); List<URL> copyOfList = new ArrayList<>(list); for (URL url : copyOfList) { try { handleDriver(url, driver, out); } catch (Exception e) { log.trace("There was a problem with URL {} : {}", url, 
e.getMessage()); list.remove(url); } } storeVersionToDownload(versionToDownload); latestVersion = versionToDownload; log.info("Latest version of {} is {}", driver, versionToDownload); return out; } protected void handleDriver(URL url, String driver, List<URL> out) { if (!config().isUseBetaVersions() && (url.getFile().toLowerCase().contains("beta"))) { return; } if (url.getFile().contains(driver)) { String currentVersion = getCurrentVersion(url, driver); if (currentVersion.equalsIgnoreCase(driver)) { return; } if (versionToDownload == null) { versionToDownload = currentVersion; } if (versionCompare(currentVersion, versionToDownload) > 0) { versionToDownload = currentVersion; out.clear(); } if (url.getFile().contains(versionToDownload)) { out.add(url); } } } protected boolean isUsingTaobaoMirror() { return getDriverUrl().getHost().equalsIgnoreCase("npm.taobao.org"); } protected Integer versionCompare(String str1, String str2) { String[] vals1 = str1.replaceAll("v", "").split("\\."); String[] vals2 = str2.replaceAll("v", "").split("\\."); if (vals1[0].equals("")) { vals1[0] = "0"; } if (vals2[0].equals("")) { vals2[0] = "0"; } int i = 0; while (i < vals1.length && i < vals2.length && vals1[i].equals(vals2[i])) { i++; } if (i < vals1.length && i < vals2.length) { return signum(valueOf(vals1[i]).compareTo(valueOf(vals2[i]))); } else { return signum(vals1.length - vals2.length); } } /** * This method works also for http://npm.taobao.org/ and * https://bitbucket.org/ mirrors. */ protected List<URL> getDriversFromMirror(URL driverUrl) throws IOException { if (mirrorLog) { log.info("Crawling driver list from mirror {}", driverUrl); mirrorLog = true; } else { log.trace("[Recursive call] Crawling driver list from mirror {}", driverUrl); } String driverStr = driverUrl.toString(); String driverUrlContent = driverUrl.getPath(); HttpResponse response = httpClient .execute(httpClient.createHttpGet(driverUrl)); try (InputStream in = response.getEntity().getContent()) { org.jsoup.nodes.Document doc = Jsoup.parse(in, null, ""); Iterator<org.jsoup.nodes.Element> iterator = doc.select("a") .iterator(); List<URL> urlList = new ArrayList<>(); while (iterator.hasNext()) { String link = iterator.next().attr("href"); if (link.contains("mirror") && link.endsWith(SLASH)) { urlList.addAll(getDriversFromMirror(new URL( driverStr + link.replace(driverUrlContent, "")))); } else if (link.startsWith(driverUrlContent) && !link.contains("icons")) { urlList.add(new URL( driverStr + link.replace(driverUrlContent, ""))); } } return urlList; } } protected List<URL> getDriversFromXml(URL driverUrl) throws IOException { log.info("Reading {} to seek {}", driverUrl, getDriverName()); List<URL> urls = new ArrayList<>(); HttpResponse response = httpClient .execute(httpClient.createHttpGet(driverUrl)); try { try (BufferedReader reader = new BufferedReader( new InputStreamReader(response.getEntity().getContent()))) { Document xml = loadXML(reader); NodeList nodes = (NodeList) newInstance().newXPath().evaluate( "//Contents/Key", xml.getDocumentElement(), NODESET); for (int i = 0; i < nodes.getLength(); ++i) { Element e = (Element) nodes.item(i); urls.add(new URL(driverUrl + e.getChildNodes().item(0).getNodeValue())); } } } catch (Exception e) { throw new WebDriverManagerException(e); } return urls; } protected Document loadXML(Reader reader) throws SAXException, IOException, ParserConfigurationException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); 
InputSource is = new InputSource(reader); return builder.parse(is); } protected void exportDriver(String variableValue) { binaryPath = variableValue; Optional<String> exportParameter = getExportParameter(); if (!config.isAvoidExport() && exportParameter.isPresent()) { String variableName = exportParameter.get(); log.info("Exporting {} as {}", variableName, variableValue); System.setProperty(variableName, variableValue); } else { log.info("Resulting binary {}", variableValue); } } protected InputStream openGitHubConnection(URL driverUrl) throws IOException { HttpGet get = httpClient.createHttpGet(driverUrl); String gitHubTokenName = config().getGitHubTokenName(); String gitHubTokenSecret = config().getGitHubTokenSecret(); if (!isNullOrEmpty(gitHubTokenName) && !isNullOrEmpty(gitHubTokenSecret)) { String userpass = gitHubTokenName + ":" + gitHubTokenSecret; String basicAuth = "Basic " + new String(new Base64().encode(userpass.getBytes())); get.addHeader("Authorization", basicAuth); } return httpClient.execute(get).getEntity().getContent(); } protected List<URL> getDriversFromGitHub() throws IOException { List<URL> urls; URL driverUrl = getDriverUrl(); log.info("Reading {} to seek {}", driverUrl, getDriverName()); if (isUsingTaobaoMirror()) { urls = getDriversFromMirror(driverUrl); } else { String driverVersion = versionToDownload; try (BufferedReader reader = new BufferedReader( new InputStreamReader(openGitHubConnection(driverUrl)))) { GsonBuilder gsonBuilder = new GsonBuilder(); Gson gson = gsonBuilder.create(); GitHubApi[] releaseArray = gson.fromJson(reader, GitHubApi[].class); if (driverVersion != null) { releaseArray = new GitHubApi[] { getVersion(releaseArray, driverVersion) }; } urls = new ArrayList<>(); for (GitHubApi release : releaseArray) { if (release != null) { List<LinkedTreeMap<String, Object>> assets = release .getAssets(); for (LinkedTreeMap<String, Object> asset : assets) { urls.add(new URL(asset.get("browser_download_url") .toString())); } } } } } return urls; } protected GitHubApi getVersion(GitHubApi[] releaseArray, String version) { GitHubApi out = null; for (GitHubApi release : releaseArray) { log.trace("Get version {} of {}", version, release); if ((release.getName() != null && release.getName().contains(version)) || (release.getTagName() != null && release.getTagName().contains(version))) { out = release; break; } } return out; } protected HttpClient getHttpClient() { return httpClient; } protected FilenameFilter getFolderFilter() { return (dir, name) -> dir.isDirectory() && name.toLowerCase().contains(getDriverName()); } protected Optional<String> getDefaultBrowserVersion(String programFilesEnv, String winBrowserName, String linuxBrowserName, String macBrowserName, String versionFlag, String browserNameInOutput) { String browserBinaryPath = config().getBinaryPath(); if (IS_OS_WINDOWS) { String programFiles = System.getenv(programFilesEnv) .replaceAll("\\\\", "\\\\\\\\"); String browserPath = isNullOrEmpty(browserBinaryPath) ? programFiles + winBrowserName : browserBinaryPath; String browserVersionOutput = runAndWait(getExecFile(), "wmic", "datafile", "where", "name='" + browserPath + "'", "get", "Version", "/value"); if (!isNullOrEmpty(browserVersionOutput)) { return Optional .of(getVersionFromWmicOutput(browserVersionOutput)); } } else if (IS_OS_LINUX || IS_OS_MAC) { String browserPath; if (!isNullOrEmpty(browserBinaryPath)) { browserPath = browserBinaryPath; } else { browserPath = IS_OS_LINUX ? 
linuxBrowserName : macBrowserName; } String browserVersionOutput = runAndWait(browserPath, versionFlag); if (!isNullOrEmpty(browserVersionOutput)) { return Optional.of(getVersionFromPosixOutput( browserVersionOutput, browserNameInOutput)); } } return empty(); } protected File getExecFile() { String systemRoot = System.getenv("SystemRoot"); File system32 = new File(systemRoot, "System32"); if (IS_OS_WINDOWS && system32.exists() && system32.isDirectory()) { return system32; } return new File("."); } protected void reset() { config().reset(); mirrorLog = false; listVersions = null; versionToDownload = null; forcedArch = false; forcedOs = false; retry = true; isLatest = true; } protected String getProgramFilesEnv() { return System.getProperty("os.arch").contains("64") ? "PROGRAMFILES(X86)" : "PROGRAMFILES"; } protected URL getDriverUrlCkeckingMirror(URL url) { if (config().isUseMirror()) { Optional<URL> mirrorUrl = getMirrorUrl(); if (mirrorUrl.isPresent()) { return mirrorUrl.get(); } } return url; } public static void main(String[] args) { String validBrowsers = "chrome|firefox|opera|edge|phantomjs|iexplorer|selenium_server_standalone"; if (args.length <= 0) { logCliError(validBrowsers); } else { String arg = args[0]; if (arg.equalsIgnoreCase("server")) { startServer(args); } else if (arg.equalsIgnoreCase("clear-preferences")) { new Preferences(new Config()).clear(); } else { resolveLocal(validBrowsers, arg); } } } private static void resolveLocal(String validBrowsers, String arg) { log.info("Using WebDriverManager to resolve {}", arg); try { DriverManagerType driverManagerType = DriverManagerType .valueOf(arg.toUpperCase()); WebDriverManager wdm = WebDriverManager .getInstance(driverManagerType).avoidExport() .targetPath(".").forceDownload(); if (arg.equalsIgnoreCase("edge") || arg.equalsIgnoreCase("iexplorer")) { wdm.operatingSystem(WIN); } wdm.avoidOutputTree().setup(); } catch (Exception e) { log.error("Driver for {} not found (valid browsers {})", arg, validBrowsers); } } private static void startServer(String[] args) { int port = new Config().getServerPort(); if (args.length > 1 && isNumeric(args[1])) { port = parseInt(args[1]); } new Server(port); } private static void logCliError(String validBrowsers) { log.error("There are 3 options to run WebDriverManager CLI"); log.error( "1. WebDriverManager used to resolve binary drivers locally:"); log.error("\tWebDriverManager browserName"); log.error("\t(where browserName={})", validBrowsers); log.error("2. WebDriverManager as a server:"); log.error("\tWebDriverManager server <port>"); log.error("\t(where default port is 4041)"); log.error( "3. To clear previously resolved driver versions (as Java preferences):"); log.error("\tWebDriverManager clear-preferences"); } private void storeVersionToDownload(String version) { if (!isNullOrEmpty(version)) { if (version.startsWith(".")) { version = version.substring(1); } versionToDownload = version; if (isLatest && usePreferences() && !isNullOrEmpty(preferenceKey)) { preferences.putValueInPreferencesIfEmpty(preferenceKey, version); } } } private void setConfig(Config config) { this.config = config; } }
chromedriver storage non-directory url path support
src/main/java/io/github/bonigarcia/wdm/WebDriverManager.java
chromedriver storage non-directory url path support
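Note on this row: the functional change between old_contents and new_contents is confined to getDriversFromXml — each <Key> from the XML listing is now appended to driverUrl.toURI().resolve(".") instead of being concatenated onto driverUrl directly, so a storage URL whose path does not end in a directory separator still produces correct download links. Below is a minimal standalone sketch of that resolution idea; the storage URL and key are illustrative assumptions, not values from the row.

import java.net.URI;
import java.net.URL;

public class ResolveSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical storage URL whose path ends in a file rather than a directory
        URL driverUrl = new URL("https://chromedriver.storage.googleapis.com/index.html");
        String key = "2.46/chromedriver_linux64.zip"; // illustrative <Key> value

        // Old behaviour: plain concatenation keeps the trailing non-directory segment
        System.out.println(new URL(driverUrl + key));
        // -> https://chromedriver.storage.googleapis.com/index.html2.46/chromedriver_linux64.zip

        // New behaviour: resolving "." drops the last path segment, leaving only the
        // directory part (and is a no-op when the path already ends in "/")
        System.out.println(new URL(driverUrl.toURI().resolve(".") + key));
        // -> https://chromedriver.storage.googleapis.com/2.46/chromedriver_linux64.zip
    }
}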
Java
apache-2.0
11d53b83603320ebdfc28243a5345a49f983134a
0
TanayParikh/foam2,TanayParikh/foam2,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,TanayParikh/foam2,jacksonic/vjlofvhjfgm,TanayParikh/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,jacksonic/vjlofvhjfgm
/** * @license * Copyright 2017 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ package foam.lib.json; import foam.lib.parse.*; public class StringParser implements Parser { private char delim_; private char escape_; public StringParser() { escape_ = '\\'; } public PStream parse(PStream ps, ParserContext x) { delim_ = ps.head(); if ( delim_ != '"' && delim_ != '\'' ) { return null; } ps = ps.tail(); char lastc = delim_; StringBuilder sb = new StringBuilder(); while ( ps.valid() ) { char c = ps.head(); if ( c == delim_ && lastc != escape_ ) { break; } if ( c != escape_ ) sb.append(c); if ( c == '\\' && ps.tail().head() == 'u' ) { Parser unicodeParser = new UnicodeParser(); PStream unicodePS = unicodeParser.parse(ps, x); if ( unicodePS != null ) { sb.append(unicodePS.value()); ps = unicodePS; c = ((Character) unicodePS.value()).charValue(); } else { ps = ps.tail(); } } else { ps = ps.tail(); } lastc = c; } return ps.tail().setValue(sb.toString()); } }
src/foam/lib/json/StringParser.java
/** * @license * Copyright 2017 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ package foam.lib.json; import foam.lib.parse.*; public class StringParser implements Parser { private char delim_; private char escape_; public StringParser() { escape_ = '\\'; } public PStream parse(PStream ps, ParserContext x) { delim_ = ps.head(); if ( delim_ != '"' && delim_ != '\'' ) { return null; } ps = ps.tail(); char lastc = delim_; StringBuilder sb = new StringBuilder(); while ( ps.valid() ) { char c = ps.head(); if ( c == delim_ && lastc != escape_ ) { break; } if ( c != escape_ ) sb.append(c); lastc = c; ps = ps.tail(); } return ps.tail().setValue(sb.toString()); } }
Added unicode parsing support for Java StringParser
src/foam/lib/json/StringParser.java
Added unicode parsing support for Java StringParser
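Note on this row: the change adds handling of backslash-u escapes to the FOAM JSON StringParser by delegating to a UnicodeParser whenever a backslash followed by 'u' is seen. The sketch below is not the FOAM API; it is a self-contained illustration of the same decoding step (four hex digits after the escape become one char). The class name, helper name, and sample input are assumptions, and malformed escapes are not handled.

public class UnicodeEscapeSketch {

    // Decodes JSON-style unicode escapes: a backslash, then 'u', then four hex digits.
    // All other characters (including other escape sequences) are copied through unchanged.
    static String decodeUnicodeEscapes(String s) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            if (c == '\\' && i + 5 < s.length() && s.charAt(i + 1) == 'u') {
                // Convert the four hex digits into a single UTF-16 code unit
                int code = Integer.parseInt(s.substring(i + 2, i + 6), 16);
                sb.append((char) code);
                i += 5; // skip past 'u' and the four hex digits
            } else {
                sb.append(c);
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        // The escaped input stands in for what the parser would see between quotes
        System.out.println(decodeUnicodeEscapes("caf\\u00e9")); // prints: café
    }
}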
Java
apache-2.0
eea2a9840b1d7d2fc6f53fccd07a65652738952a
0
aegif/nemakiware-android-client
package de.fmaul.android.cmis; import java.util.ArrayList; import android.app.ListActivity; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.PreferenceManager; import android.view.ContextMenu; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ContextMenu.ContextMenuInfo; import android.widget.AdapterView; import android.widget.ListView; import android.widget.Toast; import de.fmaul.android.cmis.repo.CmisDBAdapter; import de.fmaul.android.cmis.repo.Server; public class ServerActivity extends ListActivity { private CmisDBAdapter cmisDbAdapter; private CmisServersAdapter cmisSAdapter; private SharedPreferences preferences; private SharedPreferences.Editor editor; private ArrayList<Server> listServer; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); preferences = PreferenceManager.getDefaultSharedPreferences(this); editor = preferences.edit(); setContentView(R.layout.server); createServerList(); registerForContextMenu(getListView()); } public void createServerList(){ cmisDbAdapter = new CmisDBAdapter(this); cmisDbAdapter.open(); listServer = cmisDbAdapter.getAllServers(); cmisSAdapter = new CmisServersAdapter(this, R.layout.server_row, listServer); setListAdapter(cmisSAdapter); cmisDbAdapter.close(); } public boolean onCreateOptionsMenu(Menu menu){ super.onCreateOptionsMenu(menu); MenuItem menuItem = menu.add(Menu.NONE, 1, 0, R.string.menu_item_server_add); menuItem.setIcon(R.drawable.add); menuItem = menu.add(Menu.NONE, 2, 0, R.string.quit); menuItem.setIcon(R.drawable.quit); return true; } @Override public boolean onOptionsItemSelected(MenuItem item){ switch(item.getItemId()){ case 1: startActivity(new Intent(this,ServerEditActivity.class)); return true; case 2: this.finish(); return true; } return super.onOptionsItemSelected(item); } protected void onListItemClick(ListView l, View v, int position, long id) { Server s = listServer.get(position); if (s != null){ Intent intent = new Intent(this, ListCmisFeedActivity.class); intent.putExtra("isFirstStart", true); intent.putExtra("title", s.getName()); editor.putLong("serverID", s.getId()); editor.putString("serverName", s.getName()); editor.putString("serverURL", s.getUrl()); editor.putString("username", s.getUsername()); editor.putString("password", s.getPassword()); editor.putString("workspace", s.getWorkspace()); editor.commit(); startActivity(intent); } else { Toast.makeText(this, "ERROR", 3); } } public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) { super.onCreateContextMenu(menu, v, menuInfo); menu.setHeaderIcon(android.R.drawable.ic_menu_more); menu.setHeaderTitle(this.getString(R.string.context_menu_title)); menu.add(0, 1, Menu.NONE, getString(R.string.edit)); menu.add(0, 2, Menu.NONE, getString(R.string.delete)); } @Override public boolean onContextItemSelected(MenuItem menuItem) { AdapterView.AdapterContextMenuInfo menuInfo; try { menuInfo = (AdapterView.AdapterContextMenuInfo) menuItem.getMenuInfo(); } catch (ClassCastException e) { return false; } Server server = (Server) getListView().getItemAtPosition(menuInfo.position); switch (menuItem.getItemId()) { case 1: if (server != null) { editServer(server); } return true; case 2: if (server != null) { deleteServer(server.getId()); } return true; default: return super.onContextItemSelected(menuItem); } 
} public void deleteServer(long id){ cmisDbAdapter = new CmisDBAdapter(this); cmisDbAdapter.open(); if (cmisDbAdapter.deleteServer(id)){ Toast.makeText(this, this.getString(R.string.server_delete), Toast.LENGTH_LONG).show(); createServerList(); }else{ Toast.makeText(this, this.getString(R.string.server_delete_error), Toast.LENGTH_LONG).show(); } cmisDbAdapter.close(); } public void editServer(Server server){ Intent intent = new Intent(this, ServerEditActivity.class); intent.putExtra("server", server); startActivity(intent); } }
src/de/fmaul/android/cmis/ServerActivity.java
package de.fmaul.android.cmis; import java.util.ArrayList; import android.app.ListActivity; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.PreferenceManager; import android.view.ContextMenu; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ContextMenu.ContextMenuInfo; import android.widget.AdapterView; import android.widget.ListView; import android.widget.Toast; import de.fmaul.android.cmis.repo.CmisDBAdapter; import de.fmaul.android.cmis.repo.Server; public class ServerActivity extends ListActivity { private CmisDBAdapter cmisDbAdapter; private CmisServersAdapter cmisSAdapter; private SharedPreferences preferences; private SharedPreferences.Editor editor; private ArrayList<Server> listServer; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); preferences = PreferenceManager.getDefaultSharedPreferences(this); editor = preferences.edit(); setContentView(R.layout.server); createServerList(); registerForContextMenu(getListView()); } public void createServerList(){ cmisDbAdapter = new CmisDBAdapter(this); cmisDbAdapter.open(); listServer = cmisDbAdapter.getAllServers(); cmisSAdapter = new CmisServersAdapter(this, R.layout.server_row, listServer); setListAdapter(cmisSAdapter); cmisDbAdapter.close(); } public boolean onCreateOptionsMenu(Menu menu){ super.onCreateOptionsMenu(menu); MenuItem menuItem = menu.add(Menu.NONE, 1, 0, R.string.menu_item_server_add); menuItem.setIcon(R.drawable.add); menuItem = menu.add(Menu.NONE, 2, 0, R.string.quit); menuItem.setIcon(R.drawable.quit); return true; } @Override public boolean onOptionsItemSelected(MenuItem item){ switch(item.getItemId()){ case 1: startActivity(new Intent(this,ServerEditActivity.class)); return true; case 2: this.finish(); return true; } return super.onOptionsItemSelected(item); } protected void onListItemClick(ListView l, View v, int position, long id) { Server s = listServer.get(position); if (s != null){ Intent intent = new Intent(this, ListCmisFeedActivity.class); intent.putExtra("isFirstStart", true); intent.putExtra("title", s.getName()); editor.putLong("serverID", s.getId()); editor.putString("serverName", s.getName()); editor.putString("serverURL", s.getUrl()); editor.putString("username", s.getUsername()); editor.putString("password", s.getPassword()); editor.putString("workspace", s.getWorkspace()); editor.commit(); startActivity(intent); } else { Toast.makeText(this, "ERROR", 3); } } public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) { super.onCreateContextMenu(menu, v, menuInfo); menu.setHeaderIcon(android.R.drawable.ic_menu_more); menu.setHeaderTitle(this.getString(R.string.context_menu_title)); menu.add(0, 1, Menu.NONE, getString(R.string.edit)); menu.add(0, 2, Menu.NONE, getString(R.string.delete)); } @Override public boolean onContextItemSelected(MenuItem menuItem) { AdapterView.AdapterContextMenuInfo menuInfo; try { menuInfo = (AdapterView.AdapterContextMenuInfo) menuItem.getMenuInfo(); } catch (ClassCastException e) { return false; } Server server = (Server) getListView().getItemAtPosition(menuInfo.position); switch (menuItem.getItemId()) { case 1: if (server != null) { editServer(server); } return true; case 2: if (server != null) { deleteServer(server.getId()); } return true; default: return super.onContextItemSelected(menuItem); } 
} public void deleteServer(long id){ cmisDbAdapter = new CmisDBAdapter(this); cmisDbAdapter.open(); if (cmisDbAdapter.deleteServer(id)){ Toast.makeText(this, this.getString(R.string.server_delete), Toast.LENGTH_LONG).show(); createServerList(); }else{ Toast.makeText(this, this.getString(R.string.server_delete_error), Toast.LENGTH_LONG).show(); } cmisDbAdapter.close(); } public void editServer(Server server){ Intent intent = new Intent(this, ServerEditActivity.class); intent.putExtra("server", server); this.finish(); startActivity(intent); } }
Change behaviour of Back button in edit Server screen
src/de/fmaul/android/cmis/ServerActivity.java
Change behaviour of Back button in edit Server screen
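Note on this row: the only difference between old_contents and new_contents is that editServer no longer calls this.finish() before launching ServerEditActivity, which is what changes the Back-button behaviour — ServerActivity stays on the activity back stack. Shown below as an excerpt of the new method with an added explanatory comment; Server and ServerEditActivity are the project's own classes, so this fragment is not runnable on its own.

public void editServer(Server server) {
    Intent intent = new Intent(this, ServerEditActivity.class);
    intent.putExtra("server", server);
    // finish() is intentionally not called here, so ServerActivity remains on the
    // back stack and pressing Back from the edit screen returns to the server list.
    startActivity(intent);
}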
Java
apache-2.0
1b1c90910d81db242d98e5757c9dbc6ac54d1880
0
arx-deidentifier/arx,RaffaelBild/arx,RaffaelBild/arx,arx-deidentifier/arx
/* * ARX: Powerful Data Anonymization * Copyright 2012 - 2017 Fabian Prasser, Florian Kohlmayer and contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.deidentifier.arx; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.deidentifier.arx.ARXConfiguration.ARXConfigurationInternal; import org.deidentifier.arx.certificate.elements.ElementData; import org.deidentifier.arx.framework.lattice.SolutionSpace; import org.deidentifier.arx.framework.lattice.Transformation; import org.deidentifier.arx.metric.InformationLoss; import org.deidentifier.arx.metric.Metric; import cern.colt.list.LongArrayList; import com.carrotsearch.hppc.IntObjectOpenHashMap; import com.carrotsearch.hppc.LongObjectOpenHashMap; import de.linearbits.jhpl.JHPLIterator.LongIterator; /** * This class implements a representation of the generalization lattice that is * exposed to users of the API. * * @author Fabian Prasser * @author Florian Kohlmayer */ public class ARXLattice implements Serializable { /** * The internal accessor class. 
* * @author Fabian Prasser * @author Florian Kohlmayer */ public class Access implements Serializable { /** SVUID */ private static final long serialVersionUID = 6654627605797832468L; /** Lattice */ private final ARXLattice lattice; /** * Constructor * * @param lattice */ public Access(final ARXLattice lattice) { this.lattice = lattice; } /** * Accessor method * * @return */ public Map<String, Integer> getAttributeMap() { return bottom.headermap; } /** * Accessor method * * @param bottom */ public void setBottom(final ARXNode bottom) { lattice.bottom = bottom; } /** * Accessor method * * @param levels */ public void setLevels(final ARXNode[][] levels) { lattice.levels = levels; } /** * Accessor method * * @param config */ public void setMonotonicity(ARXConfiguration config) { lattice.setMonotonicity(config.isSuppressionAlwaysEnabled(), config.getAbsoluteSuppressionLimit()); } /** * Accessor method * * @param node */ public void setOptimum(final ARXNode node) { lattice.optimum = node; } /** * Accessor method * * @param model */ public void setQualityModel(final Metric<?> model) { lattice.metric = model; } /** * Accessor method * * @param size */ public void setSize(final int size) { lattice.size = size; } /** * Updates the solution space * @param solutions */ public void setSolutionSpace(SolutionSpace solutions) { lattice.solutions = solutions; // For backwards compatibility lattice.map = new LongObjectOpenHashMap<ARXNode>(); for (ARXNode[] level : lattice.levels) { for (ARXNode node : level) { int[] levels = node.getTransformation(); Transformation transformation = lattice.solutions.getTransformation(levels); long id = transformation.getIdentifier(); lattice.map.put(id, node); } } } /** * Accessor method * * @param top */ public void setTop(final ARXNode top) { lattice.top = top; } /** * Accessor method * * @param uncertainty */ public void setUncertainty(final boolean uncertainty) { lattice.uncertainty = uncertainty; } } /** * Reflects different anonymity properties. */ public static enum Anonymity { /** ANONYMOUS */ ANONYMOUS, /** NOT_ANONYMOUS */ NOT_ANONYMOUS, /** UNKNOWN */ UNKNOWN, /** PROBABLY_ANONYMOUS */ PROBABLY_ANONYMOUS, /** PROBABLY_NOT_ANONYMOUS */ PROBABLY_NOT_ANONYMOUS } /** * A node in the lattice. * * @author Fabian Prasser * @author Florian Kohlmayer */ public class ARXNode { /** * Internal access class. * * @author Fabian Prasser * @author Florian Kohlmayer */ public class Access { /** Node */ private final ARXNode node; /** * Accessor class * * @param node */ public Access(final ARXNode node) { this.node = node; } /** * Sets the anonymity. * * @param anonymity */ public void setAnonymity(final Anonymity anonymity) { node.anonymity = anonymity; } /** * Set anonymous. */ public void setAnonymous() { node.anonymity = Anonymity.ANONYMOUS; } /** * Sets the attributes. * * @param attributes */ public void setAttributes(final Map<Integer, Object> attributes) { node.attributes = attributes; } /** * Set checked. * * @param checked */ public void setChecked(final boolean checked) { node.checked = checked; } /** * Sets the headermap. * * @param headermap */ public void setHeadermap(final Map<String, Integer> headermap) { node.headermap = headermap; } /** * Sets the maximal information loss. * * @param a */ public void setHighestScore(final InformationLoss<?> a) { node.maxInformationLoss = InformationLoss.createInformationLoss(a, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Sets the lower bound. 
* * @param a */ public void setLowerBound(final InformationLoss<?> a) { node.lowerBound = InformationLoss.createInformationLoss(a, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Sets the minimal information loss. * * @param a */ public void setLowestScore(final InformationLoss<?> a) { node.minInformationLoss = InformationLoss.createInformationLoss(a, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Set not anonymous. */ public void setNotAnonymous() { node.anonymity = Anonymity.NOT_ANONYMOUS; } /** * Sets the predecessors. * * @param predecessors */ public void setPredecessors(final ARXNode[] predecessors) { node.predecessors = predecessors; } /** * Sets the successors. * * @param successors */ public void setSuccessors(final ARXNode[] successors) { node.successors = successors; } /** * Sets the transformation. * * @param transformation */ public void setTransformation(final int[] transformation) { node.transformation = transformation; } } /** Id. */ private Integer id = null; /** The access. */ private final Access access = new Access(this); /** Is it anonymous. */ private Anonymity anonymity; /** Attributes. */ private Map<Integer, Object> attributes = new HashMap<Integer, Object>(); /** Has the node been checked. */ private boolean checked; /** The header map. */ private Map<String, Integer> headermap; /** The lower bound. */ private InformationLoss<?> lowerBound; /** The max information loss. */ private InformationLoss<?> maxInformationLoss; /** The min information loss. */ private InformationLoss<?> minInformationLoss; /** The predecessors. */ private ARXNode[] predecessors; /** The successors. */ private ARXNode[] successors; /** The transformation. */ private int[] transformation; /** The underlying lattice */ private final ARXLattice lattice; /** * Internal constructor for deserialization. * * @param lattice */ public ARXNode(ARXLattice lattice) { this.lattice = lattice; } /** * Constructor. * * @param lattice * @param solutions * @param transformation * @param headermap */ private ARXNode(final ARXLattice lattice, final SolutionSpace solutions, final Transformation transformation, final Map<String, Integer> headermap) { // Set properties this.lattice = lattice; this.headermap = headermap; this.transformation = transformation.getGeneralization(); this.minInformationLoss = transformation.getInformationLoss(); this.maxInformationLoss = transformation.getInformationLoss(); this.lowerBound = transformation.getLowerBound(); this.checked = transformation.hasProperty(solutions.getPropertyChecked()); // Transfer anonymity property without uncertainty if (transformation.hasProperty(solutions.getPropertyChecked())){ if (transformation.hasProperty(solutions.getPropertyAnonymous())) { this.anonymity = Anonymity.ANONYMOUS; } else if(transformation.hasProperty(solutions.getPropertyNotAnonymous())) { this.anonymity = Anonymity.NOT_ANONYMOUS; } else { this.anonymity = Anonymity.UNKNOWN; } // This is a node for which the property is unknown } else { if (transformation.hasProperty(solutions.getPropertyAnonymous())) { this.anonymity = uncertainty ? Anonymity.PROBABLY_ANONYMOUS : Anonymity.ANONYMOUS; } else if (transformation.hasProperty(solutions.getPropertyNotAnonymous())) { this.anonymity = uncertainty ? 
Anonymity.PROBABLY_NOT_ANONYMOUS : Anonymity.NOT_ANONYMOUS; } else if (transformation.hasProperty(solutions.getPropertyNotKAnonymous())) { this.anonymity = Anonymity.NOT_ANONYMOUS; } else if (transformation.hasProperty(solutions.getPropertyInsufficientUtility())) { this.anonymity = Anonymity.UNKNOWN; } else { this.anonymity = Anonymity.UNKNOWN; } } // Make sure that we have information loss available // Important for expand operations if (this.maxInformationLoss == null) { this.maxInformationLoss = metric.createInstanceOfHighestScore(); } if (this.minInformationLoss == null) { this.minInformationLoss = metric.createInstanceOfLowestScore(); } } /** * Alter associated fields. * * @return */ public Access access() { return access; } /** * Materializes any non-materialized predecessors and successors */ public void expand() { this.lattice.expand(this); } /** * Returns the anonymity property. * * @return */ public Anonymity getAnonymity() { return anonymity; } /** * Returns the attributes. * * @return */ public Map<Integer, Object> getAttributes() { return attributes; } /** * Returns the index of an attribute. * * @param attr * @return */ public int getDimension(final String attr) { return headermap.get(attr); } /** * Returns the generalization for the attribute. * * @param attribute * @return */ public int getGeneralization(final String attribute) { final Integer index = headermap.get(attribute); if (index == null) { return 0; } return transformation[index]; } /** * Returns the highest score. Lower is better. * @return */ public InformationLoss<?> getHighestScore() { return maxInformationLoss; } /** * Returns the highest score. Lower is better. * @return */ public InformationLoss<?> getLowestScore() { return minInformationLoss; } /** * The predecessors. * * @return */ public ARXNode[] getPredecessors() { return predecessors; } /** * Returns the quasi identifiers. * * @return */ public String[] getQuasiIdentifyingAttributes() { final String[] result = new String[headermap.size()]; for (final String key : headermap.keySet()) { result[headermap.get(key)] = key; } return result; } /** * The successors. * * @return */ public ARXNode[] getSuccessors() { return successors; } /** * Returns the sum of all generalization levels. * * @return */ public int getTotalGeneralizationLevel() { int level = 0; for (int i : transformation) { level += i; } return level; } /** * Returns the transformation as an array. * * @return */ public int[] getTransformation() { return transformation; } /** * Returns if the node has been checked explicitly. * * @return */ public boolean isChecked() { return checked; } /** * Renders this object * @return */ public ElementData render() { ElementData result = new ElementData("Transformation"); result.addProperty("Anonymity", this.anonymity); result.addProperty("Minimum information loss", this.minInformationLoss.toString()); result.addProperty("Maximum information loss", this.maxInformationLoss.toString()); result.addProperty(null, renderGeneralizationScheme()); return result; } /** * De-serialization. 
* * @param aInputStream * @throws ClassNotFoundException * @throws IOException */ private void readObject(ObjectInputStream aInputStream) throws ClassNotFoundException, IOException { // Default de-serialization aInputStream.defaultReadObject(); // Translate information loss, if necessary this.lowerBound = InformationLoss.createInformationLoss(this.lowerBound, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); this.maxInformationLoss = InformationLoss.createInformationLoss(this.maxInformationLoss, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); this.minInformationLoss = InformationLoss.createInformationLoss(this.minInformationLoss, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Renders this object * @return */ private ElementData renderGeneralizationScheme() { ElementData result = new ElementData("Generalization scheme"); for (String qi : this.getQuasiIdentifyingAttributes()) { result.addProperty(qi, this.getGeneralization(qi) + "/" + this.lattice.getTop().getGeneralization(qi)); } return result; } /** * Returns the headermap * @return */ protected Map<String, Integer> getHeaderMap() { return this.headermap; } /** * Returns a node's internal id. * * @return */ protected Integer getId(){ return this.id; } /** * Returns a node's lower bound, if any. * * @return */ protected InformationLoss<?> getLowerBound(){ return this.lowerBound; } /** * Internal method that sets the id. * * @param id */ protected void setId(int id) { this.id = id; } } /** * Context for deserialization. * * @author Florian Kohlmayer */ public static class LatticeDeserializationContext { /** Min level */ public int minLevel = 0; /** Max level */ public int maxLevel = 0; } /** Deserialization context. */ private static LatticeDeserializationContext deserializationContext = new LatticeDeserializationContext(); /** SVUID */ private static final long serialVersionUID = -8790104959905019184L; /** * Returns the deserialization context. * * @return */ public static LatticeDeserializationContext getDeserializationContext() { return deserializationContext; } /** The accessor. */ private final Access access = new Access(this); /** The bottom node. */ private transient ARXNode bottom; /** The levels in the lattice. */ private transient ARXNode[][] levels; /** Metric. */ private Metric<?> metric; /** The optimum. */ private transient ARXNode optimum; /** The number of nodes. */ private int size; /** The virtual size */ private Long virtualSize; /** The top node. */ private transient ARXNode top; /** Is practical monotonicity being assumed. */ private boolean uncertainty; /** Kept only for backwards compatibility */ private Boolean complete; /** Monotonicity of information loss. */ private boolean monotonicAnonymous; /** Monotonicity of information loss. */ private boolean monotonicNonAnonymous; /** Minimum loss in the lattice. */ private InformationLoss<?> minimumInformationLoss = null; /** Maximum loss in the lattice. */ private InformationLoss<?> maximumInformationLoss = null; /** The solution space */ private transient SolutionSpace solutions; /** Map from ids to nodes */ private transient LongObjectOpenHashMap<ARXNode> map; /** * Constructor. 
* * @param solutions The solution space * @param optimum The optimum * @param header The header * @param config The config */ ARXLattice(final SolutionSpace solutions, final Transformation optimum, final String[] header, final ARXConfigurationInternal config) { // Init this.solutions = solutions; this.metric = config.getQualityModel(); this.setMonotonicity(config.isSuppressionAlwaysEnabled(), config.getAbsoluteSuppressionLimit()); this.virtualSize = solutions.getSize(); // Set this flag to true, if practical monotonicity is being assumed this.uncertainty = config.isPracticalMonotonicity(); // Build header map final Map<String, Integer> headermap = new HashMap<String, Integer>(); int index = 0; for (int i = 0; i < header.length; i++) { headermap.put(header[i], index++); } // Build lattice build(optimum, headermap); // find bottom node outer: for (int i = 0; i < this.levels.length; i++) { final ARXNode[] level = this.levels[i]; for (int j = 0; j < level.length; j++) { final ARXNode node = level[j]; if (node != null) { this.bottom = node; break outer; } } } // find top node outer: for (int i = this.levels.length - 1; i >= 0; i--) { final ARXNode[] level = this.levels[i]; for (int j = 0; j < level.length; j++) { final ARXNode node = level[j]; if (node != null) { this.top = node; break outer; } } } // Estimate information loss of all nodes estimateInformationLoss(); } /** * Access fields of this class. * * @return */ public Access access() { return access; } /** * Materializes any non-materialized predecessors and successors */ public void expand(ARXNode center) { // Initialize int[] indices = center.getTransformation(); Transformation transformation = solutions.getTransformation(indices); // Collect neighbors LongArrayList neighbors = transformation.getPredecessors(); LongArrayList successors = transformation.getSuccessors(); neighbors.addAllOfFromTo(successors, 0, successors.size() - 1); // Collect affected levels int lowerLevel = transformation.getLevel() - 1; int higherLevel = transformation.getLevel() + 1; if (transformation.getLevel() == solutions.getTop().getLevel()) { lowerLevel = solutions.getTop().getLevel() - 1; higherLevel = solutions.getTop().getLevel() - 1; } else if (transformation.getLevel() == solutions.getBottom().getLevel()) { lowerLevel = solutions.getBottom().getLevel() + 1; higherLevel = solutions.getBottom().getLevel() + 1; } // Find missing neighbors and initialize variables Map<String, Integer> headermap = this.getBottom().getHeaderMap(); Set<Long> missing = new HashSet<Long>(); for (int i = 0; i < neighbors.size(); i++) { missing.add(neighbors.getQuick(i)); } outer: for (int level = lowerLevel; level <= higherLevel; level ++) { if (level != transformation.getLevel()) { for (ARXNode node : this.levels[level]) { Long id = solutions.getTransformation(node.getTransformation()).getIdentifier(); missing.remove(id); if (missing.isEmpty()) { break outer; } } } } // Materialize missing nodes Map<Integer, List<ARXNode>> levels = new HashMap<Integer, List<ARXNode>>(); for (long id : missing) { // Materialize transformation = solutions.getTransformation(id); ARXNode node = new ARXNode(this, solutions, transformation, headermap); // Store in global map map.put(id, node); // Store in map of levels if (!levels.containsKey(transformation.getLevel())) { levels.put(transformation.getLevel(), new ArrayList<ARXNode>()); } levels.get(transformation.getLevel()).add(node); } // Insert missing nodes into level Arrays for (int level : levels.keySet()) { // Sort nodes to insert, 
lexicographically List<ARXNode> nodes = levels.get(level); Collections.sort(nodes, new Comparator<ARXNode>(){ public int compare(ARXNode o1, ARXNode o2) { return compareLexicographically(o1, o2); } }); // Initialize new level List<ARXNode> list = new ArrayList<ARXNode>(); // Now add all nodes in one pass int index = 0; for (ARXNode node : this.levels[level]) { while (index < nodes.size() && compareLexicographically(nodes.get(index), node) < 0) { list.add(nodes.get(index++)); } list.add(node); } // Add remaining while (index < nodes.size()) { list.add(nodes.get(index++)); } // Convert this.levels[level] = list.toArray(new ARXNode[list.size()]); } // Build relationships from/to missing nodes for (long id : missing) { this.createExpandedRelationships(solutions, id); } // Update size this.size += missing.size(); // Update information loss for (long id : missing) { // Pull lower bound from predecessors transformation = solutions.getTransformation(id); ARXNode node = map.get(id); InformationLoss<?> lowerBound = null; InformationLoss<?> min = metric.createInstanceOfLowestScore(); InformationLoss<?> max = metric.createInstanceOfHighestScore(); LongArrayList list = transformation.getPredecessors(); for (int i = 0; i < list.size(); i++) { ARXNode predecessor = map.get(list.getQuick(i)); if (predecessor != null && predecessor.getLowerBound() != null) { min.max(predecessor.getLowerBound().clone()); if (lowerBound == null) { lowerBound = predecessor.getLowerBound().clone(); } else { lowerBound.max(predecessor.getLowerBound().clone()); } } } node.access.setLowestScore(min); node.access.setLowerBound(lowerBound); node.access.setHighestScore(max); } } /** * Returns the bottom node. * * @return */ public ARXNode getBottom() { return bottom; } /** * Returns the highest score. Lower is better. * @return */ public InformationLoss<?> getHighestScore(){ return this.maximumInformationLoss; } /** * Returns the levels of the generalization lattice. * * @return */ public ARXNode[][] getLevels() { return levels; } /** * Returns the lowest score. Lower is better. * @return */ public InformationLoss<?> getLowestScore(){ return this.minimumInformationLoss; } /** * Returns the number of nodes. * * @return */ public int getSize() { return size; } /** * Returns the top node. * * @return */ public ARXNode getTop() { return top; } /** * Returns the virtual size of the solution space * @return */ public long getVirtualSize() { return virtualSize != null ? 
virtualSize : size; } /** * Renders this object * @return */ public ElementData render() { ElementData result = new ElementData("Search space"); result.addProperty("Size", this.virtualSize); result.addProperty("Materialized", this.size); return result; } /** * Build an ARX lattice for an incompletely classified solution space * @param optimum * @param headermap */ private void build(final Transformation optimum, Map<String, Integer> headermap) { // Create nodes this.map = new LongObjectOpenHashMap<ARXNode>(); final IntObjectOpenHashMap<List<ARXNode>> levels = new IntObjectOpenHashMap<List<ARXNode>>(); int size = 0; int maxlevel = 0; for (LongIterator iterator = solutions.getMaterializedTransformations(); iterator.hasNext();) { Transformation transformation = solutions.getTransformation(iterator.next()); if (!levels.containsKey(transformation.getLevel())) { levels.put(transformation.getLevel(), new ArrayList<ARXNode>()); } ARXNode node = new ARXNode(this, solutions, transformation, headermap); map.put(transformation.getIdentifier(), node); levels.get(transformation.getLevel()).add(node); if (optimum != null && transformation.getIdentifier() == optimum.getIdentifier()) { this.optimum = node; } maxlevel = Math.max(maxlevel, transformation.getLevel()); size++; } // Make sure that bottom and top are in the resulting solution space Transformation top = solutions.getTop(); Transformation bottom = solutions.getBottom(); if (!map.containsKey(top.getIdentifier())) { if (!levels.containsKey(top.getLevel())) { levels.put(top.getLevel(), new ArrayList<ARXNode>()); } ARXNode node = new ARXNode(this, solutions, top, headermap); map.put(top.getIdentifier(), node); levels.get(top.getLevel()).add(node); maxlevel = top.getLevel(); size++; } if (!map.containsKey(bottom.getIdentifier())) { if (!levels.containsKey(bottom.getLevel())) { levels.put(bottom.getLevel(), new ArrayList<ARXNode>()); } ARXNode node = new ARXNode(this, solutions, bottom, headermap); map.put(bottom.getIdentifier(), node); levels.get(bottom.getLevel()).add(node); size++; } // Create levels array this.size = size; this.levels = new ARXNode[maxlevel+1][]; for (int i = 0; i < this.levels.length; i++) { if (levels.containsKey(i)) { this.levels[i] = levels.get(i).toArray(new ARXNode[levels.get(i).size()]); } else { this.levels[i] = new ARXNode[0]; } } // Create relationships for (LongIterator iterator = solutions.getMaterializedTransformations(); iterator.hasNext();) { createRelationships(solutions, iterator.next()); } createRelationships(solutions, solutions.getTop().getIdentifier()); createRelationships(solutions, solutions.getBottom().getIdentifier()); } /** * Compares the transformations of two nodes lexicographically * @param first * @param second * @return */ private int compareLexicographically(ARXNode first, ARXNode second) { int[] firstArray = first.getTransformation(); int[] secondArray = second.getTransformation(); for (int i = 0; i < firstArray.length; i++) { if (firstArray[i] < secondArray[i]) { return -1; } else if (firstArray[i] > secondArray[i]) { return +1; } } return 0; } /** * Creates all relationships * @param solutions * @param map * @param id */ private void createExpandedRelationships(final SolutionSpace solutions, final long id) { // Obtain given node final ARXNode center = map.get(id); final Transformation transformation = solutions.getTransformation(id); // Collect materialized successors and predecessors List<ARXNode> successors = new ArrayList<ARXNode>(); List<ARXNode> predecessors = new ArrayList<ARXNode>(); 
LongArrayList list1 = transformation.getSuccessors(); for (int i = 0; i < list1.size(); i++) { ARXNode node = map.get(list1.getQuick(i)); if (node != null) { successors.add(node); } } LongArrayList list2 = transformation.getPredecessors(); for (int i = 0; i < list2.size(); i++) { ARXNode node = map.get(list2.getQuick(i)); if (node != null) { predecessors.add(node); } } // Add successors and predecessors to given node center.successors = successors.toArray(new ARXNode[successors.size()]); center.predecessors = predecessors.toArray(new ARXNode[predecessors.size()]); // Update predecessors for (ARXNode node : predecessors) { List<ARXNode> nodeSuccessors = new ArrayList<ARXNode>(); nodeSuccessors.addAll(Arrays.asList(node.successors)); int index = 0; while (index < nodeSuccessors.size() && compareLexicographically(nodeSuccessors.get(index), center) < 0 ) { index++; } // Subtract index = index == 0 ? 0 : index - 1; nodeSuccessors.add(index, center); // Add and update node.successors = nodeSuccessors.toArray(new ARXNode[nodeSuccessors.size()]); } // Update successors for (ARXNode node : successors) { List<ARXNode> nodePredecessors = new ArrayList<ARXNode>(); nodePredecessors.addAll(Arrays.asList(node.predecessors)); int index = 0; while (index < nodePredecessors.size() && compareLexicographically(nodePredecessors.get(index), center) < 0 ) { index++; } // Subtract index = index == 0 ? 0 : index - 1; // Add and update nodePredecessors.add(index, center); node.predecessors = nodePredecessors.toArray(new ARXNode[nodePredecessors.size()]); } } /** * Creates all relationships * @param solutions * @param map * @param id */ private void createRelationships(final SolutionSpace solutions, final long id) { final ARXNode fnode = map.get(id); final Transformation transformation = solutions.getTransformation(id); List<ARXNode> successors = new ArrayList<ARXNode>(); List<ARXNode> predecessors = new ArrayList<ARXNode>(); LongArrayList list1 = transformation.getSuccessors(); for (int i = 0; i < list1.size(); i++) { ARXNode node = map.get(list1.getQuick(i)); if (node != null) { successors.add(node); } } LongArrayList list2 = transformation.getPredecessors(); for (int i = 0; i < list2.size(); i++) { ARXNode node = map.get(list2.getQuick(i)); if (node != null) { predecessors.add(node); } } fnode.successors = successors.toArray(new ARXNode[successors.size()]); fnode.predecessors = predecessors.toArray(new ARXNode[predecessors.size()]); } /** * De-serialization. 
* * @param aInputStream * @throws ClassNotFoundException * @throws IOException */ private void readObject(ObjectInputStream aInputStream) throws ClassNotFoundException, IOException { // Default de-serialization aInputStream.defaultReadObject(); // Translate minimum and maximum this.maximumInformationLoss = InformationLoss.createInformationLoss(this.maximumInformationLoss, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); this.minimumInformationLoss = InformationLoss.createInformationLoss(this.minimumInformationLoss, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); // Translate metric, if necessary this.metric = Metric.createMetric(this.metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Sets the monotonicity based on the current configuration * @param isSuppressionAlwaysEnabled * @param absoluteSuppressionLimit */ private void setMonotonicity(boolean isSuppressionAlwaysEnabled, int absoluteSuppressionLimit) { this.monotonicNonAnonymous = (this.metric.isMonotonicWithSuppression() && isSuppressionAlwaysEnabled) || (this.metric.isMonotonicWithGeneralization() && !isSuppressionAlwaysEnabled); this.monotonicAnonymous = this.metric.isMonotonic(absoluteSuppressionLimit); } /** * This method triggers the estimation of the information loss of all nodes * in the lattice regardless of whether they have been checked for anonymity * or not. Additionally, it computes global upper and lower bounds on utility */ protected void estimateInformationLoss() { UtilityEstimator estimator = new UtilityEstimator(this, metric, monotonicAnonymous, monotonicNonAnonymous); estimator.estimate(); this.minimumInformationLoss = estimator.getGlobalMinimum(); this.maximumInformationLoss = estimator.getGlobalMaximum(); } /** * Returns the optimum, if any. * * @return */ protected ARXNode getOptimum() { return optimum; } /** * Kept only for backwards compatibility * @return */ boolean _legacySearchedWithFlash() { return complete == null ? true : complete; } }
src/main/org/deidentifier/arx/ARXLattice.java
/* * ARX: Powerful Data Anonymization * Copyright 2012 - 2017 Fabian Prasser, Florian Kohlmayer and contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.deidentifier.arx; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.deidentifier.arx.ARXConfiguration.ARXConfigurationInternal; import org.deidentifier.arx.certificate.elements.ElementData; import org.deidentifier.arx.framework.lattice.SolutionSpace; import org.deidentifier.arx.framework.lattice.Transformation; import org.deidentifier.arx.metric.InformationLoss; import org.deidentifier.arx.metric.Metric; import cern.colt.list.LongArrayList; import com.carrotsearch.hppc.IntObjectOpenHashMap; import com.carrotsearch.hppc.LongObjectOpenHashMap; import de.linearbits.jhpl.JHPLIterator.LongIterator; /** * This class implements a representation of the generalization lattice that is * exposed to users of the API. * * @author Fabian Prasser * @author Florian Kohlmayer */ public class ARXLattice implements Serializable { /** * The internal accessor class. 
* * @author Fabian Prasser * @author Florian Kohlmayer */ public class Access implements Serializable { /** SVUID */ private static final long serialVersionUID = 6654627605797832468L; /** Lattice */ private final ARXLattice lattice; /** * Constructor * * @param lattice */ public Access(final ARXLattice lattice) { this.lattice = lattice; } /** * Accessor method * * @return */ public Map<String, Integer> getAttributeMap() { return bottom.headermap; } /** * Accessor method * * @param bottom */ public void setBottom(final ARXNode bottom) { lattice.bottom = bottom; } /** * Accessor method * * @param levels */ public void setLevels(final ARXNode[][] levels) { lattice.levels = levels; } /** * Accessor method * * @param config */ public void setMonotonicity(ARXConfiguration config) { lattice.setMonotonicity(config.isSuppressionAlwaysEnabled(), config.getAbsoluteSuppressionLimit()); } /** * Accessor method * * @param node */ public void setOptimum(final ARXNode node) { lattice.optimum = node; } /** * Accessor method * * @param model */ public void setQualityModel(final Metric<?> model) { lattice.metric = model; } /** * Accessor method * * @param size */ public void setSize(final int size) { lattice.size = size; } /** * Updates the solution space * @param solutions */ public void setSolutionSpace(SolutionSpace solutions) { lattice.solutions = solutions; // For backwards compatibility lattice.map = new LongObjectOpenHashMap<ARXNode>(); for (ARXNode[] level : lattice.levels) { for (ARXNode node : level) { int[] levels = node.getTransformation(); Transformation transformation = lattice.solutions.getTransformation(levels); long id = transformation.getIdentifier(); lattice.map.put(id, node); } } } /** * Accessor method * * @param top */ public void setTop(final ARXNode top) { lattice.top = top; } /** * Accessor method * * @param uncertainty */ public void setUncertainty(final boolean uncertainty) { lattice.uncertainty = uncertainty; } } /** * Reflects different anonymity properties. */ public static enum Anonymity { /** ANONYMOUS */ ANONYMOUS, /** NOT_ANONYMOUS */ NOT_ANONYMOUS, /** UNKNOWN */ UNKNOWN, /** PROBABLY_ANONYMOUS */ PROBABLY_ANONYMOUS, /** PROBABLY_NOT_ANONYMOUS */ PROBABLY_NOT_ANONYMOUS } /** * A node in the lattice. * * @author Fabian Prasser * @author Florian Kohlmayer */ public class ARXNode { /** * Internal access class. * * @author Fabian Prasser * @author Florian Kohlmayer */ public class Access { /** Node */ private final ARXNode node; /** * Accessor class * * @param node */ public Access(final ARXNode node) { this.node = node; } /** * Sets the anonymity. * * @param anonymity */ public void setAnonymity(final Anonymity anonymity) { node.anonymity = anonymity; } /** * Set anonymous. */ public void setAnonymous() { node.anonymity = Anonymity.ANONYMOUS; } /** * Sets the attributes. * * @param attributes */ public void setAttributes(final Map<Integer, Object> attributes) { node.attributes = attributes; } /** * Set checked. * * @param checked */ public void setChecked(final boolean checked) { node.checked = checked; } /** * Sets the headermap. * * @param headermap */ public void setHeadermap(final Map<String, Integer> headermap) { node.headermap = headermap; } /** * Sets the maximal information loss. * * @param a */ public void setHighestScore(final InformationLoss<?> a) { node.maxInformationLoss = InformationLoss.createInformationLoss(a, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Sets the lower bound. 
* * @param a */ public void setLowerBound(final InformationLoss<?> a) { node.lowerBound = InformationLoss.createInformationLoss(a, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Sets the minimal information loss. * * @param a */ public void setLowestScore(final InformationLoss<?> a) { node.minInformationLoss = InformationLoss.createInformationLoss(a, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Set not anonymous. */ public void setNotAnonymous() { node.anonymity = Anonymity.NOT_ANONYMOUS; } /** * Sets the predecessors. * * @param predecessors */ public void setPredecessors(final ARXNode[] predecessors) { node.predecessors = predecessors; } /** * Sets the successors. * * @param successors */ public void setSuccessors(final ARXNode[] successors) { node.successors = successors; } /** * Sets the transformation. * * @param transformation */ public void setTransformation(final int[] transformation) { node.transformation = transformation; } } /** Id. */ private Integer id = null; /** The access. */ private final Access access = new Access(this); /** Is it anonymous. */ private Anonymity anonymity; /** Attributes. */ private Map<Integer, Object> attributes = new HashMap<Integer, Object>(); /** Has the node been checked. */ private boolean checked; /** The header map. */ private Map<String, Integer> headermap; /** The lower bound. */ private InformationLoss<?> lowerBound; /** The max information loss. */ private InformationLoss<?> maxInformationLoss; /** The min information loss. */ private InformationLoss<?> minInformationLoss; /** The predecessors. */ private ARXNode[] predecessors; /** The successors. */ private ARXNode[] successors; /** The transformation. */ private int[] transformation; /** The underlying lattice */ private final ARXLattice lattice; /** * Internal constructor for deserialization. * * @param lattice */ public ARXNode(ARXLattice lattice) { this.lattice = lattice; } /** * Constructor. * * @param lattice * @param solutions * @param transformation * @param headermap */ private ARXNode(final ARXLattice lattice, final SolutionSpace solutions, final Transformation transformation, final Map<String, Integer> headermap) { // Set properties this.lattice = lattice; this.headermap = headermap; this.transformation = transformation.getGeneralization(); this.minInformationLoss = transformation.getInformationLoss(); this.maxInformationLoss = transformation.getInformationLoss(); this.lowerBound = transformation.getLowerBound(); this.checked = transformation.hasProperty(solutions.getPropertyChecked()); // Transfer anonymity property without uncertainty if (transformation.hasProperty(solutions.getPropertyChecked())){ if (transformation.hasProperty(solutions.getPropertyAnonymous())) { this.anonymity = Anonymity.ANONYMOUS; } else if(transformation.hasProperty(solutions.getPropertyNotAnonymous())) { this.anonymity = Anonymity.NOT_ANONYMOUS; } else { this.anonymity = Anonymity.UNKNOWN; } // This is a node for which the property is unknown } else { if (transformation.hasProperty(solutions.getPropertyAnonymous())) { this.anonymity = uncertainty ? Anonymity.PROBABLY_ANONYMOUS : Anonymity.ANONYMOUS; } else if (transformation.hasProperty(solutions.getPropertyNotAnonymous())) { this.anonymity = uncertainty ? 
Anonymity.PROBABLY_NOT_ANONYMOUS : Anonymity.NOT_ANONYMOUS; } else if (transformation.hasProperty(solutions.getPropertyNotKAnonymous())) { this.anonymity = Anonymity.NOT_ANONYMOUS; } else if (transformation.hasProperty(solutions.getPropertyInsufficientUtility())) { this.anonymity = Anonymity.UNKNOWN; } else { this.anonymity = Anonymity.UNKNOWN; } } // Make sure that we have information loss available // Important for expand operations if (this.maxInformationLoss == null) { this.maxInformationLoss = metric.createInstanceOfHighestScore(); } if (this.minInformationLoss == null) { this.minInformationLoss = metric.createInstanceOfLowestScore(); } } /** * Alter associated fields. * * @return */ public Access access() { return access; } /** * Materializes any non-materialized predecessors and successors */ public void expand() { this.lattice.expand(this); } /** * Returns the anonymity property. * * @return */ public Anonymity getAnonymity() { return anonymity; } /** * Returns the attributes. * * @return */ public Map<Integer, Object> getAttributes() { return attributes; } /** * Returns the index of an attribute. * * @param attr * @return */ public int getDimension(final String attr) { return headermap.get(attr); } /** * Returns the generalization for the attribute. * * @param attribute * @return */ public int getGeneralization(final String attribute) { final Integer index = headermap.get(attribute); if (index == null) { return 0; } return transformation[index]; } /** * Returns the highest score. Lower is better. * @return */ public InformationLoss<?> getHighestScore() { return maxInformationLoss; } /** * Returns the highest score. Lower is better. * @return */ public InformationLoss<?> getLowestScore() { return minInformationLoss; } /** * The predecessors. * * @return */ public ARXNode[] getPredecessors() { return predecessors; } /** * Returns the quasi identifiers. * * @return */ public String[] getQuasiIdentifyingAttributes() { final String[] result = new String[headermap.size()]; for (final String key : headermap.keySet()) { result[headermap.get(key)] = key; } return result; } /** * The successors. * * @return */ public ARXNode[] getSuccessors() { return successors; } /** * Returns the sum of all generalization levels. * * @return */ public int getTotalGeneralizationLevel() { int level = 0; for (int i : transformation) { level += i; } return level; } /** * Returns the transformation as an array. * * @return */ public int[] getTransformation() { return transformation; } /** * Returns if the node has been checked explicitly. * * @return */ public boolean isChecked() { return checked; } /** * Renders this object * @return */ public ElementData render() { ElementData result = new ElementData("Transformation"); result.addProperty("Anonymity", this.anonymity); result.addProperty("Minimum information loss", this.minInformationLoss.toString()); result.addProperty("Maximum information loss", this.maxInformationLoss.toString()); result.addProperty(null, renderGeneralizationScheme()); return result; } /** * De-serialization. 
* * @param aInputStream * @throws ClassNotFoundException * @throws IOException */ private void readObject(ObjectInputStream aInputStream) throws ClassNotFoundException, IOException { // Default de-serialization aInputStream.defaultReadObject(); // Translate information loss, if necessary this.lowerBound = InformationLoss.createInformationLoss(this.lowerBound, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); this.maxInformationLoss = InformationLoss.createInformationLoss(this.maxInformationLoss, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); this.minInformationLoss = InformationLoss.createInformationLoss(this.minInformationLoss, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Renders this object * @return */ private ElementData renderGeneralizationScheme() { ElementData result = new ElementData("Generalization scheme"); for (String qi : this.getQuasiIdentifyingAttributes()) { result.addProperty(qi, this.getGeneralization(qi) + "/" + this.lattice.getTop().getGeneralization(qi)); } return result; } /** * Returns the headermap * @return */ protected Map<String, Integer> getHeaderMap() { return this.headermap; } /** * Returns a node's internal id. * * @return */ protected Integer getId(){ return this.id; } /** * Returns a node's lower bound, if any. * * @return */ protected InformationLoss<?> getLowerBound(){ return this.lowerBound; } /** * Internal method that sets the id. * * @param id */ protected void setId(int id) { this.id = id; } } /** * Context for deserialization. * * @author Florian Kohlmayer */ public static class LatticeDeserializationContext { /** Min level */ public int minLevel = 0; /** Max level */ public int maxLevel = 0; } /** Deserialization context. */ private static LatticeDeserializationContext deserializationContext = new LatticeDeserializationContext(); /** SVUID */ private static final long serialVersionUID = -8790104959905019184L; /** * Returns the deserialization context. * * @return */ public static LatticeDeserializationContext getDeserializationContext() { return deserializationContext; } /** The accessor. */ private final Access access = new Access(this); /** The bottom node. */ private transient ARXNode bottom; /** The levels in the lattice. */ private transient ARXNode[][] levels; /** Metric. */ private Metric<?> metric; /** The optimum. */ private transient ARXNode optimum; /** The number of nodes. */ private int size; /** The virtual size */ private Long virtualSize; /** The top node. */ private transient ARXNode top; /** Is practical monotonicity being assumed. */ private boolean uncertainty; /** Kept only for backwards compatibility */ private Boolean complete; /** Monotonicity of information loss. */ private boolean monotonicAnonymous; /** Monotonicity of information loss. */ private boolean monotonicNonAnonymous; /** Minimum loss in the lattice. */ private InformationLoss<?> minimumInformationLoss = null; /** Maximum loss in the lattice. */ private InformationLoss<?> maximumInformationLoss = null; /** The solution space */ private transient SolutionSpace solutions; /** Map from ids to nodes */ private transient LongObjectOpenHashMap<ARXNode> map; /** * Constructor. 
* * @param solutions The solution space * @param optimum The optimum * @param header The header * @param config The config */ ARXLattice(final SolutionSpace solutions, final Transformation optimum, final String[] header, final ARXConfigurationInternal config) { // Init this.solutions = solutions; this.metric = config.getQualityModel(); this.setMonotonicity(config.isSuppressionAlwaysEnabled(), config.getAbsoluteSuppressionLimit()); this.virtualSize = solutions.getSize(); // Set this flag to true, if practical monotonicity is being assumed this.uncertainty = config.isPracticalMonotonicity(); // Build header map final Map<String, Integer> headermap = new HashMap<String, Integer>(); int index = 0; for (int i = 0; i < header.length; i++) { headermap.put(header[i], index++); } // Build lattice build(optimum, headermap); // find bottom node outer: for (int i = 0; i < this.levels.length; i++) { final ARXNode[] level = this.levels[i]; for (int j = 0; j < level.length; j++) { final ARXNode node = level[j]; if (node != null) { this.bottom = node; break outer; } } } // find top node outer: for (int i = this.levels.length - 1; i >= 0; i--) { final ARXNode[] level = this.levels[i]; for (int j = 0; j < level.length; j++) { final ARXNode node = level[j]; if (node != null) { this.top = node; break outer; } } } // Estimate information loss of all nodes estimateInformationLoss(); } /** * Access fields of this class. * * @return */ public Access access() { return access; } /** * Materializes any non-materialized predecessors and successors */ public void expand(ARXNode center) { // Initialize int[] indices = center.getTransformation(); Transformation transformation = solutions.getTransformation(indices); // Collect neighbors LongArrayList neighbors = transformation.getPredecessors(); LongArrayList successors = transformation.getSuccessors(); neighbors.addAllOfFromTo(successors, 0, successors.size() - 1); // Collect affected levels int lowerLevel = transformation.getLevel() - 1; int higherLevel = transformation.getLevel() + 1; if (transformation.getLevel() == solutions.getTop().getLevel()) { lowerLevel = solutions.getTop().getLevel() - 1; higherLevel = solutions.getTop().getLevel() - 1; } else if (transformation.getLevel() == solutions.getBottom().getLevel()) { lowerLevel = solutions.getBottom().getLevel() + 1; higherLevel = solutions.getBottom().getLevel() + 1; } // Find missing neighbors and initialize variables Map<String, Integer> headermap = this.getBottom().getHeaderMap(); Set<Long> missing = new HashSet<Long>(); for (int i = 0; i < neighbors.size(); i++) { missing.add(neighbors.getQuick(i)); } outer: for (int level = lowerLevel; level <= higherLevel; level ++) { if (level != transformation.getLevel()) { for (ARXNode node : this.levels[level]) { Long id = solutions.getTransformation(node.getTransformation()).getIdentifier(); missing.remove(id); if (missing.isEmpty()) { break outer; } } } } // Materialize missing nodes Map<Integer, List<ARXNode>> levels = new HashMap<Integer, List<ARXNode>>(); for (long id : missing) { // Materialize transformation = solutions.getTransformation(id); ARXNode node = new ARXNode(this, solutions, transformation, headermap); // Store in global map map.put(id, node); // Store in map of levels if (!levels.containsKey(transformation.getLevel())) { levels.put(transformation.getLevel(), new ArrayList<ARXNode>()); } levels.get(transformation.getLevel()).add(node); } // Insert missing nodes into level Arrays for (int level : levels.keySet()) { // Sort nodes to insert, 
lexicographically List<ARXNode> nodes = levels.get(level); Collections.sort(nodes, new Comparator<ARXNode>(){ public int compare(ARXNode o1, ARXNode o2) { return compareLexicographically(o1, o2); } }); // Initialize new level List<ARXNode> list = new ArrayList<ARXNode>(); // Now add all nodes in one pass int index = 0; for (ARXNode node : this.levels[level]) { while (index < nodes.size() && compareLexicographically(nodes.get(index), node) < 0) { list.add(nodes.get(index++)); } list.add(node); } // Add remaining while (index < nodes.size()) { list.add(nodes.get(index++)); } // Convert this.levels[level] = list.toArray(new ARXNode[list.size()]); } // Build relationships from/to missing nodes for (long id : missing) { this.createExpandedRelationships(solutions, id); } // Update size this.size += missing.size(); // Update information loss for (long id : missing) { // Pull lower bound from predecessors transformation = solutions.getTransformation(id); ARXNode node = map.get(id); InformationLoss<?> lowerBound = null; InformationLoss<?> min = metric.createInstanceOfLowestScore(); InformationLoss<?> max = metric.createInstanceOfHighestScore(); LongArrayList list = transformation.getPredecessors(); for (int i = 0; i < list.size(); i++) { ARXNode predecessor = map.get(list.getQuick(i)); if (predecessor != null && predecessor.getLowerBound() != null) { min.max(predecessor.getLowerBound().clone()); if (lowerBound == null) { lowerBound = predecessor.getLowerBound().clone(); } else { lowerBound.max(predecessor.getLowerBound().clone()); } } } node.access.setLowestScore(min); node.access.setLowerBound(lowerBound); node.access.setHighestScore(max); } } /** * Returns the bottom node. * * @return */ public ARXNode getBottom() { return bottom; } /** * Returns the highest score. Lower is better. * @return */ public InformationLoss<?> getHighestScore(){ return this.maximumInformationLoss; } /** * Returns the levels of the generalization lattice. * * @return */ public ARXNode[][] getLevels() { return levels; } /** * Returns the lowest score. Lower is better. * @return */ public InformationLoss<?> getLowestScore(){ return this.minimumInformationLoss; } /** * Returns the number of nodes. * * @return */ public int getSize() { return size; } /** * Returns the top node. * * @return */ public ARXNode getTop() { return top; } /** * Returns the virtual size of the solution space * @return */ public long getVirtualSize() { return virtualSize != null ? 
virtualSize : size; } /** * Renders this object * @return */ public ElementData render() { ElementData result = new ElementData("Search space"); result.addProperty("Size", this.virtualSize); result.addProperty("Materialized", this.size); return result; } /** * Build an ARX lattice for an incompletely classified solution space * @param optimum * @param headermap */ private void build(final Transformation optimum, Map<String, Integer> headermap) { // Create nodes this.map = new LongObjectOpenHashMap<ARXNode>(); final IntObjectOpenHashMap<List<ARXNode>> levels = new IntObjectOpenHashMap<List<ARXNode>>(); int size = 0; int maxlevel = 0; for (LongIterator iterator = solutions.getMaterializedTransformations(); iterator.hasNext();) { Transformation transformation = solutions.getTransformation(iterator.next()); if (!levels.containsKey(transformation.getLevel())) { levels.put(transformation.getLevel(), new ArrayList<ARXNode>()); } ARXNode node = new ARXNode(this, solutions, transformation, headermap); map.put(transformation.getIdentifier(), node); levels.get(transformation.getLevel()).add(node); if (optimum != null && transformation.getIdentifier() == optimum.getIdentifier()) { this.optimum = node; } maxlevel = Math.max(maxlevel, transformation.getLevel()); size++; } // Make sure that bottom and top are in the resulting solution space Transformation top = solutions.getTop(); Transformation bottom = solutions.getBottom(); if (!map.containsKey(top.getIdentifier())) { if (!levels.containsKey(top.getLevel())) { levels.put(top.getLevel(), new ArrayList<ARXNode>()); } ARXNode node = new ARXNode(this, solutions, top, headermap); map.put(top.getIdentifier(), node); levels.get(top.getLevel()).add(node); maxlevel = top.getLevel(); size++; } if (!map.containsKey(bottom.getIdentifier())) { if (!levels.containsKey(bottom.getLevel())) { levels.put(bottom.getLevel(), new ArrayList<ARXNode>()); } ARXNode node = new ARXNode(this, solutions, bottom, headermap); map.put(bottom.getIdentifier(), node); levels.get(bottom.getLevel()).add(node); size++; } // Create levels array this.size = size; this.levels = new ARXNode[maxlevel+1][]; for (int i = 0; i < this.levels.length; i++) { if (levels.containsKey(i)) { this.levels[i] = levels.get(i).toArray(new ARXNode[levels.get(i).size()]); } else { this.levels[i] = new ARXNode[0]; } } // Create relationships for (LongIterator iterator = solutions.getMaterializedTransformations(); iterator.hasNext();) { createRelationships(solutions, iterator.next()); } createRelationships(solutions, solutions.getTop().getIdentifier()); createRelationships(solutions, solutions.getBottom().getIdentifier()); } /** * Compares the transformations of two nodes lexicographically * @param first * @param second * @return */ private int compareLexicographically(ARXNode first, ARXNode second) { int[] firstArray = first.getTransformation(); int[] secondArray = second.getTransformation(); for (int i = 0; i < firstArray.length; i++) { if (firstArray[i] < secondArray[i]) { return -1; } else if (firstArray[i] > secondArray[i]) { return +1; } } return 0; } /** * Creates all relationships * @param solutions * @param map * @param id */ private void createExpandedRelationships(final SolutionSpace solutions, final long id) { // Obtain given node final ARXNode center = map.get(id); final Transformation transformation = solutions.getTransformation(id); // Collect materialized successors and predecessors List<ARXNode> successors = new ArrayList<ARXNode>(); List<ARXNode> predecessors = new ArrayList<ARXNode>(); 
LongArrayList list1 = transformation.getSuccessors(); for (int i = 0; i < list1.size(); i++) { ARXNode node = map.get(list1.getQuick(i)); if (node != null) { successors.add(node); } } LongArrayList list2 = transformation.getPredecessors(); for (int i = 0; i < list2.size(); i++) { ARXNode node = map.get(list2.getQuick(i)); if (node != null) { predecessors.add(node); } } // Add successors and predecessors to given node center.successors = successors.toArray(new ARXNode[successors.size()]); center.predecessors = predecessors.toArray(new ARXNode[predecessors.size()]); // Update predecessors for (ARXNode node : predecessors) { List<ARXNode> nodeSuccessors = new ArrayList<ARXNode>(); nodeSuccessors.addAll(Arrays.asList(node.successors)); int index = 0; while (index < nodeSuccessors.size() && compareLexicographically(nodeSuccessors.get(index), center) < 0 ) { index++; } // Subtract index = index == 0 ? 0 : index - 1; nodeSuccessors.add(index, center); // Add and update node.successors = nodeSuccessors.toArray(new ARXNode[nodeSuccessors.size()]); } // Update successors for (ARXNode node : successors) { List<ARXNode> nodePredecessors = new ArrayList<ARXNode>(); nodePredecessors.addAll(Arrays.asList(node.predecessors)); int index = 0; while (index < nodePredecessors.size() && compareLexicographically(nodePredecessors.get(index), center) < 0 ) { index++; } // Subtract index = index == 0 ? 0 : index - 1; // Add and update nodePredecessors.add(index, center); node.predecessors = nodePredecessors.toArray(new ARXNode[nodePredecessors.size()]); } } /** * Creates all relationships * @param solutions * @param map * @param id */ private void createRelationships(final SolutionSpace solutions, final long id) { final ARXNode fnode = map.get(id); final Transformation transformation = solutions.getTransformation(id); List<ARXNode> successors = new ArrayList<ARXNode>(); List<ARXNode> predecessors = new ArrayList<ARXNode>(); LongArrayList list1 = transformation.getSuccessors(); for (int i = 0; i < list1.size(); i++) { ARXNode node = map.get(list1.getQuick(i)); if (node != null) { successors.add(node); } } LongArrayList list2 = transformation.getPredecessors(); for (int i = 0; i < list2.size(); i++) { ARXNode node = map.get(list2.getQuick(i)); if (node != null) { predecessors.add(node); } } fnode.successors = successors.toArray(new ARXNode[successors.size()]); fnode.predecessors = predecessors.toArray(new ARXNode[predecessors.size()]); } /** * De-serialization. 
* * @param aInputStream * @throws ClassNotFoundException * @throws IOException */ private void readObject(ObjectInputStream aInputStream) throws ClassNotFoundException, IOException { // Default de-serialization aInputStream.defaultReadObject(); // Translate minimum and maximum this.maximumInformationLoss = InformationLoss.createInformationLoss(this.maximumInformationLoss, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); this.minimumInformationLoss = InformationLoss.createInformationLoss(this.minimumInformationLoss, metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); // Translate metric, if necessary this.metric = Metric.createMetric(this.metric, getDeserializationContext().minLevel, getDeserializationContext().maxLevel); } /** * Sets the monotonicity based on the current configuration * @param isSuppressionAlwaysEnabled * @param absoluteSuppressionLimit */ private void setMonotonicity(boolean isSuppressionAlwaysEnabled, int absoluteSuppressionLimit) { this.monotonicNonAnonymous = (this.metric.isMonotonicWithSuppression() && isSuppressionAlwaysEnabled) || (this.metric.isMonotonicWithGeneralization() && !isSuppressionAlwaysEnabled); this.monotonicAnonymous = this.metric.isMonotonic(absoluteSuppressionLimit); } /** * This method triggers the estimation of the information loss of all nodes * in the lattice regardless of whether they have been checked for anonymity * or not. Additionally, it computes global upper and lower bounds on utility */ protected void estimateInformationLoss() { UtilityEstimator estimator = new UtilityEstimator(this, metric, monotonicAnonymous, monotonicNonAnonymous); estimator.estimate(); this.minimumInformationLoss = estimator.getGlobalMinimum(); this.maximumInformationLoss = estimator.getGlobalMaximum(); } /** * Returns the optimum, if any. * * @return */ protected ARXNode getOptimum() { return optimum; } /** * Kept only for backwards compatibility * @return */ boolean _legacySearchedWithFlash() { return complete; } }
Bugfix: handle null
src/main/org/deidentifier/arx/ARXLattice.java
Bugfix: handle null
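The null being handled by this commit is the boxed Boolean field "complete" in ARXLattice: it stays null when a lattice serialized by an older ARX version is loaded, and the previous "return complete;" auto-unboxes it, throwing a NullPointerException. The new revision treats a missing flag as true. A minimal, self-contained sketch of that failure mode and of the guard (hypothetical class and method names, not part of ARX) is:

public class LegacyFlagDemo {

    // Boxed flag that stays null when an older serialized form is read
    private Boolean complete;

    // Pre-fix behaviour: auto-unboxing a null Boolean throws a NullPointerException
    boolean searchedWithFlashUnsafe() {
        return complete;
    }

    // Post-fix behaviour: a missing flag defaults to true, as in the new revision above
    boolean searchedWithFlashSafe() {
        return complete == null ? true : complete;
    }

    public static void main(String[] args) {
        LegacyFlagDemo demo = new LegacyFlagDemo(); // complete is still null
        System.out.println(demo.searchedWithFlashSafe()); // prints "true"
        try {
            demo.searchedWithFlashUnsafe();
        } catch (NullPointerException expected) {
            System.out.println("unboxing a null Boolean failed as expected");
        }
    }
}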
Java
apache-2.0
f901f24cbd181d995454233a12155e33c3cb37e2
0
allotria/intellij-community
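For readers following the ARXLattice code in this record: expand() keeps every level array sorted by comparing generalization vectors element by element, which is exactly what compareLexicographically implements. The standalone sketch below (hypothetical class name and array literals; only the comparison logic is taken from the record) shows the ordering it produces:

import java.util.Arrays;

public class TransformationOrderDemo {

    // Element-wise lexicographic comparison of two generalization vectors,
    // mirroring ARXLattice.compareLexicographically
    static int compare(int[] first, int[] second) {
        for (int i = 0; i < first.length; i++) {
            if (first[i] < second[i]) return -1;
            if (first[i] > second[i]) return +1;
        }
        return 0;
    }

    public static void main(String[] args) {
        int[][] nodes = { {1, 2, 0}, {0, 3, 1}, {1, 1, 2} };
        Arrays.sort(nodes, TransformationOrderDemo::compare);
        // Prints [0, 3, 1], then [1, 1, 2], then [1, 2, 0]
        for (int[] node : nodes) {
            System.out.println(Arrays.toString(node));
        }
    }
}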
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.ide.ui; import com.fasterxml.jackson.databind.ObjectMapper; import com.intellij.ide.plugins.cl.PluginClassLoader; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.IconPathPatcher; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.ColorUtil; import com.intellij.util.SVGLoader; import com.intellij.util.ui.JBDimension; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import javax.swing.*; import javax.swing.plaf.BorderUIResource; import javax.swing.plaf.ColorUIResource; import javax.swing.plaf.IconUIResource; import java.awt.*; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Map; import static com.intellij.util.ui.JBUI.Borders.customLine; import static com.intellij.util.ui.JBUI.asUIResource; /** * @author Konstantin Bulenkov */ public class UITheme { private String name; private boolean dark; private String author; private String id; private String editorScheme; private Map<String, Object> ui; private Map<String, Object> icons; private IconPathPatcher patcher; private Map<String, Object> background; private ClassLoader providerClassLoader = getClass().getClassLoader(); private String editorSchemeName; private SVGLoader.SvgColorPatcher colorPatcher; private UITheme() { } public String getName() { return name; } public boolean isDark() { return dark; } public String getAuthor() { return author; } public static UITheme loadFromJson(InputStream stream, @NotNull String themeId, @NotNull ClassLoader provider) throws IOException { UITheme theme = new ObjectMapper().readValue(stream, UITheme.class); theme.id = themeId; theme.providerClassLoader = provider; if (theme.icons != null && !theme.icons.isEmpty()) { theme.patcher = new IconPathPatcher() { @Nullable @Override public String patchPath(String path, ClassLoader classLoader) { if (classLoader instanceof PluginClassLoader) { String pluginId = ((PluginClassLoader)classLoader).getPluginId().getIdString(); Object icons = theme.icons.get(pluginId); if (icons instanceof Map) { Object pluginIconPath = ((Map)icons).get(path); if (pluginIconPath instanceof String) { return (String)pluginIconPath; } } } Object value = theme.icons.get(path); return value instanceof String ? 
(String)value : null; } @Nullable @Override public ClassLoader getContextClassLoader(String path, ClassLoader originalClassLoader) { return theme.providerClassLoader; } }; Object palette = theme.icons.get("ColorPalette"); if (palette instanceof Map) { Map colors = (Map)palette; Map<String, String> newPalette = new HashMap<>(); Map<String, Integer> alphas = new HashMap<>(); for (Object o : colors.keySet()) { String key = toColorString(o.toString(), theme.isDark()); Object v = colors.get(o.toString()); if (v instanceof String) { String value = (String)v; String alpha = null; if (value.length() == 9) { alpha = value.substring(7); value = value.substring(0, 7); } if (ColorUtil.fromHex(key, null) != null && ColorUtil.fromHex(value, null) != null) { newPalette.put(key, value); int fillTransparency = -1; if (alpha != null) { try { fillTransparency = Integer.parseInt(alpha, 16); } catch (Exception ignore) {} } if (fillTransparency != -1) { alphas.put(value, fillTransparency); } } } } theme.colorPatcher = new SVGLoader.SvgColorPatcher() { @Override public void patchColors(Element svg) { String fill = svg.getAttribute("fill"); if (fill != null) { String newFill = newPalette.get(StringUtil.toLowerCase(fill)); if (newFill != null) { svg.setAttribute("fill", newFill); if (alphas.get(newFill) != null) { svg.setAttribute("fill-opacity", String.valueOf((Float.valueOf(alphas.get(newFill)) / 255f))); } } } NodeList nodes = svg.getChildNodes(); int length = nodes.getLength(); for (int i = 0; i < length; i++) { Node item = nodes.item(i); if (item instanceof Element) { patchColors((Element)item); } } } }; } } return theme; } private static String toColorString(String fillValue, boolean darkTheme) { if (darkTheme && fillValue.startsWith("Actions.") && !fillValue.endsWith(".Dark")) { fillValue += ".Dark"; } String color = colorPalette.get(fillValue); if (color != null) { return StringUtil.toLowerCase(color); } return StringUtil.toLowerCase(fillValue); } private static final Map<String, String> colorPalette = new HashMap<>(); static { colorPalette.put("Actions.Red", "#DB5860"); colorPalette.put("Actions.Red.Dark", "#C75450"); colorPalette.put("Actions.Yellow", "#EDA200"); colorPalette.put("Actions.Yellow.Dark", "#F0A732"); colorPalette.put("Actions.Green", "#59A869"); colorPalette.put("Actions.Green.Dark", "#499C54"); colorPalette.put("Actions.Blue", "#389FD6"); colorPalette.put("Actions.Blue.Dark", "#3592C4"); colorPalette.put("Actions.Grey", "#6E6E6E"); colorPalette.put("Actions.Grey.Dark", "#AFB1B3"); colorPalette.put("Objects.Grey", "#9AA7B0"); colorPalette.put("Objects.Blue", "#40B6E0"); colorPalette.put("Objects.Green", "#62B543"); colorPalette.put("Objects.Yellow", "#F4AF3D"); colorPalette.put("Objects.YellowDark", "#D9A343"); colorPalette.put("Objects.Purple", "#B99BF8"); colorPalette.put("Objects.Pink", "#F98B9E"); colorPalette.put("Objects.Red", "#F26522"); colorPalette.put("Objects.RedStatus", "#E05555"); colorPalette.put("Objects.GreenAndroid", "#A4C639"); colorPalette.put("Objects.BlackText", "#231F20"); } public String getId() { return id; } @Nullable public String getEditorScheme() { return editorScheme; } public Map<String, Object> getBackground() { return background; } public void applyProperties(UIDefaults defaults) { if (ui == null) return; for (Map.Entry<String, Object> entry : ui.entrySet()) { apply(entry.getKey(), entry.getValue(), defaults); } } public IconPathPatcher getPatcher() { return patcher; } public SVGLoader.SvgColorPatcher getColorPatcher() { return colorPatcher; } @NotNull public 
ClassLoader getProviderClassLoader() { return providerClassLoader; } private static void apply(String key, Object value, UIDefaults defaults) { if (value instanceof HashMap) { for (Map.Entry<String, Object> o : ((HashMap<String, Object>)value).entrySet()) { apply(key + "." + o.getKey(), o.getValue(), defaults); } } else { value = parseValue(key, value.toString()); if (key.startsWith("*.")) { String tail = key.substring(1); Object finalValue = value; //please DO NOT stream on UIDefaults directly ((UIDefaults)defaults.clone()).keySet().stream() .filter(k -> k instanceof String && ((String)k).endsWith(tail)) .forEach(k -> defaults.put(k, finalValue)); } else { defaults.put(key, value); } } } public static Object parseValue(String key, @NotNull String value) { if ("null".equals(value)) { return null; } if ("true".equals(value)) return Boolean.TRUE; if ("false".equals(value)) return Boolean.FALSE; if (key.endsWith("Insets") || key.endsWith("padding")) { return parseInsets(value); } else if (key.endsWith("Border") || key.endsWith("border")) { try { List<String> ints = StringUtil.split(value, ","); if (ints.size() == 4) { return new BorderUIResource.EmptyBorderUIResource(parseInsets(value)); } else if (ints.size() == 5) { return asUIResource(customLine(ColorUtil.fromHex(ints.get(4)), Integer.parseInt(ints.get(0)), Integer.parseInt(ints.get(1)), Integer.parseInt(ints.get(2)), Integer.parseInt(ints.get(3)))); } else if (ColorUtil.fromHex(value, null) != null) { return asUIResource(customLine(ColorUtil.fromHex(value), 1)); } else { return Class.forName(value).newInstance(); } } catch (Exception e) { e.printStackTrace(); } } else if (key.endsWith("Size")) { return parseSize(value); } else if (key.endsWith("Width")) { return getInteger(value); } else if (key.endsWith("grayFilter")) { return parseGrayFilter(value); } else { final Color color = parseColor(value); final Integer invVal = getInteger(value); Icon icon = value.startsWith("AllIcons.") ? 
IconLoader.getIcon(value) : null; if (color != null) { return new ColorUIResource(color); } else if (invVal != null) { return invVal; } else if (icon != null) { return new IconUIResource(icon); } } return value; } private static Insets parseInsets(String value) { final java.util.List<String> numbers = StringUtil.split(value, ","); return new JBInsets(Integer.parseInt(numbers.get(0)), Integer.parseInt(numbers.get(1)), Integer.parseInt(numbers.get(2)), Integer.parseInt(numbers.get(3))).asUIResource(); } private static UIUtil.GrayFilter parseGrayFilter(String value) { java.util.List<String> numbers = StringUtil.split(value, ","); return new UIUtil.GrayFilter(Integer.parseInt(numbers.get(0)), Integer.parseInt(numbers.get(1)), Integer.parseInt(numbers.get(2))).asUIResource(); } @SuppressWarnings("UseJBColor") private static Color parseColor(String value) { if (value != null && value.length() == 8) { final Color color = ColorUtil.fromHex(value.substring(0, 6)); try { int alpha = Integer.parseInt(value.substring(6, 8), 16); return new ColorUIResource(new Color(color.getRed(), color.getGreen(), color.getBlue(), alpha)); } catch (Exception ignore){} return null; } return ColorUtil.fromHex(value, null); } private static Integer getInteger(String value) { try { return Integer.parseInt(value); } catch (NumberFormatException e) { return null; } } private static Dimension parseSize(String value) { final List<String> numbers = StringUtil.split(value, ","); return new JBDimension(Integer.parseInt(numbers.get(0)), Integer.parseInt(numbers.get(1))).asUIResource(); } public String getEditorSchemeName() { return editorSchemeName; } public void setEditorSchemeName(String editorSchemeName) { this.editorSchemeName = editorSchemeName; } // //json deserialization methods // @SuppressWarnings("unused") private void setName(String name) { this.name = name; } @SuppressWarnings("unused") private void setDark(boolean dark) { this.dark = dark; } @SuppressWarnings("unused") private void setAuthor(String author) { this.author = author; } @SuppressWarnings("unused") private void setUi(Map<String, Object> ui) { this.ui = ui; } @SuppressWarnings("unused") private void setIcons(Map<String, Object> icons) { this.icons = icons; } @SuppressWarnings("unused") public void setEditorScheme(String editorScheme) { this.editorScheme = editorScheme; } public void setBackground(Map<String, Object> background) { this.background = background; } }
platform/platform-impl/src/com/intellij/ide/ui/UITheme.java
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.ide.ui; import com.fasterxml.jackson.databind.ObjectMapper; import com.intellij.ide.plugins.cl.PluginClassLoader; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.IconPathPatcher; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.ColorUtil; import com.intellij.util.SVGLoader; import com.intellij.util.ui.JBDimension; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import javax.swing.*; import javax.swing.plaf.BorderUIResource; import javax.swing.plaf.ColorUIResource; import javax.swing.plaf.IconUIResource; import java.awt.*; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Map; import static com.intellij.util.ui.JBUI.Borders.customLine; import static com.intellij.util.ui.JBUI.asUIResource; /** * @author Konstantin Bulenkov */ public class UITheme { private String name; private boolean dark; private String author; private String id; private String editorScheme; private Map<String, Object> ui; private Map<String, Object> icons; private IconPathPatcher patcher; private Map<String, Object> background; private ClassLoader providerClassLoader = getClass().getClassLoader(); private String editorSchemeName; private SVGLoader.SvgColorPatcher colorPatcher; private UITheme() { } public String getName() { return name; } public boolean isDark() { return dark; } public String getAuthor() { return author; } public static UITheme loadFromJson(InputStream stream, @NotNull String themeId, @NotNull ClassLoader provider) throws IOException { UITheme theme = new ObjectMapper().readValue(stream, UITheme.class); theme.id = themeId; theme.providerClassLoader = provider; if (!theme.icons.isEmpty()) { theme.patcher = new IconPathPatcher() { @Nullable @Override public String patchPath(String path, ClassLoader classLoader) { if (classLoader instanceof PluginClassLoader) { String pluginId = ((PluginClassLoader)classLoader).getPluginId().getIdString(); Object icons = theme.icons.get(pluginId); if (icons instanceof Map) { Object pluginIconPath = ((Map)icons).get(path); if (pluginIconPath instanceof String) { return (String)pluginIconPath; } } } Object value = theme.icons.get(path); return value instanceof String ? 
(String)value : null; } @Nullable @Override public ClassLoader getContextClassLoader(String path, ClassLoader originalClassLoader) { return theme.providerClassLoader; } }; Object palette = theme.icons.get("ColorPalette"); if (palette instanceof Map) { Map colors = (Map)palette; Map<String, String> newPalette = new HashMap<>(); Map<String, Integer> alphas = new HashMap<>(); for (Object o : colors.keySet()) { String key = toColorString(o.toString(), theme.isDark()); Object v = colors.get(o.toString()); if (v instanceof String) { String value = (String)v; String alpha = null; if (value.length() == 9) { alpha = value.substring(7); value = value.substring(0, 7); } if (ColorUtil.fromHex(key, null) != null && ColorUtil.fromHex(value, null) != null) { newPalette.put(key, value); int fillTransparency = -1; if (alpha != null) { try { fillTransparency = Integer.parseInt(alpha, 16); } catch (Exception ignore) {} } if (fillTransparency != -1) { alphas.put(value, fillTransparency); } } } } theme.colorPatcher = new SVGLoader.SvgColorPatcher() { @Override public void patchColors(Element svg) { String fill = svg.getAttribute("fill"); if (fill != null) { String newFill = newPalette.get(StringUtil.toLowerCase(fill)); if (newFill != null) { svg.setAttribute("fill", newFill); if (alphas.get(newFill) != null) { svg.setAttribute("fill-opacity", String.valueOf((Float.valueOf(alphas.get(newFill)) / 255f))); } } } NodeList nodes = svg.getChildNodes(); int length = nodes.getLength(); for (int i = 0; i < length; i++) { Node item = nodes.item(i); if (item instanceof Element) { patchColors((Element)item); } } } }; } } return theme; } private static String toColorString(String fillValue, boolean darkTheme) { if (darkTheme && fillValue.startsWith("Actions.") && !fillValue.endsWith(".Dark")) { fillValue += ".Dark"; } String color = colorPalette.get(fillValue); if (color != null) { return StringUtil.toLowerCase(color); } return StringUtil.toLowerCase(fillValue); } private static final Map<String, String> colorPalette = new HashMap<>(); static { colorPalette.put("Actions.Red", "#DB5860"); colorPalette.put("Actions.Red.Dark", "#C75450"); colorPalette.put("Actions.Yellow", "#EDA200"); colorPalette.put("Actions.Yellow.Dark", "#F0A732"); colorPalette.put("Actions.Green", "#59A869"); colorPalette.put("Actions.Green.Dark", "#499C54"); colorPalette.put("Actions.Blue", "#389FD6"); colorPalette.put("Actions.Blue.Dark", "#3592C4"); colorPalette.put("Actions.Grey", "#6E6E6E"); colorPalette.put("Actions.Grey.Dark", "#AFB1B3"); colorPalette.put("Objects.Grey", "#9AA7B0"); colorPalette.put("Objects.Blue", "#40B6E0"); colorPalette.put("Objects.Green", "#62B543"); colorPalette.put("Objects.Yellow", "#F4AF3D"); colorPalette.put("Objects.YellowDark", "#D9A343"); colorPalette.put("Objects.Purple", "#B99BF8"); colorPalette.put("Objects.Pink", "#F98B9E"); colorPalette.put("Objects.Red", "#F26522"); colorPalette.put("Objects.RedStatus", "#E05555"); colorPalette.put("Objects.GreenAndroid", "#A4C639"); colorPalette.put("Objects.BlackText", "#231F20"); } public String getId() { return id; } @Nullable public String getEditorScheme() { return editorScheme; } public Map<String, Object> getBackground() { return background; } public void applyProperties(UIDefaults defaults) { if (ui == null) return; for (Map.Entry<String, Object> entry : ui.entrySet()) { apply(entry.getKey(), entry.getValue(), defaults); } } public IconPathPatcher getPatcher() { return patcher; } public SVGLoader.SvgColorPatcher getColorPatcher() { return colorPatcher; } @NotNull public 
ClassLoader getProviderClassLoader() { return providerClassLoader; } private static void apply(String key, Object value, UIDefaults defaults) { if (value instanceof HashMap) { for (Map.Entry<String, Object> o : ((HashMap<String, Object>)value).entrySet()) { apply(key + "." + o.getKey(), o.getValue(), defaults); } } else { value = parseValue(key, value.toString()); if (key.startsWith("*.")) { String tail = key.substring(1); Object finalValue = value; //please DO NOT stream on UIDefaults directly ((UIDefaults)defaults.clone()).keySet().stream() .filter(k -> k instanceof String && ((String)k).endsWith(tail)) .forEach(k -> defaults.put(k, finalValue)); } else { defaults.put(key, value); } } } public static Object parseValue(String key, @NotNull String value) { if ("null".equals(value)) { return null; } if ("true".equals(value)) return Boolean.TRUE; if ("false".equals(value)) return Boolean.FALSE; if (key.endsWith("Insets") || key.endsWith("padding")) { return parseInsets(value); } else if (key.endsWith("Border") || key.endsWith("border")) { try { List<String> ints = StringUtil.split(value, ","); if (ints.size() == 4) { return new BorderUIResource.EmptyBorderUIResource(parseInsets(value)); } else if (ints.size() == 5) { return asUIResource(customLine(ColorUtil.fromHex(ints.get(4)), Integer.parseInt(ints.get(0)), Integer.parseInt(ints.get(1)), Integer.parseInt(ints.get(2)), Integer.parseInt(ints.get(3)))); } else if (ColorUtil.fromHex(value, null) != null) { return asUIResource(customLine(ColorUtil.fromHex(value), 1)); } else { return Class.forName(value).newInstance(); } } catch (Exception e) { e.printStackTrace(); } } else if (key.endsWith("Size")) { return parseSize(value); } else if (key.endsWith("Width")) { return getInteger(value); } else if (key.endsWith("grayFilter")) { return parseGrayFilter(value); } else { final Color color = parseColor(value); final Integer invVal = getInteger(value); Icon icon = value.startsWith("AllIcons.") ? 
IconLoader.getIcon(value) : null; if (color != null) { return new ColorUIResource(color); } else if (invVal != null) { return invVal; } else if (icon != null) { return new IconUIResource(icon); } } return value; } private static Insets parseInsets(String value) { final java.util.List<String> numbers = StringUtil.split(value, ","); return new JBInsets(Integer.parseInt(numbers.get(0)), Integer.parseInt(numbers.get(1)), Integer.parseInt(numbers.get(2)), Integer.parseInt(numbers.get(3))).asUIResource(); } private static UIUtil.GrayFilter parseGrayFilter(String value) { java.util.List<String> numbers = StringUtil.split(value, ","); return new UIUtil.GrayFilter(Integer.parseInt(numbers.get(0)), Integer.parseInt(numbers.get(1)), Integer.parseInt(numbers.get(2))).asUIResource(); } @SuppressWarnings("UseJBColor") private static Color parseColor(String value) { if (value != null && value.length() == 8) { final Color color = ColorUtil.fromHex(value.substring(0, 6)); try { int alpha = Integer.parseInt(value.substring(6, 8), 16); return new ColorUIResource(new Color(color.getRed(), color.getGreen(), color.getBlue(), alpha)); } catch (Exception ignore){} return null; } return ColorUtil.fromHex(value, null); } private static Integer getInteger(String value) { try { return Integer.parseInt(value); } catch (NumberFormatException e) { return null; } } private static Dimension parseSize(String value) { final List<String> numbers = StringUtil.split(value, ","); return new JBDimension(Integer.parseInt(numbers.get(0)), Integer.parseInt(numbers.get(1))).asUIResource(); } public String getEditorSchemeName() { return editorSchemeName; } public void setEditorSchemeName(String editorSchemeName) { this.editorSchemeName = editorSchemeName; } // //json deserialization methods // @SuppressWarnings("unused") private void setName(String name) { this.name = name; } @SuppressWarnings("unused") private void setDark(boolean dark) { this.dark = dark; } @SuppressWarnings("unused") private void setAuthor(String author) { this.author = author; } @SuppressWarnings("unused") private void setUi(Map<String, Object> ui) { this.ui = ui; } @SuppressWarnings("unused") private void setIcons(Map<String, Object> icons) { this.icons = icons; } @SuppressWarnings("unused") public void setEditorScheme(String editorScheme) { this.editorScheme = editorScheme; } public void setBackground(Map<String, Object> background) { this.background = background; } }
IDEA-199416 IDEA startup NPE if no 'icons' specified in theme file
platform/platform-impl/src/com/intellij/ide/ui/UITheme.java
IDEA-199416 IDEA startup NPE if no 'icons' specified in theme file
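The commit message above (IDEA-199416) describes a startup NPE when a theme's JSON file declares no 'icons' section; the actual diff is not visible in this excerpt. As a hedged illustration only, the guard below shows the kind of null/empty check such a fix implies. The class and method names here are hypothetical and are not taken from the record; they merely mirror the shape of UITheme.loadFromJson shown in the file contents above.

import java.util.Collections;
import java.util.Map;

// Hypothetical sketch (not the actual commit diff): only touch the "icons" map
// when the theme JSON actually provided one, so icon patching cannot NPE.
class ThemeIconsGuardSketch {
    static void applyIcons(Map<String, Object> icons) {
        if (icons == null || icons.isEmpty()) {
            return; // no 'icons' section in the theme file: nothing to patch, no NPE
        }
        Object palette = icons.get("ColorPalette");
        System.out.println("would install icon patcher; ColorPalette present: " + (palette != null));
    }

    public static void main(String[] args) {
        applyIcons(null);                   // theme without an 'icons' block
        applyIcons(Collections.emptyMap()); // empty 'icons' block
    }
}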
Java
apache-2.0
e081e1363e82d558a6afe305591973e30abe2cad
0
aparo/elasticsearch,aparo/elasticsearch,fubuki/elasticsearch,fubuki/elasticsearch,fubuki/elasticsearch,fubuki/elasticsearch,fubuki/elasticsearch,aparo/elasticsearch,fubuki/elasticsearch,aparo/elasticsearch,aparo/elasticsearch,aparo/elasticsearch
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.SeedUtils; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.*; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import org.apache.lucene.util.AbstractRandomizedTest; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.XIOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.cache.recycler.CacheRecycler; import org.elasticsearch.cache.recycler.PageCacheRecyclerModule; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.ImmutableSettings.Builder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArraysModule; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.cache.filter.FilterCacheModule; import 
org.elasticsearch.index.cache.filter.none.NoneFilterCache; import org.elasticsearch.index.cache.filter.weighted.WeightedFilterCache; import org.elasticsearch.index.engine.IndexEngineModule; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalNode; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.cache.recycler.MockBigArraysModule; import org.elasticsearch.test.cache.recycler.MockPageCacheRecyclerModule; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.engine.MockEngineModule; import org.elasticsearch.test.store.MockFSIndexStoreModule; import org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.netty.NettyTransport; import org.junit.Assert; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.Path; import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static com.carrotsearch.randomizedtesting.RandomizedTest.between; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static junit.framework.Assert.fail; import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY; import static org.apache.lucene.util.LuceneTestCase.rarely; import static org.apache.lucene.util.LuceneTestCase.usually; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.elasticsearch.node.NodeBuilder.nodeBuilder; import static org.elasticsearch.test.ElasticsearchTestCase.assertBusy; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; /** * InternalTestCluster manages a set of JVM private nodes and allows convenient access to them. * The cluster supports randomized configuration such that nodes started in the cluster will * automatically load asserting services tracking resources like file handles or open searchers. * <p> * The Cluster is bound to a test lifecycle where tests must call {@link #beforeTest(java.util.Random, double)} and * {@link #afterTest()} to initialize and reset the cluster in order to be more reproducible. The term "more" relates * to the async nature of Elasticsearch in combination with randomized testing. Once Threads and asynchronous calls * are involved reproducibility is very limited. This class should only be used through {@link ElasticsearchIntegrationTest}. 
* </p> */ public final class InternalTestCluster extends TestCluster { private final ESLogger logger = Loggers.getLogger(getClass()); static SettingsSource DEFAULT_SETTINGS_SOURCE = SettingsSource.EMPTY; /** * A boolean value to enable or disable mock modules. This is useful to test the * system without asserting modules that to make sure they don't hide any bugs in * production. * * @see ElasticsearchIntegrationTest */ public static final String TESTS_ENABLE_MOCK_MODULES = "tests.enable_mock_modules"; /** * A node level setting that holds a per node random seed that is consistent across node restarts */ public static final String SETTING_CLUSTER_NODE_SEED = "test.cluster.node.seed"; private static final boolean ENABLE_MOCK_MODULES = RandomizedTest.systemPropertyAsBoolean(TESTS_ENABLE_MOCK_MODULES, true); static final int DEFAULT_MIN_NUM_DATA_NODES = 2; static final int DEFAULT_MAX_NUM_DATA_NODES = 6; static final int DEFAULT_NUM_CLIENT_NODES = -1; static final int DEFAULT_MIN_NUM_CLIENT_NODES = 0; static final int DEFAULT_MAX_NUM_CLIENT_NODES = 1; static final boolean DEFAULT_ENABLE_RANDOM_BENCH_NODES = true; static final boolean DEFAULT_ENABLE_HTTP_PIPELINING = true; public static final String NODE_MODE = nodeMode(); /* sorted map to make traverse order reproducible, concurrent since we do checks on it not within a sync block */ private final NavigableMap<String, NodeAndClient> nodes = new TreeMap<>(); private final Set<Path> dataDirToClean = new HashSet<>(); private final String clusterName; private final AtomicBoolean open = new AtomicBoolean(true); private final Settings defaultSettings; private AtomicInteger nextNodeId = new AtomicInteger(0); /* Each shared node has a node seed that is used to start up the node and get default settings * this is important if a node is randomly shut down in a test since the next test relies on a * fully shared cluster to be more reproducible */ private final long[] sharedNodesSeeds; private final int numSharedDataNodes; private final int numSharedClientNodes; private final boolean enableRandomBenchNodes; private final SettingsSource settingsSource; private final ExecutorService executor; private final boolean hasFilterCache; /** * All nodes started by the cluster will have their name set to nodePrefix followed by a positive number */ private final String nodePrefix; private ServiceDisruptionScheme activeDisruptionScheme; public InternalTestCluster(long clusterSeed, int minNumDataNodes, int maxNumDataNodes, String clusterName, int numClientNodes, boolean enableRandomBenchNodes, boolean enableHttpPipelining, int jvmOrdinal, String nodePrefix) { this(clusterSeed, minNumDataNodes, maxNumDataNodes, clusterName, DEFAULT_SETTINGS_SOURCE, numClientNodes, enableRandomBenchNodes, enableHttpPipelining, jvmOrdinal, nodePrefix); } public InternalTestCluster(long clusterSeed, int minNumDataNodes, int maxNumDataNodes, String clusterName, SettingsSource settingsSource, int numClientNodes, boolean enableRandomBenchNodes, boolean enableHttpPipelining, int jvmOrdinal, String nodePrefix) { super(clusterSeed); this.clusterName = clusterName; if (minNumDataNodes < 0 || maxNumDataNodes < 0) { throw new IllegalArgumentException("minimum and maximum number of data nodes must be >= 0"); } if (maxNumDataNodes < minNumDataNodes) { throw new IllegalArgumentException("maximum number of data nodes must be >= minimum number of data nodes"); } Random random = new Random(clusterSeed); this.numSharedDataNodes = RandomInts.randomIntBetween(random, minNumDataNodes, maxNumDataNodes); 
assert this.numSharedDataNodes >= 0; //for now all shared data nodes are also master eligible if (numSharedDataNodes == 0) { this.numSharedClientNodes = 0; } else { if (numClientNodes < 0) { this.numSharedClientNodes = RandomInts.randomIntBetween(random, DEFAULT_MIN_NUM_CLIENT_NODES, DEFAULT_MAX_NUM_CLIENT_NODES); } else { this.numSharedClientNodes = numClientNodes; } } assert this.numSharedClientNodes >= 0; this.enableRandomBenchNodes = enableRandomBenchNodes; this.nodePrefix = nodePrefix; assert nodePrefix != null; /* * TODO * - we might want start some master only nodes? * - we could add a flag that returns a client to the master all the time? * - we could add a flag that never returns a client to the master * - along those lines use a dedicated node that is master eligible and let all other nodes be only data nodes */ sharedNodesSeeds = new long[numSharedDataNodes + numSharedClientNodes]; for (int i = 0; i < sharedNodesSeeds.length; i++) { sharedNodesSeeds[i] = random.nextLong(); } logger.info("Setup InternalTestCluster [{}] with seed [{}] using [{}] data nodes and [{}] client nodes", clusterName, SeedUtils.formatSeed(clusterSeed), numSharedDataNodes, numSharedClientNodes); this.settingsSource = settingsSource; Builder builder = ImmutableSettings.settingsBuilder(); if (random.nextInt(5) == 0) { // sometimes set this // randomize (multi/single) data path, special case for 0, don't set it at all... final int numOfDataPaths = random.nextInt(5); if (numOfDataPaths > 0) { StringBuilder dataPath = new StringBuilder(); for (int i = 0; i < numOfDataPaths; i++) { dataPath.append(new File("data/d" + i).getAbsolutePath()).append(','); } builder.put("path.data", dataPath.toString()); } } final int basePort = 9300 + (100 * (jvmOrdinal+1)); builder.put("transport.tcp.port", basePort + "-" + (basePort+100)); builder.put("http.port", basePort+101 + "-" + (basePort+200)); builder.put("config.ignore_system_properties", true); builder.put("node.mode", NODE_MODE); builder.put("script.disable_dynamic", false); builder.put("http.pipelining", enableHttpPipelining); builder.put("plugins." 
+ PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, false); if (Strings.hasLength(System.getProperty("es.logger.level"))) { builder.put("logger.level", System.getProperty("es.logger.level")); } if (Strings.hasLength(System.getProperty("es.logger.prefix"))) { builder.put("logger.prefix", System.getProperty("es.logger.level")); } // Default the watermarks to absurdly low to prevent the tests // from failing on nodes without enough disk space builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "1b"); builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "1b"); if (TEST_NIGHTLY) { builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, between(10, 15)); builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, between(10, 15)); builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, between(5, 10)); } else if (randomBoolean()) { builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, between(3, 6)); builder.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, between(3, 6)); builder.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, between(2, 5)); } defaultSettings = builder.build(); executor = EsExecutors.newCached(0, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test_" + clusterName)); this.hasFilterCache = random.nextBoolean(); } public static String nodeMode() { Builder builder = ImmutableSettings.builder(); if (Strings.isEmpty(System.getProperty("es.node.mode")) && Strings.isEmpty(System.getProperty("es.node.local"))) { return "local"; // default if nothing is specified } if (Strings.hasLength(System.getProperty("es.node.mode"))) { builder.put("node.mode", System.getProperty("es.node.mode")); } if (Strings.hasLength(System.getProperty("es.node.local"))) { builder.put("node.local", System.getProperty("es.node.local")); } if (DiscoveryNode.localNode(builder.build())) { return "local"; } else { return "network"; } } @Override public String getClusterName() { return clusterName; } public String[] getNodeNames() { return nodes.keySet().toArray(Strings.EMPTY_ARRAY); } private static boolean isLocalTransportConfigured() { if ("local".equals(System.getProperty("es.node.mode", "network"))) { return true; } return Boolean.parseBoolean(System.getProperty("es.node.local", "false")); } private Settings getSettings(int nodeOrdinal, long nodeSeed, Settings others) { Builder builder = ImmutableSettings.settingsBuilder().put(defaultSettings) .put(getRandomNodeSettings(nodeSeed)) .put(FilterCacheModule.FilterCacheSettings.FILTER_CACHE_TYPE, hasFilterCache() ? WeightedFilterCache.class : NoneFilterCache.class); Settings settings = settingsSource.node(nodeOrdinal); if (settings != null) { if (settings.get(ClusterName.SETTING) != null) { throw new ElasticsearchIllegalStateException("Tests must not set a '" + ClusterName.SETTING + "' as a node setting set '" + ClusterName.SETTING + "': [" + settings.get(ClusterName.SETTING) + "]"); } builder.put(settings); } if (others != null) { builder.put(others); } builder.put(ClusterName.SETTING, clusterName); return builder.build(); } private static Settings getRandomNodeSettings(long seed) { Random random = new Random(seed); Builder builder = ImmutableSettings.settingsBuilder() /* use RAM directories in 10% of the runs */ //.put("index.store.type", random.nextInt(10) == 0 ? 
MockRamIndexStoreModule.class.getName() : MockFSIndexStoreModule.class.getName()) // decrease the routing schedule so new nodes will be added quickly - some random value between 30 and 80 ms .put("cluster.routing.schedule", (30 + random.nextInt(50)) + "ms") // default to non gateway .put("gateway.type", "none") .put(SETTING_CLUSTER_NODE_SEED, seed); if (ENABLE_MOCK_MODULES && usually(random)) { builder.put("index.store.type", MockFSIndexStoreModule.class.getName()); // no RAM dir for now! builder.put(IndexEngineModule.EngineSettings.ENGINE_TYPE, MockEngineModule.class.getName()); builder.put(PageCacheRecyclerModule.CACHE_IMPL, MockPageCacheRecyclerModule.class.getName()); builder.put(BigArraysModule.IMPL, MockBigArraysModule.class.getName()); builder.put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, MockTransportService.class.getName()); } if (isLocalTransportConfigured()) { builder.put(TransportModule.TRANSPORT_TYPE_KEY, AssertingLocalTransport.class.getName()); } else { builder.put(Transport.TransportSettings.TRANSPORT_TCP_COMPRESS, rarely(random)); } builder.put("type", RandomPicks.randomFrom(random, CacheRecycler.Type.values())); if (random.nextBoolean()) { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, CacheRecycler.Type.values())); } if (random.nextInt(10) == 0) { // 10% of the nodes have a very frequent check interval builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueMillis(10 + random.nextInt(2000))); } else if (random.nextInt(10) != 0) { // 90% of the time - 10% of the time we don't set anything builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueSeconds(10 + random.nextInt(5 * 60))); } if (random.nextBoolean()) { // sometimes set a builder.put(SearchService.DEFAUTL_KEEPALIVE_KEY, TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60))); } if (random.nextBoolean()) { // change threadpool types to make sure we don't have components that rely on the type of thread pools for (String name : Arrays.asList(ThreadPool.Names.BULK, ThreadPool.Names.FLUSH, ThreadPool.Names.GET, ThreadPool.Names.INDEX, ThreadPool.Names.MANAGEMENT, ThreadPool.Names.MERGE, ThreadPool.Names.OPTIMIZE, ThreadPool.Names.PERCOLATE, ThreadPool.Names.REFRESH, ThreadPool.Names.SEARCH, ThreadPool.Names.SNAPSHOT, ThreadPool.Names.SUGGEST, ThreadPool.Names.WARMER)) { if (random.nextBoolean()) { final String type = RandomPicks.randomFrom(random, Arrays.asList("fixed", "cached", "scaling")); builder.put(ThreadPool.THREADPOOL_GROUP + name + ".type", type); } } } if (random.nextInt(10) == 0) { builder.put(EsExecutors.PROCESSORS, 1 + random.nextInt(AbstractRandomizedTest.TESTS_PROCESSORS)); } else { builder.put(EsExecutors.PROCESSORS, AbstractRandomizedTest.TESTS_PROCESSORS); } if (random.nextBoolean()) { if (random.nextBoolean()) { builder.put("indices.fielddata.cache.size", 1 + random.nextInt(1000), ByteSizeUnit.MB); } if (random.nextBoolean()) { builder.put("indices.fielddata.cache.expire", TimeValue.timeValueMillis(1 + random.nextInt(10000))); } } // randomize netty settings if (random.nextBoolean()) { builder.put(NettyTransport.WORKER_COUNT, random.nextInt(3) + 1); builder.put(NettyTransport.CONNECTIONS_PER_NODE_RECOVERY, random.nextInt(2) + 1); builder.put(NettyTransport.CONNECTIONS_PER_NODE_BULK, random.nextInt(3) + 1); builder.put(NettyTransport.CONNECTIONS_PER_NODE_REG, random.nextInt(6) + 1); } if (random.nextBoolean()) { builder.put(MappingUpdatedAction.INDICES_MAPPING_ADDITIONAL_MAPPING_CHANGE_TIME, RandomInts.randomIntBetween(random, 0, 500) 
/*milliseconds*/); } if (random.nextInt(10) == 0) { builder.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, "noop"); builder.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, "noop"); } return builder.build(); } public static String clusterName(String prefix, String childVMId, long clusterSeed) { StringBuilder builder = new StringBuilder(prefix); builder.append('-').append(NetworkUtils.getLocalHostName("__default_host__")); builder.append("-CHILD_VM=[").append(childVMId).append(']'); builder.append("-CLUSTER_SEED=[").append(clusterSeed).append(']'); // if multiple maven task run on a single host we better have an identifier that doesn't rely on input params builder.append("-HASH=[").append(SeedUtils.formatSeed(System.nanoTime())).append(']'); return builder.toString(); } private void ensureOpen() { if (!open.get()) { throw new RuntimeException("Cluster is already closed"); } } private synchronized NodeAndClient getOrBuildRandomNode() { ensureOpen(); NodeAndClient randomNodeAndClient = getRandomNodeAndClient(); if (randomNodeAndClient != null) { return randomNodeAndClient; } NodeAndClient buildNode = buildNode(); buildNode.node().start(); publishNode(buildNode); return buildNode; } private synchronized NodeAndClient getRandomNodeAndClient() { Predicate<NodeAndClient> all = Predicates.alwaysTrue(); return getRandomNodeAndClient(all); } private synchronized NodeAndClient getRandomNodeAndClient(Predicate<NodeAndClient> predicate) { ensureOpen(); Collection<NodeAndClient> values = Collections2.filter(nodes.values(), predicate); if (!values.isEmpty()) { int whichOne = random.nextInt(values.size()); for (NodeAndClient nodeAndClient : values) { if (whichOne-- == 0) { return nodeAndClient; } } } return null; } /** * Ensures that at least <code>n</code> data nodes are present in the cluster. * if more nodes than <code>n</code> are present this method will not * stop any of the running nodes. */ public void ensureAtLeastNumDataNodes(int n) { List<ListenableFuture<String>> futures = Lists.newArrayList(); synchronized (this) { int size = numDataNodes(); for (int i = size; i < n; i++) { logger.info("increasing cluster size from {} to {}", size, n); futures.add(startNodeAsync()); } } try { Futures.allAsList(futures).get(); } catch (Exception e) { throw new ElasticsearchException("failed to start nodes", e); } if (!futures.isEmpty()) { synchronized (this) { assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(nodes.size())).get()); } } } /** * Ensures that at most <code>n</code> are up and running. * If less nodes that <code>n</code> are running this method * will not start any additional nodes. */ public synchronized void ensureAtMostNumDataNodes(int n) throws IOException { int size = numDataNodes(); if (size <= n) { return; } // prevent killing the master if possible and client nodes final Iterator<NodeAndClient> values = n == 0 ? 
nodes.values().iterator() : Iterators.filter(nodes.values().iterator(), Predicates.and(new DataNodePredicate(), Predicates.not(new MasterNodePredicate(getMasterName())))); final Iterator<NodeAndClient> limit = Iterators.limit(values, size - n); logger.info("changing cluster size from {} to {}, {} data nodes", size(), n + numSharedClientNodes, n); Set<NodeAndClient> nodesToRemove = new HashSet<>(); while (limit.hasNext()) { NodeAndClient next = limit.next(); nodesToRemove.add(next); removeDisruptionSchemeFromNode(next); next.close(); } for (NodeAndClient toRemove : nodesToRemove) { nodes.remove(toRemove.name); } if (!nodesToRemove.isEmpty() && size() > 0) { assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(nodes.size())).get()); } } private NodeAndClient buildNode(Settings settings, Version version) { int ord = nextNodeId.getAndIncrement(); return buildNode(ord, random.nextLong(), settings, version); } private NodeAndClient buildNode() { int ord = nextNodeId.getAndIncrement(); return buildNode(ord, random.nextLong(), null, Version.CURRENT); } private NodeAndClient buildNode(int nodeId, long seed, Settings settings, Version version) { assert Thread.holdsLock(this); ensureOpen(); settings = getSettings(nodeId, seed, settings); String name = buildNodeName(nodeId); assert !nodes.containsKey(name); Settings finalSettings = settingsBuilder() .put(settings) .put("name", name) .put("discovery.id.seed", seed) .put("tests.mock.version", version) .build(); Node node = nodeBuilder().settings(finalSettings).build(); return new NodeAndClient(name, node); } private String buildNodeName(int id) { return nodePrefix + id; } /** * Returns the common node name prefix for this test cluster. */ public String nodePrefix() { return nodePrefix; } @Override public synchronized Client client() { ensureOpen(); /* Randomly return a client to one of the nodes in the cluster */ return getOrBuildRandomNode().client(random); } /** * Returns a node client to a data node in the cluster. * Note: use this with care tests should not rely on a certain nodes client. */ public synchronized Client dataNodeClient() { ensureOpen(); /* Randomly return a client to one of the nodes in the cluster */ return getRandomNodeAndClient(new DataNodePredicate()).client(random); } /** * Returns a node client to the current master node. * Note: use this with care tests should not rely on a certain nodes client. */ public synchronized Client masterClient() { ensureOpen(); NodeAndClient randomNodeAndClient = getRandomNodeAndClient(new MasterNodePredicate(getMasterName())); if (randomNodeAndClient != null) { return randomNodeAndClient.nodeClient(); // ensure node client master is requested } Assert.fail("No master client found"); return null; // can't happen } /** * Returns a node client to random node but not the master. This method will fail if no non-master client is available. 
*/ public synchronized Client nonMasterClient() { ensureOpen(); NodeAndClient randomNodeAndClient = getRandomNodeAndClient(Predicates.not(new MasterNodePredicate(getMasterName()))); if (randomNodeAndClient != null) { return randomNodeAndClient.nodeClient(); // ensure node client non-master is requested } Assert.fail("No non-master client found"); return null; // can't happen } /** * Returns a client to a node started with "node.client: true" */ public synchronized Client clientNodeClient() { ensureOpen(); NodeAndClient randomNodeAndClient = getRandomNodeAndClient(new ClientNodePredicate()); if (randomNodeAndClient != null) { return randomNodeAndClient.client(random); } int nodeId = nextNodeId.getAndIncrement(); Settings settings = getSettings(nodeId, random.nextLong(), ImmutableSettings.EMPTY); startNodeClient(settings); return getRandomNodeAndClient(new ClientNodePredicate()).client(random); } public synchronized Client startNodeClient(Settings settings) { ensureOpen(); // currently unused Builder builder = settingsBuilder().put(settings).put("node.client", true); if (size() == 0) { // if we are the first node - don't wait for a state builder.put("discovery.initial_state_timeout", 0); } String name = startNode(builder); return nodes.get(name).nodeClient(); } /** * Returns a transport client */ public synchronized Client transportClient() { ensureOpen(); // randomly return a transport client going to one of the nodes in the cluster return getOrBuildRandomNode().transportClient(); } /** * Returns a node client to a given node. */ public synchronized Client client(String nodeName) { ensureOpen(); NodeAndClient nodeAndClient = nodes.get(nodeName); if (nodeAndClient != null) { return nodeAndClient.client(random); } Assert.fail("No node found with name: [" + nodeName + "]"); return null; // can't happen } /** * Returns a "smart" node client to a random node in the cluster */ public synchronized Client smartClient() { NodeAndClient randomNodeAndClient = getRandomNodeAndClient(); if (randomNodeAndClient != null) { return randomNodeAndClient.nodeClient(); } Assert.fail("No smart client found"); return null; // can't happen } /** * Returns a random node that applies to the given predicate. * The predicate can filter nodes based on the nodes settings. 
* If all nodes are filtered out this method will return <code>null</code> */ public synchronized Client client(final Predicate<Settings> filterPredicate) { ensureOpen(); final NodeAndClient randomNodeAndClient = getRandomNodeAndClient(new Predicate<NodeAndClient>() { @Override public boolean apply(NodeAndClient nodeAndClient) { return filterPredicate.apply(nodeAndClient.node.settings()); } }); if (randomNodeAndClient != null) { return randomNodeAndClient.client(random); } return null; } @Override public void close() { if (this.open.compareAndSet(true, false)) { if (activeDisruptionScheme != null) { activeDisruptionScheme.testClusterClosed(); activeDisruptionScheme = null; } IOUtils.closeWhileHandlingException(nodes.values()); nodes.clear(); executor.shutdownNow(); } } private final class NodeAndClient implements Closeable { private InternalNode node; private Client nodeClient; private Client transportClient; private final AtomicBoolean closed = new AtomicBoolean(false); private final String name; NodeAndClient(String name, Node node) { this.node = (InternalNode) node; this.name = name; } Node node() { if (closed.get()) { throw new RuntimeException("already closed"); } return node; } Client client(Random random) { if (closed.get()) { throw new RuntimeException("already closed"); } double nextDouble = random.nextDouble(); if (nextDouble < transportClientRatio) { if (logger.isTraceEnabled()) { logger.trace("Using transport client for node [{}] sniff: [{}]", node.settings().get("name"), false); } return getOrBuildTransportClient(); } else { return getOrBuildNodeClient(); } } Client nodeClient() { if (closed.get()) { throw new RuntimeException("already closed"); } return getOrBuildNodeClient(); } Client transportClient() { if (closed.get()) { throw new RuntimeException("already closed"); } return getOrBuildTransportClient(); } private Client getOrBuildNodeClient() { if (nodeClient != null) { return nodeClient; } return nodeClient = node.client(); } private Client getOrBuildTransportClient() { if (transportClient != null) { return transportClient; } /* no sniff client for now - doesn't work will all tests since it might throw NoNodeAvailableException if nodes are shut down. 
* we first need support of transportClientRatio as annotations or so */ return transportClient = TransportClientFactory.noSniff(settingsSource.transportClient()).client(node, clusterName); } void resetClient() throws IOException { if (closed.get()) { throw new RuntimeException("already closed"); } Releasables.close(nodeClient, transportClient); nodeClient = null; transportClient = null; } void closeNode() { registerDataPath(); node.close(); } void restart(RestartCallback callback) throws Exception { assert callback != null; resetClient(); if (!node.isClosed()) { closeNode(); } Settings newSettings = callback.onNodeStopped(name); if (newSettings == null) { newSettings = ImmutableSettings.EMPTY; } if (callback.clearData(name)) { NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, node); if (nodeEnv.hasNodeFile()) { XIOUtils.rm(nodeEnv.nodeDataPaths()); } } node = (InternalNode) nodeBuilder().settings(node.settings()).settings(newSettings).node(); } void registerDataPath() { NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, node); if (nodeEnv.hasNodeFile()) { dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataPaths())); } } @Override public void close() throws IOException { resetClient(); closed.set(true); closeNode(); } } public static final String TRANSPORT_CLIENT_PREFIX = "transport_client_"; static class TransportClientFactory { private static TransportClientFactory NO_SNIFF_CLIENT_FACTORY = new TransportClientFactory(false, ImmutableSettings.EMPTY); private static TransportClientFactory SNIFF_CLIENT_FACTORY = new TransportClientFactory(true, ImmutableSettings.EMPTY); private final boolean sniff; private final Settings settings; public static TransportClientFactory noSniff(Settings settings) { if (settings == null || settings.names().isEmpty()) { return NO_SNIFF_CLIENT_FACTORY; } return new TransportClientFactory(false, settings); } public static TransportClientFactory sniff(Settings settings) { if (settings == null || settings.names().isEmpty()) { return SNIFF_CLIENT_FACTORY; } return new TransportClientFactory(true, settings); } TransportClientFactory(boolean sniff, Settings settings) { this.sniff = sniff; this.settings = settings != null ? settings : ImmutableSettings.EMPTY; } public Client client(Node node, String clusterName) { TransportAddress addr = ((InternalNode) node).injector().getInstance(TransportService.class).boundAddress().publishAddress(); Settings nodeSettings = node.settings(); Builder builder = settingsBuilder() .put("client.transport.nodes_sampler_interval", "1s") .put("name", TRANSPORT_CLIENT_PREFIX + node.settings().get("name")) .put("plugins." 
+ PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, false) .put(ClusterName.SETTING, clusterName).put("client.transport.sniff", sniff) .put("node.mode", nodeSettings.get("node.mode", NODE_MODE)) .put("node.local", nodeSettings.get("node.local", "")) .put("logger.prefix", nodeSettings.get("logger.prefix", "")) .put("logger.level", nodeSettings.get("logger.level", "INFO")) .put("config.ignore_system_properties", true) .put(settings); TransportClient client = new TransportClient(builder.build()); client.addTransportAddress(addr); return client; } } @Override public synchronized void beforeTest(Random random, double transportClientRatio) throws IOException { super.beforeTest(random, transportClientRatio); reset(true); } private synchronized void reset(boolean wipeData) throws IOException { // clear all rules for mock transport services for (NodeAndClient nodeAndClient : nodes.values()) { TransportService transportService = nodeAndClient.node.injector().getInstance(TransportService.class); if (transportService instanceof MockTransportService) { ((MockTransportService) transportService).clearAllRules(); } } randomlyResetClients(); if (wipeData) { wipeDataDirectories(); } if (nextNodeId.get() == sharedNodesSeeds.length && nodes.size() == sharedNodesSeeds.length) { logger.debug("Cluster hasn't changed - moving out - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); return; } logger.debug("Cluster is NOT consistent - restarting shared nodes - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); Set<NodeAndClient> sharedNodes = new HashSet<>(); assert sharedNodesSeeds.length == numSharedDataNodes + numSharedClientNodes; boolean changed = false; for (int i = 0; i < numSharedDataNodes; i++) { String buildNodeName = buildNodeName(i); NodeAndClient nodeAndClient = nodes.get(buildNodeName); if (nodeAndClient == null) { changed = true; nodeAndClient = buildNode(i, sharedNodesSeeds[i], null, Version.CURRENT); nodeAndClient.node.start(); logger.info("Start Shared Node [{}] not shared", nodeAndClient.name); } sharedNodes.add(nodeAndClient); } for (int i = numSharedDataNodes; i < numSharedDataNodes + numSharedClientNodes; i++) { String buildNodeName = buildNodeName(i); NodeAndClient nodeAndClient = nodes.get(buildNodeName); if (nodeAndClient == null) { changed = true; Builder clientSettingsBuilder = ImmutableSettings.builder().put("node.client", true); if (enableRandomBenchNodes && usually(random)) { //client nodes might also be bench nodes clientSettingsBuilder.put("node.bench", true); } nodeAndClient = buildNode(i, sharedNodesSeeds[i], clientSettingsBuilder.build(), Version.CURRENT); nodeAndClient.node.start(); logger.info("Start Shared Node [{}] not shared", nodeAndClient.name); } sharedNodes.add(nodeAndClient); } if (!changed && sharedNodes.size() == nodes.size()) { logger.debug("Cluster is consistent - moving out - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); if (size() > 0) { client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(sharedNodesSeeds.length)).get(); } return; // we are consistent - return } for (NodeAndClient nodeAndClient : sharedNodes) { nodes.remove(nodeAndClient.name); } // trash the remaining nodes final Collection<NodeAndClient> toShutDown = nodes.values(); for (NodeAndClient nodeAndClient : toShutDown) { logger.debug("Close Node [{}] not shared", nodeAndClient.name); 
nodeAndClient.close(); } nodes.clear(); for (NodeAndClient nodeAndClient : sharedNodes) { publishNode(nodeAndClient); } nextNodeId.set(sharedNodesSeeds.length); assert size() == sharedNodesSeeds.length; if (size() > 0) { client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(sharedNodesSeeds.length)).get(); } logger.debug("Cluster is consistent again - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); } @Override public synchronized void afterTest() throws IOException { wipeDataDirectories(); randomlyResetClients(); /* reset all clients - each test gets its own client based on the Random instance created above. */ } private void randomlyResetClients() throws IOException { // only reset the clients on nightly tests, it causes heavy load... if (RandomizedTest.isNightly() && rarely(random)) { final Collection<NodeAndClient> nodesAndClients = nodes.values(); for (NodeAndClient nodeAndClient : nodesAndClients) { nodeAndClient.resetClient(); } } } private void wipeDataDirectories() { if (!dataDirToClean.isEmpty()) { try { for (Path path : dataDirToClean) { try { FileSystemUtils.deleteSubDirectories(path); logger.info("Successfully wiped data directory for node location: {}", path); } catch (IOException e) { logger.info("Failed to wipe data directory for node location: {}", path); } } } finally { dataDirToClean.clear(); } } } /** * Returns a reference to a random nodes {@link ClusterService} */ public synchronized ClusterService clusterService() { return getInstance(ClusterService.class); } /** * Returns an Iterable to all instances for the given class &gt;T&lt; across all nodes in the cluster. */ public synchronized <T> Iterable<T> getInstances(Class<T> clazz) { List<T> instances = new ArrayList<>(nodes.size()); for (NodeAndClient nodeAndClient : nodes.values()) { instances.add(getInstanceFromNode(clazz, nodeAndClient.node)); } return instances; } /** * Returns an Iterable to all instances for the given class &gt;T&lt; across all data nodes in the cluster. 
*/ public synchronized <T> Iterable<T> getDataNodeInstances(Class<T> clazz) { return getInstances(clazz, new DataNodePredicate()); } private synchronized <T> Iterable<T> getInstances(Class<T> clazz, Predicate<NodeAndClient> predicate) { Iterable<NodeAndClient> filteredNodes = Iterables.filter(nodes.values(), predicate); List<T> instances = new ArrayList<>(); for (NodeAndClient nodeAndClient : filteredNodes) { instances.add(getInstanceFromNode(clazz, nodeAndClient.node)); } return instances; } /** * Returns a reference to the given nodes instances of the given class &gt;T&lt; */ public synchronized <T> T getInstance(Class<T> clazz, final String node) { final Predicate<InternalTestCluster.NodeAndClient> predicate; if (node != null) { predicate = new Predicate<InternalTestCluster.NodeAndClient>() { public boolean apply(NodeAndClient nodeAndClient) { return node.equals(nodeAndClient.name); } }; } else { predicate = Predicates.alwaysTrue(); } return getInstance(clazz, predicate); } public synchronized <T> T getDataNodeInstance(Class<T> clazz) { return getInstance(clazz, new DataNodePredicate()); } private synchronized <T> T getInstance(Class<T> clazz, Predicate<NodeAndClient> predicate) { NodeAndClient randomNodeAndClient = getRandomNodeAndClient(predicate); assert randomNodeAndClient != null; return getInstanceFromNode(clazz, randomNodeAndClient.node); } /** * Returns a reference to a random nodes instances of the given class &gt;T&lt; */ public synchronized <T> T getInstance(Class<T> clazz) { return getInstance(clazz, Predicates.<NodeAndClient>alwaysTrue()); } private synchronized <T> T getInstanceFromNode(Class<T> clazz, InternalNode node) { return node.injector().getInstance(clazz); } @Override public synchronized int size() { return this.nodes.size(); } @Override public InetSocketAddress[] httpAddresses() { List<InetSocketAddress> addresses = Lists.newArrayList(); for (HttpServerTransport httpServerTransport : getInstances(HttpServerTransport.class)) { addresses.add(((InetSocketTransportAddress) httpServerTransport.boundAddress().publishAddress()).address()); } return addresses.toArray(new InetSocketAddress[addresses.size()]); } /** * Stops a random data node in the cluster. Returns true if a node was found to stop, false otherwise. */ public synchronized boolean stopRandomDataNode() throws IOException { ensureOpen(); NodeAndClient nodeAndClient = getRandomNodeAndClient(new DataNodePredicate()); if (nodeAndClient != null) { logger.info("Closing random node [{}] ", nodeAndClient.name); removeDisruptionSchemeFromNode(nodeAndClient); nodes.remove(nodeAndClient.name); nodeAndClient.close(); return true; } return false; } /** * Stops a random node in the cluster that applies to the given filter or non if the non of the nodes applies to the * filter. 
*/ public synchronized void stopRandomNode(final Predicate<Settings> filter) throws IOException { ensureOpen(); NodeAndClient nodeAndClient = getRandomNodeAndClient(new Predicate<InternalTestCluster.NodeAndClient>() { @Override public boolean apply(NodeAndClient nodeAndClient) { return filter.apply(nodeAndClient.node.settings()); } }); if (nodeAndClient != null) { logger.info("Closing filtered random node [{}] ", nodeAndClient.name); removeDisruptionSchemeFromNode(nodeAndClient); nodes.remove(nodeAndClient.name); nodeAndClient.close(); } } /** * Stops the current master node forcefully */ public synchronized void stopCurrentMasterNode() throws IOException { ensureOpen(); assert size() > 0; String masterNodeName = getMasterName(); assert nodes.containsKey(masterNodeName); logger.info("Closing master node [{}] ", masterNodeName); removeDisruptionSchemeFromNode(nodes.get(masterNodeName)); NodeAndClient remove = nodes.remove(masterNodeName); remove.close(); } /** * Stops the any of the current nodes but not the master node. */ public void stopRandomNonMasterNode() throws IOException { NodeAndClient nodeAndClient = getRandomNodeAndClient(Predicates.not(new MasterNodePredicate(getMasterName()))); if (nodeAndClient != null) { logger.info("Closing random non master node [{}] current master [{}] ", nodeAndClient.name, getMasterName()); removeDisruptionSchemeFromNode(nodeAndClient); nodes.remove(nodeAndClient.name); nodeAndClient.close(); } } /** * Restarts a random node in the cluster */ public void restartRandomNode() throws Exception { restartRandomNode(EMPTY_CALLBACK); } /** * Restarts a random node in the cluster and calls the callback during restart. */ public void restartRandomNode(RestartCallback callback) throws Exception { restartRandomNode(Predicates.<NodeAndClient>alwaysTrue(), callback); } /** * Restarts a random data node in the cluster */ public void restartRandomDataNode() throws Exception { restartRandomNode(EMPTY_CALLBACK); } /** * Restarts a random data node in the cluster and calls the callback during restart. */ public void restartRandomDataNode(RestartCallback callback) throws Exception { restartRandomNode(new DataNodePredicate(), callback); } /** * Restarts a random node in the cluster and calls the callback during restart. 
*/ private void restartRandomNode(Predicate<NodeAndClient> predicate, RestartCallback callback) throws Exception { ensureOpen(); NodeAndClient nodeAndClient = getRandomNodeAndClient(predicate); if (nodeAndClient != null) { logger.info("Restarting random node [{}] ", nodeAndClient.name); nodeAndClient.restart(callback); } } private void restartAllNodes(boolean rollingRestart, RestartCallback callback) throws Exception { ensureOpen(); List<NodeAndClient> toRemove = new ArrayList<>(); try { for (NodeAndClient nodeAndClient : nodes.values()) { if (!callback.doRestart(nodeAndClient.name)) { logger.info("Closing node [{}] during restart", nodeAndClient.name); toRemove.add(nodeAndClient); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.close(); } } } finally { for (NodeAndClient nodeAndClient : toRemove) { nodes.remove(nodeAndClient.name); } } logger.info("Restarting remaining nodes rollingRestart [{}]", rollingRestart); if (rollingRestart) { int numNodesRestarted = 0; for (NodeAndClient nodeAndClient : nodes.values()) { callback.doAfterNodes(numNodesRestarted++, nodeAndClient.nodeClient()); logger.info("Restarting node [{}] ", nodeAndClient.name); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.restart(callback); if (activeDisruptionScheme != null) { activeDisruptionScheme.applyToNode(nodeAndClient.name, this); } } } else { int numNodesRestarted = 0; for (NodeAndClient nodeAndClient : nodes.values()) { callback.doAfterNodes(numNodesRestarted++, nodeAndClient.nodeClient()); logger.info("Stopping node [{}] ", nodeAndClient.name); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.closeNode(); } for (NodeAndClient nodeAndClient : nodes.values()) { logger.info("Starting node [{}] ", nodeAndClient.name); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.restart(callback); if (activeDisruptionScheme != null) { activeDisruptionScheme.applyToNode(nodeAndClient.name, this); } } } } private static final RestartCallback EMPTY_CALLBACK = new RestartCallback() { public Settings onNodeStopped(String node) { return null; } }; /** * Restarts all nodes in the cluster. It first stops all nodes and then restarts all the nodes again. */ public void fullRestart() throws Exception { fullRestart(EMPTY_CALLBACK); } /** * Restarts all nodes in a rolling restart fashion ie. only restarts on node a time. */ public void rollingRestart() throws Exception { rollingRestart(EMPTY_CALLBACK); } /** * Restarts all nodes in a rolling restart fashion ie. only restarts on node a time. */ public void rollingRestart(RestartCallback function) throws Exception { restartAllNodes(true, function); } /** * Restarts all nodes in the cluster. It first stops all nodes and then restarts all the nodes again. 
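* <p>
* A hypothetical sketch of a full restart that keeps data but applies an extra setting while each
* node is down (the setting key and the <code>cluster</code> handle are illustrative assumptions):
* <pre>
* cluster.fullRestart(new RestartCallback() {
*     public Settings onNodeStopped(String nodeName) throws Exception {
*         return ImmutableSettings.settingsBuilder().put("some.example.setting", true).build();
*     }
* });
* </pre>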
*/ public void fullRestart(RestartCallback function) throws Exception { restartAllNodes(false, function); } /** * get the name of the current master node */ public String getMasterName() { try { ClusterState state = client().admin().cluster().prepareState().execute().actionGet().getState(); return state.nodes().masterNode().name(); } catch (Throwable e) { logger.warn("Can't fetch cluster state", e); throw new RuntimeException("Can't get master node " + e.getMessage(), e); } } synchronized Set<String> allDataNodesButN(int numNodes) { return nRandomDataNodes(numDataNodes() - numNodes); } private synchronized Set<String> nRandomDataNodes(int numNodes) { assert size() >= numNodes; NavigableMap<String, NodeAndClient> dataNodes = Maps.filterEntries(nodes, new EntryNodePredicate(new DataNodePredicate())); return Sets.newHashSet(Iterators.limit(dataNodes.keySet().iterator(), numNodes)); } /** * Returns a set of nodes that have at least one shard of the given index. */ public synchronized Set<String> nodesInclude(String index) { if (clusterService().state().routingTable().hasIndex(index)) { List<ShardRouting> allShards = clusterService().state().routingTable().allShards(index); DiscoveryNodes discoveryNodes = clusterService().state().getNodes(); Set<String> nodes = new HashSet<>(); for (ShardRouting shardRouting : allShards) { if (shardRouting.assignedToNode()) { DiscoveryNode discoveryNode = discoveryNodes.get(shardRouting.currentNodeId()); nodes.add(discoveryNode.getName()); } } return nodes; } return Collections.emptySet(); } /** * Starts a node with default settings and returns it's name. */ public synchronized String startNode() { return startNode(ImmutableSettings.EMPTY, Version.CURRENT); } /** * Starts a node with default settings ad the specified version and returns it's name. */ public synchronized String startNode(Version version) { return startNode(ImmutableSettings.EMPTY, version); } /** * Starts a node with the given settings builder and returns it's name. */ public synchronized String startNode(Settings.Builder settings) { return startNode(settings.build(), Version.CURRENT); } /** * Starts a node with the given settings and returns it's name. */ public synchronized String startNode(Settings settings) { return startNode(settings, Version.CURRENT); } /** * Starts a node with the given settings and version and returns it's name. */ public synchronized String startNode(Settings settings, Version version) { NodeAndClient buildNode = buildNode(settings, version); buildNode.node().start(); publishNode(buildNode); return buildNode.name; } /** * Starts a node in an async manner with the given settings and returns future with its name. */ public synchronized ListenableFuture<String> startNodeAsync() { return startNodeAsync(ImmutableSettings.EMPTY, Version.CURRENT); } /** * Starts a node in an async manner with the given settings and returns future with its name. */ public synchronized ListenableFuture<String> startNodeAsync(final Settings settings) { return startNodeAsync(settings, Version.CURRENT); } /** * Starts a node in an async manner with the given settings and version and returns future with its name. 
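* <p>
* A hypothetical sketch (the bench setting and the <code>cluster</code> handle are illustrative
* assumptions):
* <pre>
* ListenableFuture&lt;String&gt; future = cluster.startNodeAsync(
*         ImmutableSettings.settingsBuilder().put("node.bench", true).build(), Version.CURRENT);
* String newNodeName = future.get(); // blocks until the node has started and been published
* </pre>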
*/ public synchronized ListenableFuture<String> startNodeAsync(final Settings settings, final Version version) { final SettableFuture<String> future = SettableFuture.create(); final NodeAndClient buildNode = buildNode(settings, version); Runnable startNode = new Runnable() { @Override public void run() { try { buildNode.node().start(); publishNode(buildNode); future.set(buildNode.name); } catch (Throwable t) { future.setException(t); } } }; executor.execute(startNode); return future; } /** * Starts multiple nodes in an async manner and returns future with its name. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final int numNodes) { return startNodesAsync(numNodes, ImmutableSettings.EMPTY, Version.CURRENT); } /** * Starts multiple nodes in an async manner with the given settings and returns future with its name. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final int numNodes, final Settings settings) { return startNodesAsync(numNodes, settings, Version.CURRENT); } /** * Starts multiple nodes in an async manner with the given settings and version and returns future with its name. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final int numNodes, final Settings settings, final Version version) { List<ListenableFuture<String>> futures = Lists.newArrayList(); for (int i = 0; i < numNodes; i++) { futures.add(startNodeAsync(settings, version)); } return Futures.allAsList(futures); } /** * Starts multiple nodes (based on the number of settings provided) in an async manner, with explicit settings for each node. * The order of the node names returned matches the order of the settings provided. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final Settings... settings) { List<ListenableFuture<String>> futures = Lists.newArrayList(); for (Settings setting : settings) { futures.add(startNodeAsync(setting, Version.CURRENT)); } return Futures.allAsList(futures); } private synchronized void publishNode(NodeAndClient nodeAndClient) { assert !nodeAndClient.node().isClosed(); NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, nodeAndClient.node); if (nodeEnv.hasNodeFile()) { dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataPaths())); } nodes.put(nodeAndClient.name, nodeAndClient); applyDisruptionSchemeToNode(nodeAndClient); } public void closeNonSharedNodes(boolean wipeData) throws IOException { reset(wipeData); } @Override public int numDataNodes() { return dataNodeAndClients().size(); } @Override public int numDataAndMasterNodes() { return dataAndMasterNodes().size(); } @Override public int numBenchNodes() { return benchNodeAndClients().size(); } @Override public boolean hasFilterCache() { return hasFilterCache; } public void setDisruptionScheme(ServiceDisruptionScheme scheme) { clearDisruptionScheme(); scheme.applyToCluster(this); activeDisruptionScheme = scheme; } public void clearDisruptionScheme() { if (activeDisruptionScheme != null) { TimeValue expectedHealingTime = activeDisruptionScheme.expectedTimeToHeal(); logger.info("Clearing active scheme {}, expected healing time {}", activeDisruptionScheme, expectedHealingTime); activeDisruptionScheme.removeFromCluster(this); // We don't what scheme is picked, certain schemes don't partition the cluster, but process slow, so we need // to to sleep, cluster health alone doesn't verify if these schemes have been cleared. 
if (expectedHealingTime != null && expectedHealingTime.millis() > 0) { try { Thread.sleep(expectedHealingTime.millis()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } assertFalse("cluster failed to form after disruption was healed", client().admin().cluster().prepareHealth() .setWaitForNodes("" + nodes.size()) .setWaitForRelocatingShards(0) .get().isTimedOut()); } activeDisruptionScheme = null; } private void applyDisruptionSchemeToNode(NodeAndClient nodeAndClient) { if (activeDisruptionScheme != null) { assert nodes.containsKey(nodeAndClient.name); activeDisruptionScheme.applyToNode(nodeAndClient.name, this); } } private void removeDisruptionSchemeFromNode(NodeAndClient nodeAndClient) { if (activeDisruptionScheme != null) { assert nodes.containsKey(nodeAndClient.name); activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } } private synchronized Collection<NodeAndClient> dataNodeAndClients() { return Collections2.filter(nodes.values(), new DataNodePredicate()); } private synchronized Collection<NodeAndClient> dataAndMasterNodes() { return Collections2.filter(nodes.values(), new DataOrMasterNodePredicate()); } private static final class DataNodePredicate implements Predicate<NodeAndClient> { @Override public boolean apply(NodeAndClient nodeAndClient) { return DiscoveryNode.dataNode(nodeAndClient.node.settings()); } } private static final class DataOrMasterNodePredicate implements Predicate<NodeAndClient> { @Override public boolean apply(NodeAndClient nodeAndClient) { return DiscoveryNode.dataNode(nodeAndClient.node.settings()) || DiscoveryNode.masterNode(nodeAndClient.node.settings()); } } private static final class MasterNodePredicate implements Predicate<NodeAndClient> { private final String masterNodeName; public MasterNodePredicate(String masterNodeName) { this.masterNodeName = masterNodeName; } @Override public boolean apply(NodeAndClient nodeAndClient) { return masterNodeName.equals(nodeAndClient.name); } } private static final class ClientNodePredicate implements Predicate<NodeAndClient> { @Override public boolean apply(NodeAndClient nodeAndClient) { return DiscoveryNode.clientNode(nodeAndClient.node.settings()); } } private synchronized Collection<NodeAndClient> benchNodeAndClients() { return Collections2.filter(nodes.values(), new BenchNodePredicate()); } private static final class BenchNodePredicate implements Predicate<NodeAndClient> { @Override public boolean apply(NodeAndClient nodeAndClient) { return nodeAndClient.node.settings().getAsBoolean("node.bench", false); } } private static final class EntryNodePredicate implements Predicate<Map.Entry<String, NodeAndClient>> { private final Predicate<NodeAndClient> delegateNodePredicate; EntryNodePredicate(Predicate<NodeAndClient> delegateNodePredicate) { this.delegateNodePredicate = delegateNodePredicate; } @Override public boolean apply(Map.Entry<String, NodeAndClient> entry) { return delegateNodePredicate.apply(entry.getValue()); } } @Override public synchronized Iterator<Client> iterator() { ensureOpen(); final Iterator<NodeAndClient> iterator = nodes.values().iterator(); return new Iterator<Client>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public Client next() { return iterator.next().client(random); } @Override public void remove() { throw new UnsupportedOperationException(""); } }; } /** * Returns a predicate that only accepts settings of nodes with one of the given names. */ public static Predicate<Settings> nameFilter(String... 
nodeName) { return new NodeNamePredicate(new HashSet<>(Arrays.asList(nodeName))); } private static final class NodeNamePredicate implements Predicate<Settings> { private final HashSet<String> nodeNames; public NodeNamePredicate(HashSet<String> nodeNames) { this.nodeNames = nodeNames; } @Override public boolean apply(Settings settings) { return nodeNames.contains(settings.get("name")); } } /** * An abstract class that is called during {@link #rollingRestart(InternalTestCluster.RestartCallback)} * and / or {@link #fullRestart(InternalTestCluster.RestartCallback)} to execute actions at certain * stages of the restart. */ public static abstract class RestartCallback { /** * Executed once the give node name has been stopped. */ public Settings onNodeStopped(String nodeName) throws Exception { return ImmutableSettings.EMPTY; } /** * Executed for each node before the <tt>n+1</tt> node is restarted. The given client is * an active client to the node that will be restarted next. */ public void doAfterNodes(int n, Client client) throws Exception { } /** * If this returns <code>true</code> all data for the node with the given node name will be cleared including * gateways and all index data. Returns <code>false</code> by default. */ public boolean clearData(String nodeName) { return false; } /** * If this returns <code>false</code> the node with the given node name will not be restarted. It will be * closed and removed from the cluster. Returns <code>true</code> by default. */ public boolean doRestart(String nodeName) { return true; } } public Settings getDefaultSettings() { return defaultSettings; } @Override public void ensureEstimatedStats() { if (size() > 0) { // Checks that the breakers have been reset without incurring a // network request, because a network request can increment one // of the breakers for (NodeAndClient nodeAndClient : nodes.values()) { final String name = nodeAndClient.name; final CircuitBreakerService breakerService = getInstanceFromNode(CircuitBreakerService.class, nodeAndClient.node); CircuitBreaker fdBreaker = breakerService.getBreaker(CircuitBreaker.Name.FIELDDATA); assertThat("Fielddata breaker not reset to 0 on node: " + name, fdBreaker.getUsed(), equalTo(0L)); // Anything that uses transport or HTTP can increase the // request breaker (because they use bigarrays), because of // that the breaker can sometimes be incremented from ping // requests from other clusters because Jenkins is running // multiple ES testing jobs in parallel on the same machine. 
// To combat this we check whether the breaker has reached 0 // in an assertBusy loop, so it will try for 10 seconds and // fail if it never reaches 0 try { assertBusy(new Runnable() { @Override public void run() { CircuitBreaker reqBreaker = breakerService.getBreaker(CircuitBreaker.Name.REQUEST); assertThat("Request breaker not reset to 0 on node: " + name, reqBreaker.getUsed(), equalTo(0L)); } }); } catch (Exception e) { fail("Exception during check for request breaker reset to 0: " + e); } NodeService nodeService = getInstanceFromNode(NodeService.class, nodeAndClient.node); NodeStats stats = nodeService.stats(CommonStatsFlags.ALL, false, false, false, false, false, false, false, false, false); assertThat("Fielddata size must be 0 on node: " + stats.getNode(), stats.getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L)); assertThat("Filter cache size must be 0 on node: " + stats.getNode(), stats.getIndices().getFilterCache().getMemorySizeInBytes(), equalTo(0L)); assertThat("FixedBitSet cache size must be 0 on node: " + stats.getNode(), stats.getIndices().getSegments().getFixedBitSetMemoryInBytes(), equalTo(0L)); } } } @Override public void assertAfterTest() throws IOException { super.assertAfterTest(); for (NodeEnvironment env : this.getInstances(NodeEnvironment.class)) { Set<ShardId> shardIds = env.lockedShards(); for (ShardId id : shardIds) { try { env.shardLock(id, TimeUnit.SECONDS.toMillis(5)).close(); } catch (IOException ex) { fail("Shard " + id + " is still locked after 5 seconds of waiting"); } } } } }
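/*
 * A minimal, hypothetical driver sketch for the cluster class above. All constructor arguments,
 * the seed and the health check shown here are illustrative assumptions, not part of the original
 * source; in practice the cluster is only driven through ElasticsearchIntegrationTest.
 *
 *   InternalTestCluster cluster = new InternalTestCluster(
 *           42L,              // clusterSeed
 *           2, 4,             // min / max shared data nodes
 *           "sketch-cluster", // clusterName
 *           1,                // numClientNodes
 *           false,            // enableRandomBenchNodes
 *           true,             // enableHttpPipelining
 *           0,                // jvmOrdinal
 *           "node_");         // nodePrefix
 *   cluster.beforeTest(new Random(42L), 0.0d); // ratio 0.0 => always node clients, never transport clients
 *   try {
 *       cluster.ensureAtLeastNumDataNodes(2);
 *       cluster.client().admin().cluster().prepareHealth().get();
 *   } finally {
 *       cluster.afterTest();
 *       cluster.close();
 *   }
 */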
src/test/java/org/elasticsearch/test/InternalTestCluster.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.SeedUtils; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.*; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import org.apache.lucene.store.Lock; import org.apache.lucene.store.NativeFSLockFactory; import org.apache.lucene.util.AbstractRandomizedTest; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.XIOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.cache.recycler.CacheRecycler; import org.elasticsearch.cache.recycler.PageCacheRecyclerModule; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.ImmutableSettings.Builder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArraysModule; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.cache.filter.FilterCacheModule; import 
org.elasticsearch.index.cache.filter.none.NoneFilterCache; import org.elasticsearch.index.cache.filter.weighted.WeightedFilterCache; import org.elasticsearch.index.engine.IndexEngineModule; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalNode; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.cache.recycler.MockBigArraysModule; import org.elasticsearch.test.cache.recycler.MockPageCacheRecyclerModule; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.engine.MockEngineModule; import org.elasticsearch.test.store.MockFSIndexStoreModule; import org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportModule; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.netty.NettyTransport; import org.junit.Assert; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.Path; import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static junit.framework.Assert.fail; import static org.apache.lucene.util.LuceneTestCase.rarely; import static org.apache.lucene.util.LuceneTestCase.usually; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.elasticsearch.node.NodeBuilder.nodeBuilder; import static org.elasticsearch.test.ElasticsearchTestCase.assertBusy; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; /** * InternalTestCluster manages a set of JVM private nodes and allows convenient access to them. * The cluster supports randomized configuration such that nodes started in the cluster will * automatically load asserting services tracking resources like file handles or open searchers. * <p> * The Cluster is bound to a test lifecycle where tests must call {@link #beforeTest(java.util.Random, double)} and * {@link #afterTest()} to initialize and reset the cluster in order to be more reproducible. The term "more" relates * to the async nature of Elasticsearch in combination with randomized testing. Once Threads and asynchronous calls * are involved reproducibility is very limited. This class should only be used through {@link ElasticsearchIntegrationTest}. * </p> */ public final class InternalTestCluster extends TestCluster { private final ESLogger logger = Loggers.getLogger(getClass()); static SettingsSource DEFAULT_SETTINGS_SOURCE = SettingsSource.EMPTY; /** * A boolean value to enable or disable mock modules. This is useful to test the * system without asserting modules that to make sure they don't hide any bugs in * production. 
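* <p>
* For example, mock modules could be disabled for a run via this system property (the exact build
* invocation below is an assumption, shown for illustration only):
* <pre>
* mvn test -Dtests.enable_mock_modules=false
* </pre>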
* * @see ElasticsearchIntegrationTest */ public static final String TESTS_ENABLE_MOCK_MODULES = "tests.enable_mock_modules"; /** * A node level setting that holds a per node random seed that is consistent across node restarts */ public static final String SETTING_CLUSTER_NODE_SEED = "test.cluster.node.seed"; private static final boolean ENABLE_MOCK_MODULES = RandomizedTest.systemPropertyAsBoolean(TESTS_ENABLE_MOCK_MODULES, true); static final int DEFAULT_MIN_NUM_DATA_NODES = 2; static final int DEFAULT_MAX_NUM_DATA_NODES = 6; static final int DEFAULT_NUM_CLIENT_NODES = -1; static final int DEFAULT_MIN_NUM_CLIENT_NODES = 0; static final int DEFAULT_MAX_NUM_CLIENT_NODES = 1; static final boolean DEFAULT_ENABLE_RANDOM_BENCH_NODES = true; static final boolean DEFAULT_ENABLE_HTTP_PIPELINING = true; public static final String NODE_MODE = nodeMode(); /* sorted map to make traverse order reproducible, concurrent since we do checks on it not within a sync block */ private final NavigableMap<String, NodeAndClient> nodes = new TreeMap<>(); private final Set<Path> dataDirToClean = new HashSet<>(); private final String clusterName; private final AtomicBoolean open = new AtomicBoolean(true); private final Settings defaultSettings; private AtomicInteger nextNodeId = new AtomicInteger(0); /* Each shared node has a node seed that is used to start up the node and get default settings * this is important if a node is randomly shut down in a test since the next test relies on a * fully shared cluster to be more reproducible */ private final long[] sharedNodesSeeds; private final int numSharedDataNodes; private final int numSharedClientNodes; private final boolean enableRandomBenchNodes; private final SettingsSource settingsSource; private final ExecutorService executor; private final boolean hasFilterCache; /** * All nodes started by the cluster will have their name set to nodePrefix followed by a positive number */ private final String nodePrefix; private ServiceDisruptionScheme activeDisruptionScheme; public InternalTestCluster(long clusterSeed, int minNumDataNodes, int maxNumDataNodes, String clusterName, int numClientNodes, boolean enableRandomBenchNodes, boolean enableHttpPipelining, int jvmOrdinal, String nodePrefix) { this(clusterSeed, minNumDataNodes, maxNumDataNodes, clusterName, DEFAULT_SETTINGS_SOURCE, numClientNodes, enableRandomBenchNodes, enableHttpPipelining, jvmOrdinal, nodePrefix); } public InternalTestCluster(long clusterSeed, int minNumDataNodes, int maxNumDataNodes, String clusterName, SettingsSource settingsSource, int numClientNodes, boolean enableRandomBenchNodes, boolean enableHttpPipelining, int jvmOrdinal, String nodePrefix) { super(clusterSeed); this.clusterName = clusterName; if (minNumDataNodes < 0 || maxNumDataNodes < 0) { throw new IllegalArgumentException("minimum and maximum number of data nodes must be >= 0"); } if (maxNumDataNodes < minNumDataNodes) { throw new IllegalArgumentException("maximum number of data nodes must be >= minimum number of data nodes"); } Random random = new Random(clusterSeed); this.numSharedDataNodes = RandomInts.randomIntBetween(random, minNumDataNodes, maxNumDataNodes); assert this.numSharedDataNodes >= 0; //for now all shared data nodes are also master eligible if (numSharedDataNodes == 0) { this.numSharedClientNodes = 0; } else { if (numClientNodes < 0) { this.numSharedClientNodes = RandomInts.randomIntBetween(random, DEFAULT_MIN_NUM_CLIENT_NODES, DEFAULT_MAX_NUM_CLIENT_NODES); } else { this.numSharedClientNodes = numClientNodes; } } assert 
this.numSharedClientNodes >= 0; this.enableRandomBenchNodes = enableRandomBenchNodes; this.nodePrefix = nodePrefix; assert nodePrefix != null; /* * TODO * - we might want start some master only nodes? * - we could add a flag that returns a client to the master all the time? * - we could add a flag that never returns a client to the master * - along those lines use a dedicated node that is master eligible and let all other nodes be only data nodes */ sharedNodesSeeds = new long[numSharedDataNodes + numSharedClientNodes]; for (int i = 0; i < sharedNodesSeeds.length; i++) { sharedNodesSeeds[i] = random.nextLong(); } logger.info("Setup InternalTestCluster [{}] with seed [{}] using [{}] data nodes and [{}] client nodes", clusterName, SeedUtils.formatSeed(clusterSeed), numSharedDataNodes, numSharedClientNodes); this.settingsSource = settingsSource; Builder builder = ImmutableSettings.settingsBuilder(); if (random.nextInt(5) == 0) { // sometimes set this // randomize (multi/single) data path, special case for 0, don't set it at all... final int numOfDataPaths = random.nextInt(5); if (numOfDataPaths > 0) { StringBuilder dataPath = new StringBuilder(); for (int i = 0; i < numOfDataPaths; i++) { dataPath.append(new File("data/d" + i).getAbsolutePath()).append(','); } builder.put("path.data", dataPath.toString()); } } final int basePort = 9300 + (100 * (jvmOrdinal+1)); builder.put("transport.tcp.port", basePort + "-" + (basePort+100)); builder.put("http.port", basePort+101 + "-" + (basePort+200)); builder.put("config.ignore_system_properties", true); builder.put("node.mode", NODE_MODE); builder.put("script.disable_dynamic", false); builder.put("http.pipelining", enableHttpPipelining); builder.put("plugins." + PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, false); if (Strings.hasLength(System.getProperty("es.logger.level"))) { builder.put("logger.level", System.getProperty("es.logger.level")); } if (Strings.hasLength(System.getProperty("es.logger.prefix"))) { builder.put("logger.prefix", System.getProperty("es.logger.level")); } // Default the watermarks to absurdly low to prevent the tests // from failing on nodes without enough disk space builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "1b"); builder.put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "1b"); defaultSettings = builder.build(); executor = EsExecutors.newCached(0, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test_" + clusterName)); this.hasFilterCache = random.nextBoolean(); } public static String nodeMode() { Builder builder = ImmutableSettings.builder(); if (Strings.isEmpty(System.getProperty("es.node.mode")) && Strings.isEmpty(System.getProperty("es.node.local"))) { return "local"; // default if nothing is specified } if (Strings.hasLength(System.getProperty("es.node.mode"))) { builder.put("node.mode", System.getProperty("es.node.mode")); } if (Strings.hasLength(System.getProperty("es.node.local"))) { builder.put("node.local", System.getProperty("es.node.local")); } if (DiscoveryNode.localNode(builder.build())) { return "local"; } else { return "network"; } } @Override public String getClusterName() { return clusterName; } public String[] getNodeNames() { return nodes.keySet().toArray(Strings.EMPTY_ARRAY); } private static boolean isLocalTransportConfigured() { if ("local".equals(System.getProperty("es.node.mode", "network"))) { return true; } return Boolean.parseBoolean(System.getProperty("es.node.local", "false")); } private Settings getSettings(int nodeOrdinal, long nodeSeed, 
Settings others) { Builder builder = ImmutableSettings.settingsBuilder().put(defaultSettings) .put(getRandomNodeSettings(nodeSeed)) .put(FilterCacheModule.FilterCacheSettings.FILTER_CACHE_TYPE, hasFilterCache() ? WeightedFilterCache.class : NoneFilterCache.class); Settings settings = settingsSource.node(nodeOrdinal); if (settings != null) { if (settings.get(ClusterName.SETTING) != null) { throw new ElasticsearchIllegalStateException("Tests must not set a '" + ClusterName.SETTING + "' as a node setting set '" + ClusterName.SETTING + "': [" + settings.get(ClusterName.SETTING) + "]"); } builder.put(settings); } if (others != null) { builder.put(others); } builder.put(ClusterName.SETTING, clusterName); return builder.build(); } private static Settings getRandomNodeSettings(long seed) { Random random = new Random(seed); Builder builder = ImmutableSettings.settingsBuilder() /* use RAM directories in 10% of the runs */ //.put("index.store.type", random.nextInt(10) == 0 ? MockRamIndexStoreModule.class.getName() : MockFSIndexStoreModule.class.getName()) // decrease the routing schedule so new nodes will be added quickly - some random value between 30 and 80 ms .put("cluster.routing.schedule", (30 + random.nextInt(50)) + "ms") // default to non gateway .put("gateway.type", "none") .put(SETTING_CLUSTER_NODE_SEED, seed); if (ENABLE_MOCK_MODULES && usually(random)) { builder.put("index.store.type", MockFSIndexStoreModule.class.getName()); // no RAM dir for now! builder.put(IndexEngineModule.EngineSettings.ENGINE_TYPE, MockEngineModule.class.getName()); builder.put(PageCacheRecyclerModule.CACHE_IMPL, MockPageCacheRecyclerModule.class.getName()); builder.put(BigArraysModule.IMPL, MockBigArraysModule.class.getName()); builder.put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, MockTransportService.class.getName()); } if (isLocalTransportConfigured()) { builder.put(TransportModule.TRANSPORT_TYPE_KEY, AssertingLocalTransport.class.getName()); } else { builder.put(Transport.TransportSettings.TRANSPORT_TCP_COMPRESS, rarely(random)); } builder.put("type", RandomPicks.randomFrom(random, CacheRecycler.Type.values())); if (random.nextBoolean()) { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, CacheRecycler.Type.values())); } if (random.nextInt(10) == 0) { // 10% of the nodes have a very frequent check interval builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueMillis(10 + random.nextInt(2000))); } else if (random.nextInt(10) != 0) { // 90% of the time - 10% of the time we don't set anything builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueSeconds(10 + random.nextInt(5 * 60))); } if (random.nextBoolean()) { // sometimes set a builder.put(SearchService.DEFAUTL_KEEPALIVE_KEY, TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60))); } if (random.nextBoolean()) { // change threadpool types to make sure we don't have components that rely on the type of thread pools for (String name : Arrays.asList(ThreadPool.Names.BULK, ThreadPool.Names.FLUSH, ThreadPool.Names.GET, ThreadPool.Names.INDEX, ThreadPool.Names.MANAGEMENT, ThreadPool.Names.MERGE, ThreadPool.Names.OPTIMIZE, ThreadPool.Names.PERCOLATE, ThreadPool.Names.REFRESH, ThreadPool.Names.SEARCH, ThreadPool.Names.SNAPSHOT, ThreadPool.Names.SUGGEST, ThreadPool.Names.WARMER)) { if (random.nextBoolean()) { final String type = RandomPicks.randomFrom(random, Arrays.asList("fixed", "cached", "scaling")); builder.put(ThreadPool.THREADPOOL_GROUP + name + ".type", type); } } } if (random.nextInt(10) == 0) { 
builder.put(EsExecutors.PROCESSORS, 1 + random.nextInt(AbstractRandomizedTest.TESTS_PROCESSORS)); } else { builder.put(EsExecutors.PROCESSORS, AbstractRandomizedTest.TESTS_PROCESSORS); } if (random.nextBoolean()) { if (random.nextBoolean()) { builder.put("indices.fielddata.cache.size", 1 + random.nextInt(1000), ByteSizeUnit.MB); } if (random.nextBoolean()) { builder.put("indices.fielddata.cache.expire", TimeValue.timeValueMillis(1 + random.nextInt(10000))); } } // randomize netty settings if (random.nextBoolean()) { builder.put(NettyTransport.WORKER_COUNT, random.nextInt(3) + 1); builder.put(NettyTransport.CONNECTIONS_PER_NODE_RECOVERY, random.nextInt(2) + 1); builder.put(NettyTransport.CONNECTIONS_PER_NODE_BULK, random.nextInt(3) + 1); builder.put(NettyTransport.CONNECTIONS_PER_NODE_REG, random.nextInt(6) + 1); } if (random.nextBoolean()) { builder.put(MappingUpdatedAction.INDICES_MAPPING_ADDITIONAL_MAPPING_CHANGE_TIME, RandomInts.randomIntBetween(random, 0, 500) /*milliseconds*/); } if (random.nextInt(10) == 0) { builder.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, "noop"); builder.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, "noop"); } return builder.build(); } public static String clusterName(String prefix, String childVMId, long clusterSeed) { StringBuilder builder = new StringBuilder(prefix); builder.append('-').append(NetworkUtils.getLocalHostName("__default_host__")); builder.append("-CHILD_VM=[").append(childVMId).append(']'); builder.append("-CLUSTER_SEED=[").append(clusterSeed).append(']'); // if multiple maven task run on a single host we better have an identifier that doesn't rely on input params builder.append("-HASH=[").append(SeedUtils.formatSeed(System.nanoTime())).append(']'); return builder.toString(); } private void ensureOpen() { if (!open.get()) { throw new RuntimeException("Cluster is already closed"); } } private synchronized NodeAndClient getOrBuildRandomNode() { ensureOpen(); NodeAndClient randomNodeAndClient = getRandomNodeAndClient(); if (randomNodeAndClient != null) { return randomNodeAndClient; } NodeAndClient buildNode = buildNode(); buildNode.node().start(); publishNode(buildNode); return buildNode; } private synchronized NodeAndClient getRandomNodeAndClient() { Predicate<NodeAndClient> all = Predicates.alwaysTrue(); return getRandomNodeAndClient(all); } private synchronized NodeAndClient getRandomNodeAndClient(Predicate<NodeAndClient> predicate) { ensureOpen(); Collection<NodeAndClient> values = Collections2.filter(nodes.values(), predicate); if (!values.isEmpty()) { int whichOne = random.nextInt(values.size()); for (NodeAndClient nodeAndClient : values) { if (whichOne-- == 0) { return nodeAndClient; } } } return null; } /** * Ensures that at least <code>n</code> data nodes are present in the cluster. * if more nodes than <code>n</code> are present this method will not * stop any of the running nodes. 
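* <p>
* A hypothetical sketch (the <code>cluster</code> handle is an illustrative assumption):
* <pre>
* cluster.ensureAtLeastNumDataNodes(3); // starts nodes only if fewer than 3 data nodes are running
* </pre>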
*/ public void ensureAtLeastNumDataNodes(int n) { List<ListenableFuture<String>> futures = Lists.newArrayList(); synchronized (this) { int size = numDataNodes(); for (int i = size; i < n; i++) { logger.info("increasing cluster size from {} to {}", size, n); futures.add(startNodeAsync()); } } try { Futures.allAsList(futures).get(); } catch (Exception e) { throw new ElasticsearchException("failed to start nodes", e); } if (!futures.isEmpty()) { synchronized (this) { assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(nodes.size())).get()); } } } /** * Ensures that at most <code>n</code> are up and running. * If less nodes that <code>n</code> are running this method * will not start any additional nodes. */ public synchronized void ensureAtMostNumDataNodes(int n) throws IOException { int size = numDataNodes(); if (size <= n) { return; } // prevent killing the master if possible and client nodes final Iterator<NodeAndClient> values = n == 0 ? nodes.values().iterator() : Iterators.filter(nodes.values().iterator(), Predicates.and(new DataNodePredicate(), Predicates.not(new MasterNodePredicate(getMasterName())))); final Iterator<NodeAndClient> limit = Iterators.limit(values, size - n); logger.info("changing cluster size from {} to {}, {} data nodes", size(), n + numSharedClientNodes, n); Set<NodeAndClient> nodesToRemove = new HashSet<>(); while (limit.hasNext()) { NodeAndClient next = limit.next(); nodesToRemove.add(next); removeDisruptionSchemeFromNode(next); next.close(); } for (NodeAndClient toRemove : nodesToRemove) { nodes.remove(toRemove.name); } if (!nodesToRemove.isEmpty() && size() > 0) { assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(nodes.size())).get()); } } private NodeAndClient buildNode(Settings settings, Version version) { int ord = nextNodeId.getAndIncrement(); return buildNode(ord, random.nextLong(), settings, version); } private NodeAndClient buildNode() { int ord = nextNodeId.getAndIncrement(); return buildNode(ord, random.nextLong(), null, Version.CURRENT); } private NodeAndClient buildNode(int nodeId, long seed, Settings settings, Version version) { assert Thread.holdsLock(this); ensureOpen(); settings = getSettings(nodeId, seed, settings); String name = buildNodeName(nodeId); assert !nodes.containsKey(name); Settings finalSettings = settingsBuilder() .put(settings) .put("name", name) .put("discovery.id.seed", seed) .put("tests.mock.version", version) .build(); Node node = nodeBuilder().settings(finalSettings).build(); return new NodeAndClient(name, node); } private String buildNodeName(int id) { return nodePrefix + id; } /** * Returns the common node name prefix for this test cluster. */ public String nodePrefix() { return nodePrefix; } @Override public synchronized Client client() { ensureOpen(); /* Randomly return a client to one of the nodes in the cluster */ return getOrBuildRandomNode().client(random); } /** * Returns a node client to a data node in the cluster. * Note: use this with care tests should not rely on a certain nodes client. */ public synchronized Client dataNodeClient() { ensureOpen(); /* Randomly return a client to one of the nodes in the cluster */ return getRandomNodeAndClient(new DataNodePredicate()).client(random); } /** * Returns a node client to the current master node. * Note: use this with care tests should not rely on a certain nodes client. 
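* <p>
* A hypothetical sketch (the <code>cluster</code> handle is an illustrative assumption):
* <pre>
* String masterName = cluster.getMasterName();
* Client master = cluster.masterClient();     // node client bound to the elected master
* Client other  = cluster.nonMasterClient();  // node client bound to any non-master node
* </pre>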
*/ public synchronized Client masterClient() { ensureOpen(); NodeAndClient randomNodeAndClient = getRandomNodeAndClient(new MasterNodePredicate(getMasterName())); if (randomNodeAndClient != null) { return randomNodeAndClient.nodeClient(); // ensure node client master is requested } Assert.fail("No master client found"); return null; // can't happen } /** * Returns a node client to random node but not the master. This method will fail if no non-master client is available. */ public synchronized Client nonMasterClient() { ensureOpen(); NodeAndClient randomNodeAndClient = getRandomNodeAndClient(Predicates.not(new MasterNodePredicate(getMasterName()))); if (randomNodeAndClient != null) { return randomNodeAndClient.nodeClient(); // ensure node client non-master is requested } Assert.fail("No non-master client found"); return null; // can't happen } /** * Returns a client to a node started with "node.client: true" */ public synchronized Client clientNodeClient() { ensureOpen(); NodeAndClient randomNodeAndClient = getRandomNodeAndClient(new ClientNodePredicate()); if (randomNodeAndClient != null) { return randomNodeAndClient.client(random); } int nodeId = nextNodeId.getAndIncrement(); Settings settings = getSettings(nodeId, random.nextLong(), ImmutableSettings.EMPTY); startNodeClient(settings); return getRandomNodeAndClient(new ClientNodePredicate()).client(random); } public synchronized Client startNodeClient(Settings settings) { ensureOpen(); // currently unused Builder builder = settingsBuilder().put(settings).put("node.client", true); if (size() == 0) { // if we are the first node - don't wait for a state builder.put("discovery.initial_state_timeout", 0); } String name = startNode(builder); return nodes.get(name).nodeClient(); } /** * Returns a transport client */ public synchronized Client transportClient() { ensureOpen(); // randomly return a transport client going to one of the nodes in the cluster return getOrBuildRandomNode().transportClient(); } /** * Returns a node client to a given node. */ public synchronized Client client(String nodeName) { ensureOpen(); NodeAndClient nodeAndClient = nodes.get(nodeName); if (nodeAndClient != null) { return nodeAndClient.client(random); } Assert.fail("No node found with name: [" + nodeName + "]"); return null; // can't happen } /** * Returns a "smart" node client to a random node in the cluster */ public synchronized Client smartClient() { NodeAndClient randomNodeAndClient = getRandomNodeAndClient(); if (randomNodeAndClient != null) { return randomNodeAndClient.nodeClient(); } Assert.fail("No smart client found"); return null; // can't happen } /** * Returns a random node that applies to the given predicate. * The predicate can filter nodes based on the nodes settings. 
* If all nodes are filtered out this method will return <code>null</code> */ public synchronized Client client(final Predicate<Settings> filterPredicate) { ensureOpen(); final NodeAndClient randomNodeAndClient = getRandomNodeAndClient(new Predicate<NodeAndClient>() { @Override public boolean apply(NodeAndClient nodeAndClient) { return filterPredicate.apply(nodeAndClient.node.settings()); } }); if (randomNodeAndClient != null) { return randomNodeAndClient.client(random); } return null; } @Override public void close() { if (this.open.compareAndSet(true, false)) { if (activeDisruptionScheme != null) { activeDisruptionScheme.testClusterClosed(); activeDisruptionScheme = null; } IOUtils.closeWhileHandlingException(nodes.values()); nodes.clear(); executor.shutdownNow(); } } private final class NodeAndClient implements Closeable { private InternalNode node; private Client nodeClient; private Client transportClient; private final AtomicBoolean closed = new AtomicBoolean(false); private final String name; NodeAndClient(String name, Node node) { this.node = (InternalNode) node; this.name = name; } Node node() { if (closed.get()) { throw new RuntimeException("already closed"); } return node; } Client client(Random random) { if (closed.get()) { throw new RuntimeException("already closed"); } double nextDouble = random.nextDouble(); if (nextDouble < transportClientRatio) { if (logger.isTraceEnabled()) { logger.trace("Using transport client for node [{}] sniff: [{}]", node.settings().get("name"), false); } return getOrBuildTransportClient(); } else { return getOrBuildNodeClient(); } } Client nodeClient() { if (closed.get()) { throw new RuntimeException("already closed"); } return getOrBuildNodeClient(); } Client transportClient() { if (closed.get()) { throw new RuntimeException("already closed"); } return getOrBuildTransportClient(); } private Client getOrBuildNodeClient() { if (nodeClient != null) { return nodeClient; } return nodeClient = node.client(); } private Client getOrBuildTransportClient() { if (transportClient != null) { return transportClient; } /* no sniff client for now - doesn't work will all tests since it might throw NoNodeAvailableException if nodes are shut down. 
* we first need support of transportClientRatio as annotations or so */ return transportClient = TransportClientFactory.noSniff(settingsSource.transportClient()).client(node, clusterName); } void resetClient() throws IOException { if (closed.get()) { throw new RuntimeException("already closed"); } Releasables.close(nodeClient, transportClient); nodeClient = null; transportClient = null; } void closeNode() { registerDataPath(); node.close(); } void restart(RestartCallback callback) throws Exception { assert callback != null; resetClient(); if (!node.isClosed()) { closeNode(); } Settings newSettings = callback.onNodeStopped(name); if (newSettings == null) { newSettings = ImmutableSettings.EMPTY; } if (callback.clearData(name)) { NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, node); if (nodeEnv.hasNodeFile()) { XIOUtils.rm(nodeEnv.nodeDataPaths()); } } node = (InternalNode) nodeBuilder().settings(node.settings()).settings(newSettings).node(); } void registerDataPath() { NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, node); if (nodeEnv.hasNodeFile()) { dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataPaths())); } } @Override public void close() throws IOException { resetClient(); closed.set(true); closeNode(); } } public static final String TRANSPORT_CLIENT_PREFIX = "transport_client_"; static class TransportClientFactory { private static TransportClientFactory NO_SNIFF_CLIENT_FACTORY = new TransportClientFactory(false, ImmutableSettings.EMPTY); private static TransportClientFactory SNIFF_CLIENT_FACTORY = new TransportClientFactory(true, ImmutableSettings.EMPTY); private final boolean sniff; private final Settings settings; public static TransportClientFactory noSniff(Settings settings) { if (settings == null || settings.names().isEmpty()) { return NO_SNIFF_CLIENT_FACTORY; } return new TransportClientFactory(false, settings); } public static TransportClientFactory sniff(Settings settings) { if (settings == null || settings.names().isEmpty()) { return SNIFF_CLIENT_FACTORY; } return new TransportClientFactory(true, settings); } TransportClientFactory(boolean sniff, Settings settings) { this.sniff = sniff; this.settings = settings != null ? settings : ImmutableSettings.EMPTY; } public Client client(Node node, String clusterName) { TransportAddress addr = ((InternalNode) node).injector().getInstance(TransportService.class).boundAddress().publishAddress(); Settings nodeSettings = node.settings(); Builder builder = settingsBuilder() .put("client.transport.nodes_sampler_interval", "1s") .put("name", TRANSPORT_CLIENT_PREFIX + node.settings().get("name")) .put("plugins." 
+ PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, false) .put(ClusterName.SETTING, clusterName).put("client.transport.sniff", sniff) .put("node.mode", nodeSettings.get("node.mode", NODE_MODE)) .put("node.local", nodeSettings.get("node.local", "")) .put("logger.prefix", nodeSettings.get("logger.prefix", "")) .put("logger.level", nodeSettings.get("logger.level", "INFO")) .put("config.ignore_system_properties", true) .put(settings); TransportClient client = new TransportClient(builder.build()); client.addTransportAddress(addr); return client; } } @Override public synchronized void beforeTest(Random random, double transportClientRatio) throws IOException { super.beforeTest(random, transportClientRatio); reset(true); } private synchronized void reset(boolean wipeData) throws IOException { // clear all rules for mock transport services for (NodeAndClient nodeAndClient : nodes.values()) { TransportService transportService = nodeAndClient.node.injector().getInstance(TransportService.class); if (transportService instanceof MockTransportService) { ((MockTransportService) transportService).clearAllRules(); } } randomlyResetClients(); if (wipeData) { wipeDataDirectories(); } if (nextNodeId.get() == sharedNodesSeeds.length && nodes.size() == sharedNodesSeeds.length) { logger.debug("Cluster hasn't changed - moving out - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); return; } logger.debug("Cluster is NOT consistent - restarting shared nodes - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); Set<NodeAndClient> sharedNodes = new HashSet<>(); assert sharedNodesSeeds.length == numSharedDataNodes + numSharedClientNodes; boolean changed = false; for (int i = 0; i < numSharedDataNodes; i++) { String buildNodeName = buildNodeName(i); NodeAndClient nodeAndClient = nodes.get(buildNodeName); if (nodeAndClient == null) { changed = true; nodeAndClient = buildNode(i, sharedNodesSeeds[i], null, Version.CURRENT); nodeAndClient.node.start(); logger.info("Start Shared Node [{}] not shared", nodeAndClient.name); } sharedNodes.add(nodeAndClient); } for (int i = numSharedDataNodes; i < numSharedDataNodes + numSharedClientNodes; i++) { String buildNodeName = buildNodeName(i); NodeAndClient nodeAndClient = nodes.get(buildNodeName); if (nodeAndClient == null) { changed = true; Builder clientSettingsBuilder = ImmutableSettings.builder().put("node.client", true); if (enableRandomBenchNodes && usually(random)) { //client nodes might also be bench nodes clientSettingsBuilder.put("node.bench", true); } nodeAndClient = buildNode(i, sharedNodesSeeds[i], clientSettingsBuilder.build(), Version.CURRENT); nodeAndClient.node.start(); logger.info("Start Shared Node [{}] not shared", nodeAndClient.name); } sharedNodes.add(nodeAndClient); } if (!changed && sharedNodes.size() == nodes.size()) { logger.debug("Cluster is consistent - moving out - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); if (size() > 0) { client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(sharedNodesSeeds.length)).get(); } return; // we are consistent - return } for (NodeAndClient nodeAndClient : sharedNodes) { nodes.remove(nodeAndClient.name); } // trash the remaining nodes final Collection<NodeAndClient> toShutDown = nodes.values(); for (NodeAndClient nodeAndClient : toShutDown) { logger.debug("Close Node [{}] not shared", nodeAndClient.name); 
nodeAndClient.close(); } nodes.clear(); for (NodeAndClient nodeAndClient : sharedNodes) { publishNode(nodeAndClient); } nextNodeId.set(sharedNodesSeeds.length); assert size() == sharedNodesSeeds.length; if (size() > 0) { client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(sharedNodesSeeds.length)).get(); } logger.debug("Cluster is consistent again - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); } @Override public synchronized void afterTest() throws IOException { wipeDataDirectories(); randomlyResetClients(); /* reset all clients - each test gets its own client based on the Random instance created above. */ } private void randomlyResetClients() throws IOException { // only reset the clients on nightly tests, it causes heavy load... if (RandomizedTest.isNightly() && rarely(random)) { final Collection<NodeAndClient> nodesAndClients = nodes.values(); for (NodeAndClient nodeAndClient : nodesAndClients) { nodeAndClient.resetClient(); } } } private void wipeDataDirectories() { if (!dataDirToClean.isEmpty()) { try { for (Path path : dataDirToClean) { try { FileSystemUtils.deleteSubDirectories(path); logger.info("Successfully wiped data directory for node location: {}", path); } catch (IOException e) { logger.info("Failed to wipe data directory for node location: {}", path); } } } finally { dataDirToClean.clear(); } } } /** * Returns a reference to a random nodes {@link ClusterService} */ public synchronized ClusterService clusterService() { return getInstance(ClusterService.class); } /** * Returns an Iterable to all instances for the given class &gt;T&lt; across all nodes in the cluster. */ public synchronized <T> Iterable<T> getInstances(Class<T> clazz) { List<T> instances = new ArrayList<>(nodes.size()); for (NodeAndClient nodeAndClient : nodes.values()) { instances.add(getInstanceFromNode(clazz, nodeAndClient.node)); } return instances; } /** * Returns an Iterable to all instances for the given class &gt;T&lt; across all data nodes in the cluster. 
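* <p>
* A hypothetical sketch (the <code>cluster</code> handle is an illustrative assumption):
* <pre>
* for (ClusterService clusterService : cluster.getDataNodeInstances(ClusterService.class)) {
*     // inspect the cluster state as seen by each data node here
* }
* </pre>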
*/ public synchronized <T> Iterable<T> getDataNodeInstances(Class<T> clazz) { return getInstances(clazz, new DataNodePredicate()); } private synchronized <T> Iterable<T> getInstances(Class<T> clazz, Predicate<NodeAndClient> predicate) { Iterable<NodeAndClient> filteredNodes = Iterables.filter(nodes.values(), predicate); List<T> instances = new ArrayList<>(); for (NodeAndClient nodeAndClient : filteredNodes) { instances.add(getInstanceFromNode(clazz, nodeAndClient.node)); } return instances; } /** * Returns a reference to the given nodes instances of the given class &gt;T&lt; */ public synchronized <T> T getInstance(Class<T> clazz, final String node) { final Predicate<InternalTestCluster.NodeAndClient> predicate; if (node != null) { predicate = new Predicate<InternalTestCluster.NodeAndClient>() { public boolean apply(NodeAndClient nodeAndClient) { return node.equals(nodeAndClient.name); } }; } else { predicate = Predicates.alwaysTrue(); } return getInstance(clazz, predicate); } public synchronized <T> T getDataNodeInstance(Class<T> clazz) { return getInstance(clazz, new DataNodePredicate()); } private synchronized <T> T getInstance(Class<T> clazz, Predicate<NodeAndClient> predicate) { NodeAndClient randomNodeAndClient = getRandomNodeAndClient(predicate); assert randomNodeAndClient != null; return getInstanceFromNode(clazz, randomNodeAndClient.node); } /** * Returns a reference to a random nodes instances of the given class &gt;T&lt; */ public synchronized <T> T getInstance(Class<T> clazz) { return getInstance(clazz, Predicates.<NodeAndClient>alwaysTrue()); } private synchronized <T> T getInstanceFromNode(Class<T> clazz, InternalNode node) { return node.injector().getInstance(clazz); } @Override public synchronized int size() { return this.nodes.size(); } @Override public InetSocketAddress[] httpAddresses() { List<InetSocketAddress> addresses = Lists.newArrayList(); for (HttpServerTransport httpServerTransport : getInstances(HttpServerTransport.class)) { addresses.add(((InetSocketTransportAddress) httpServerTransport.boundAddress().publishAddress()).address()); } return addresses.toArray(new InetSocketAddress[addresses.size()]); } /** * Stops a random data node in the cluster. Returns true if a node was found to stop, false otherwise. */ public synchronized boolean stopRandomDataNode() throws IOException { ensureOpen(); NodeAndClient nodeAndClient = getRandomNodeAndClient(new DataNodePredicate()); if (nodeAndClient != null) { logger.info("Closing random node [{}] ", nodeAndClient.name); removeDisruptionSchemeFromNode(nodeAndClient); nodes.remove(nodeAndClient.name); nodeAndClient.close(); return true; } return false; } /** * Stops a random node in the cluster that applies to the given filter or non if the non of the nodes applies to the * filter. 
*/ public synchronized void stopRandomNode(final Predicate<Settings> filter) throws IOException { ensureOpen(); NodeAndClient nodeAndClient = getRandomNodeAndClient(new Predicate<InternalTestCluster.NodeAndClient>() { @Override public boolean apply(NodeAndClient nodeAndClient) { return filter.apply(nodeAndClient.node.settings()); } }); if (nodeAndClient != null) { logger.info("Closing filtered random node [{}] ", nodeAndClient.name); removeDisruptionSchemeFromNode(nodeAndClient); nodes.remove(nodeAndClient.name); nodeAndClient.close(); } } /** * Stops the current master node forcefully */ public synchronized void stopCurrentMasterNode() throws IOException { ensureOpen(); assert size() > 0; String masterNodeName = getMasterName(); assert nodes.containsKey(masterNodeName); logger.info("Closing master node [{}] ", masterNodeName); removeDisruptionSchemeFromNode(nodes.get(masterNodeName)); NodeAndClient remove = nodes.remove(masterNodeName); remove.close(); } /** * Stops the any of the current nodes but not the master node. */ public void stopRandomNonMasterNode() throws IOException { NodeAndClient nodeAndClient = getRandomNodeAndClient(Predicates.not(new MasterNodePredicate(getMasterName()))); if (nodeAndClient != null) { logger.info("Closing random non master node [{}] current master [{}] ", nodeAndClient.name, getMasterName()); removeDisruptionSchemeFromNode(nodeAndClient); nodes.remove(nodeAndClient.name); nodeAndClient.close(); } } /** * Restarts a random node in the cluster */ public void restartRandomNode() throws Exception { restartRandomNode(EMPTY_CALLBACK); } /** * Restarts a random node in the cluster and calls the callback during restart. */ public void restartRandomNode(RestartCallback callback) throws Exception { restartRandomNode(Predicates.<NodeAndClient>alwaysTrue(), callback); } /** * Restarts a random data node in the cluster */ public void restartRandomDataNode() throws Exception { restartRandomNode(EMPTY_CALLBACK); } /** * Restarts a random data node in the cluster and calls the callback during restart. */ public void restartRandomDataNode(RestartCallback callback) throws Exception { restartRandomNode(new DataNodePredicate(), callback); } /** * Restarts a random node in the cluster and calls the callback during restart. 
*/ private void restartRandomNode(Predicate<NodeAndClient> predicate, RestartCallback callback) throws Exception { ensureOpen(); NodeAndClient nodeAndClient = getRandomNodeAndClient(predicate); if (nodeAndClient != null) { logger.info("Restarting random node [{}] ", nodeAndClient.name); nodeAndClient.restart(callback); } } private void restartAllNodes(boolean rollingRestart, RestartCallback callback) throws Exception { ensureOpen(); List<NodeAndClient> toRemove = new ArrayList<>(); try { for (NodeAndClient nodeAndClient : nodes.values()) { if (!callback.doRestart(nodeAndClient.name)) { logger.info("Closing node [{}] during restart", nodeAndClient.name); toRemove.add(nodeAndClient); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.close(); } } } finally { for (NodeAndClient nodeAndClient : toRemove) { nodes.remove(nodeAndClient.name); } } logger.info("Restarting remaining nodes rollingRestart [{}]", rollingRestart); if (rollingRestart) { int numNodesRestarted = 0; for (NodeAndClient nodeAndClient : nodes.values()) { callback.doAfterNodes(numNodesRestarted++, nodeAndClient.nodeClient()); logger.info("Restarting node [{}] ", nodeAndClient.name); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.restart(callback); if (activeDisruptionScheme != null) { activeDisruptionScheme.applyToNode(nodeAndClient.name, this); } } } else { int numNodesRestarted = 0; for (NodeAndClient nodeAndClient : nodes.values()) { callback.doAfterNodes(numNodesRestarted++, nodeAndClient.nodeClient()); logger.info("Stopping node [{}] ", nodeAndClient.name); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.closeNode(); } for (NodeAndClient nodeAndClient : nodes.values()) { logger.info("Starting node [{}] ", nodeAndClient.name); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.restart(callback); if (activeDisruptionScheme != null) { activeDisruptionScheme.applyToNode(nodeAndClient.name, this); } } } } private static final RestartCallback EMPTY_CALLBACK = new RestartCallback() { public Settings onNodeStopped(String node) { return null; } }; /** * Restarts all nodes in the cluster. It first stops all nodes and then restarts all the nodes again. */ public void fullRestart() throws Exception { fullRestart(EMPTY_CALLBACK); } /** * Restarts all nodes in a rolling restart fashion ie. only restarts on node a time. */ public void rollingRestart() throws Exception { rollingRestart(EMPTY_CALLBACK); } /** * Restarts all nodes in a rolling restart fashion ie. only restarts on node a time. */ public void rollingRestart(RestartCallback function) throws Exception { restartAllNodes(true, function); } /** * Restarts all nodes in the cluster. It first stops all nodes and then restarts all the nodes again. 
*/ public void fullRestart(RestartCallback function) throws Exception { restartAllNodes(false, function); } /** * get the name of the current master node */ public String getMasterName() { try { ClusterState state = client().admin().cluster().prepareState().execute().actionGet().getState(); return state.nodes().masterNode().name(); } catch (Throwable e) { logger.warn("Can't fetch cluster state", e); throw new RuntimeException("Can't get master node " + e.getMessage(), e); } } synchronized Set<String> allDataNodesButN(int numNodes) { return nRandomDataNodes(numDataNodes() - numNodes); } private synchronized Set<String> nRandomDataNodes(int numNodes) { assert size() >= numNodes; NavigableMap<String, NodeAndClient> dataNodes = Maps.filterEntries(nodes, new EntryNodePredicate(new DataNodePredicate())); return Sets.newHashSet(Iterators.limit(dataNodes.keySet().iterator(), numNodes)); } /** * Returns a set of nodes that have at least one shard of the given index. */ public synchronized Set<String> nodesInclude(String index) { if (clusterService().state().routingTable().hasIndex(index)) { List<ShardRouting> allShards = clusterService().state().routingTable().allShards(index); DiscoveryNodes discoveryNodes = clusterService().state().getNodes(); Set<String> nodes = new HashSet<>(); for (ShardRouting shardRouting : allShards) { if (shardRouting.assignedToNode()) { DiscoveryNode discoveryNode = discoveryNodes.get(shardRouting.currentNodeId()); nodes.add(discoveryNode.getName()); } } return nodes; } return Collections.emptySet(); } /** * Starts a node with default settings and returns it's name. */ public synchronized String startNode() { return startNode(ImmutableSettings.EMPTY, Version.CURRENT); } /** * Starts a node with default settings ad the specified version and returns it's name. */ public synchronized String startNode(Version version) { return startNode(ImmutableSettings.EMPTY, version); } /** * Starts a node with the given settings builder and returns it's name. */ public synchronized String startNode(Settings.Builder settings) { return startNode(settings.build(), Version.CURRENT); } /** * Starts a node with the given settings and returns it's name. */ public synchronized String startNode(Settings settings) { return startNode(settings, Version.CURRENT); } /** * Starts a node with the given settings and version and returns it's name. */ public synchronized String startNode(Settings settings, Version version) { NodeAndClient buildNode = buildNode(settings, version); buildNode.node().start(); publishNode(buildNode); return buildNode.name; } /** * Starts a node in an async manner with the given settings and returns future with its name. */ public synchronized ListenableFuture<String> startNodeAsync() { return startNodeAsync(ImmutableSettings.EMPTY, Version.CURRENT); } /** * Starts a node in an async manner with the given settings and returns future with its name. */ public synchronized ListenableFuture<String> startNodeAsync(final Settings settings) { return startNodeAsync(settings, Version.CURRENT); } /** * Starts a node in an async manner with the given settings and version and returns future with its name. 
*/ public synchronized ListenableFuture<String> startNodeAsync(final Settings settings, final Version version) { final SettableFuture<String> future = SettableFuture.create(); final NodeAndClient buildNode = buildNode(settings, version); Runnable startNode = new Runnable() { @Override public void run() { try { buildNode.node().start(); publishNode(buildNode); future.set(buildNode.name); } catch (Throwable t) { future.setException(t); } } }; executor.execute(startNode); return future; } /** * Starts multiple nodes in an async manner and returns future with its name. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final int numNodes) { return startNodesAsync(numNodes, ImmutableSettings.EMPTY, Version.CURRENT); } /** * Starts multiple nodes in an async manner with the given settings and returns future with its name. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final int numNodes, final Settings settings) { return startNodesAsync(numNodes, settings, Version.CURRENT); } /** * Starts multiple nodes in an async manner with the given settings and version and returns future with its name. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final int numNodes, final Settings settings, final Version version) { List<ListenableFuture<String>> futures = Lists.newArrayList(); for (int i = 0; i < numNodes; i++) { futures.add(startNodeAsync(settings, version)); } return Futures.allAsList(futures); } /** * Starts multiple nodes (based on the number of settings provided) in an async manner, with explicit settings for each node. * The order of the node names returned matches the order of the settings provided. */ public synchronized ListenableFuture<List<String>> startNodesAsync(final Settings... settings) { List<ListenableFuture<String>> futures = Lists.newArrayList(); for (Settings setting : settings) { futures.add(startNodeAsync(setting, Version.CURRENT)); } return Futures.allAsList(futures); } private synchronized void publishNode(NodeAndClient nodeAndClient) { assert !nodeAndClient.node().isClosed(); NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, nodeAndClient.node); if (nodeEnv.hasNodeFile()) { dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataPaths())); } nodes.put(nodeAndClient.name, nodeAndClient); applyDisruptionSchemeToNode(nodeAndClient); } public void closeNonSharedNodes(boolean wipeData) throws IOException { reset(wipeData); } @Override public int numDataNodes() { return dataNodeAndClients().size(); } @Override public int numDataAndMasterNodes() { return dataAndMasterNodes().size(); } @Override public int numBenchNodes() { return benchNodeAndClients().size(); } @Override public boolean hasFilterCache() { return hasFilterCache; } public void setDisruptionScheme(ServiceDisruptionScheme scheme) { clearDisruptionScheme(); scheme.applyToCluster(this); activeDisruptionScheme = scheme; } public void clearDisruptionScheme() { if (activeDisruptionScheme != null) { TimeValue expectedHealingTime = activeDisruptionScheme.expectedTimeToHeal(); logger.info("Clearing active scheme {}, expected healing time {}", activeDisruptionScheme, expectedHealingTime); activeDisruptionScheme.removeFromCluster(this); // We don't what scheme is picked, certain schemes don't partition the cluster, but process slow, so we need // to to sleep, cluster health alone doesn't verify if these schemes have been cleared. 
if (expectedHealingTime != null && expectedHealingTime.millis() > 0) { try { Thread.sleep(expectedHealingTime.millis()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } assertFalse("cluster failed to form after disruption was healed", client().admin().cluster().prepareHealth() .setWaitForNodes("" + nodes.size()) .setWaitForRelocatingShards(0) .get().isTimedOut()); } activeDisruptionScheme = null; } private void applyDisruptionSchemeToNode(NodeAndClient nodeAndClient) { if (activeDisruptionScheme != null) { assert nodes.containsKey(nodeAndClient.name); activeDisruptionScheme.applyToNode(nodeAndClient.name, this); } } private void removeDisruptionSchemeFromNode(NodeAndClient nodeAndClient) { if (activeDisruptionScheme != null) { assert nodes.containsKey(nodeAndClient.name); activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } } private synchronized Collection<NodeAndClient> dataNodeAndClients() { return Collections2.filter(nodes.values(), new DataNodePredicate()); } private synchronized Collection<NodeAndClient> dataAndMasterNodes() { return Collections2.filter(nodes.values(), new DataOrMasterNodePredicate()); } private static final class DataNodePredicate implements Predicate<NodeAndClient> { @Override public boolean apply(NodeAndClient nodeAndClient) { return DiscoveryNode.dataNode(nodeAndClient.node.settings()); } } private static final class DataOrMasterNodePredicate implements Predicate<NodeAndClient> { @Override public boolean apply(NodeAndClient nodeAndClient) { return DiscoveryNode.dataNode(nodeAndClient.node.settings()) || DiscoveryNode.masterNode(nodeAndClient.node.settings()); } } private static final class MasterNodePredicate implements Predicate<NodeAndClient> { private final String masterNodeName; public MasterNodePredicate(String masterNodeName) { this.masterNodeName = masterNodeName; } @Override public boolean apply(NodeAndClient nodeAndClient) { return masterNodeName.equals(nodeAndClient.name); } } private static final class ClientNodePredicate implements Predicate<NodeAndClient> { @Override public boolean apply(NodeAndClient nodeAndClient) { return DiscoveryNode.clientNode(nodeAndClient.node.settings()); } } private synchronized Collection<NodeAndClient> benchNodeAndClients() { return Collections2.filter(nodes.values(), new BenchNodePredicate()); } private static final class BenchNodePredicate implements Predicate<NodeAndClient> { @Override public boolean apply(NodeAndClient nodeAndClient) { return nodeAndClient.node.settings().getAsBoolean("node.bench", false); } } private static final class EntryNodePredicate implements Predicate<Map.Entry<String, NodeAndClient>> { private final Predicate<NodeAndClient> delegateNodePredicate; EntryNodePredicate(Predicate<NodeAndClient> delegateNodePredicate) { this.delegateNodePredicate = delegateNodePredicate; } @Override public boolean apply(Map.Entry<String, NodeAndClient> entry) { return delegateNodePredicate.apply(entry.getValue()); } } @Override public synchronized Iterator<Client> iterator() { ensureOpen(); final Iterator<NodeAndClient> iterator = nodes.values().iterator(); return new Iterator<Client>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public Client next() { return iterator.next().client(random); } @Override public void remove() { throw new UnsupportedOperationException(""); } }; } /** * Returns a predicate that only accepts settings of nodes with one of the given names. */ public static Predicate<Settings> nameFilter(String... 
nodeName) { return new NodeNamePredicate(new HashSet<>(Arrays.asList(nodeName))); } private static final class NodeNamePredicate implements Predicate<Settings> { private final HashSet<String> nodeNames; public NodeNamePredicate(HashSet<String> nodeNames) { this.nodeNames = nodeNames; } @Override public boolean apply(Settings settings) { return nodeNames.contains(settings.get("name")); } } /** * An abstract class that is called during {@link #rollingRestart(InternalTestCluster.RestartCallback)} * and / or {@link #fullRestart(InternalTestCluster.RestartCallback)} to execute actions at certain * stages of the restart. */ public static abstract class RestartCallback { /** * Executed once the give node name has been stopped. */ public Settings onNodeStopped(String nodeName) throws Exception { return ImmutableSettings.EMPTY; } /** * Executed for each node before the <tt>n+1</tt> node is restarted. The given client is * an active client to the node that will be restarted next. */ public void doAfterNodes(int n, Client client) throws Exception { } /** * If this returns <code>true</code> all data for the node with the given node name will be cleared including * gateways and all index data. Returns <code>false</code> by default. */ public boolean clearData(String nodeName) { return false; } /** * If this returns <code>false</code> the node with the given node name will not be restarted. It will be * closed and removed from the cluster. Returns <code>true</code> by default. */ public boolean doRestart(String nodeName) { return true; } } public Settings getDefaultSettings() { return defaultSettings; } @Override public void ensureEstimatedStats() { if (size() > 0) { // Checks that the breakers have been reset without incurring a // network request, because a network request can increment one // of the breakers for (NodeAndClient nodeAndClient : nodes.values()) { final String name = nodeAndClient.name; final CircuitBreakerService breakerService = getInstanceFromNode(CircuitBreakerService.class, nodeAndClient.node); CircuitBreaker fdBreaker = breakerService.getBreaker(CircuitBreaker.Name.FIELDDATA); assertThat("Fielddata breaker not reset to 0 on node: " + name, fdBreaker.getUsed(), equalTo(0L)); // Anything that uses transport or HTTP can increase the // request breaker (because they use bigarrays), because of // that the breaker can sometimes be incremented from ping // requests from other clusters because Jenkins is running // multiple ES testing jobs in parallel on the same machine. 
// To combat this we check whether the breaker has reached 0 // in an assertBusy loop, so it will try for 10 seconds and // fail if it never reached 0 try { assertBusy(new Runnable() { @Override public void run() { CircuitBreaker reqBreaker = breakerService.getBreaker(CircuitBreaker.Name.REQUEST); assertThat("Request breaker not reset to 0 on node: " + name, reqBreaker.getUsed(), equalTo(0L)); } }); } catch (Exception e) { fail("Exception during check for request breaker reset to 0: " + e); } NodeService nodeService = getInstanceFromNode(NodeService.class, nodeAndClient.node); NodeStats stats = nodeService.stats(CommonStatsFlags.ALL, false, false, false, false, false, false, false, false, false); assertThat("Fielddata size must be 0 on node: " + stats.getNode(), stats.getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0l)); assertThat("Filter cache size must be 0 on node: " + stats.getNode(), stats.getIndices().getFilterCache().getMemorySizeInBytes(), equalTo(0l)); assertThat("FixedBitSet cache size must be 0 on node: " + stats.getNode(), stats.getIndices().getSegments().getFixedBitSetMemoryInBytes(), equalTo(0l)); } } } @Override public void assertAfterTest() throws IOException { super.assertAfterTest(); for (NodeEnvironment env : this.getInstances(NodeEnvironment.class)) { Set<ShardId> shardIds = env.lockedShards(); for (ShardId id : shardIds) { try { env.shardLock(id, TimeUnit.SECONDS.toMillis(5)).close(); } catch (IOException ex) { fail("Shard " + id + " is still locked after 5 sec waiting"); } } } } }
[TEST] Speed up recoveries if tests.nightly=true Conflicts: src/test/java/org/elasticsearch/test/InternalTestCluster.java
src/test/java/org/elasticsearch/test/InternalTestCluster.java
[TEST] Speed up recoveries if tests.nightly=true
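The InternalTestCluster code above exposes restart hooks: fullRestart(RestartCallback), rollingRestart(RestartCallback) and the RestartCallback overrides onNodeStopped, doAfterNodes, clearData and doRestart. A minimal sketch of how a test might drive a full restart through those hooks follows; the class name RestartSketch and the way the cluster reference is obtained are illustrative assumptions, not part of the repository.

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.InternalTestCluster;

// Illustrative sketch only: exercises the public restart API shown above.
public class RestartSketch {
    private final InternalTestCluster cluster; // assumed to be supplied by the test harness

    public RestartSketch(InternalTestCluster cluster) {
        this.cluster = cluster;
    }

    public void fullRestartKeepingData() throws Exception {
        cluster.fullRestart(new InternalTestCluster.RestartCallback() {
            @Override
            public Settings onNodeStopped(String nodeName) throws Exception {
                // Invoked per node while it is stopped; returning empty settings leaves
                // the node's configuration unchanged when it starts again.
                return ImmutableSettings.EMPTY;
            }

            @Override
            public boolean clearData(String nodeName) {
                // Keep the node's data directory so post-restart recovery is exercised.
                return false;
            }
        });
    }
}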
Java
apache-2.0
ebf1abab806a28bfcec74e20c26f632efdbaa410
0
objektwerks/java,objektwerks/java
package objektwerks; import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; class BasicTest { private static final Logger logger = LoggerFactory.getLogger(BasicTest.class); static { logger.info("*** Test logging is working!"); } @Test void basic() { assertEquals(1 + 1, 2); } }
src/test/java/objektwerks/BasicTest.java
package objektwerks; import ch.qos.logback.classic.Logger; import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.Test; import org.slf4j.LoggerFactory; class BasicTest { private static final Logger logger = (Logger) LoggerFactory.getLogger(BasicTest.class); static { logger.info("*** Test logging is working!"); } @Test void basic() { assertEquals(1 + 1, 2); } }
refactored logger in BasicTest
src/test/java/objektwerks/BasicTest.java
refactored logger in BasicTest
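The refactor above replaces the ch.qos.logback.classic.Logger cast with the plain org.slf4j.Logger interface, which is all the test needs in order to log. Dropping to the Logback type is only required when backend-specific APIs are called; the sketch below is illustrative only (the class and method names are assumptions) and shows the common case, changing a logger's level at runtime.

import ch.qos.logback.classic.Level;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative sketch: log through the SLF4J interface, and cast to the Logback type
// only where a backend-specific call such as setLevel(...) is actually needed.
class LoggerLevelSketch {
    private static final Logger logger = LoggerFactory.getLogger(LoggerLevelSketch.class);

    static void quietDown(String loggerName) {
        Logger target = LoggerFactory.getLogger(loggerName);
        if (target instanceof ch.qos.logback.classic.Logger) {
            // The cast is only valid when Logback is the bound SLF4J backend.
            ((ch.qos.logback.classic.Logger) target).setLevel(Level.WARN);
        }
        logger.info("Requested WARN level for logger {}", loggerName);
    }
}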
Java
apache-2.0
f5cafcc8ba255a54cc4df07ccbf4397f5856e643
0
IrrilevantHappyLlamas/Runnest
package ch.epfl.sweng.project; import org.junit.Assert; import org.junit.Test; import java.util.Date; import ch.epfl.sweng.project.Model.Challenge; import ch.epfl.sweng.project.Model.Message; public class MessageTest { @Test public void defaultConstructorDontThrowsException() { Message testMessage = new Message("me", "you", "me", "you", Message.Type.TEXT, "Hello, world!"); Assert.assertTrue(true); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalTo() { Message testMessage = new Message(null, "you", "me", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptyTo() { Message testMessage = new Message("", "you", "me", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalFrom() { Message testMessage = new Message("me", null, "me", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptyFrom() { Message testMessage = new Message("me", "", "me", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalSender() { Message testMessage = new Message("me", "you", null, "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptySender() { Message testMessage = new Message("me", "you", "", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalAddressee() { Message testMessage = new Message("me", "you", "me", null, Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptyAddresseee() { Message testMessage = new Message("me", "you", "me", "", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalType() { Message testMessage = new Message("me", "you", "me", "you", null, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalMessage() { Message testMessage = new Message("me", "you", "me", "you", Message.Type.CHALLENGE_REQUEST, null); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptyMessage() { Message testMessage = new Message("me", "you", "me", "you", Message.Type.CHALLENGE_RESPONSE, ""); } @Test(expected = IllegalArgumentException.class) public void constructorWithTimeThrowsExceptionWithNullTime() { Message testMessage = new Message("me", "you", "me", "you", Message.Type.MEMO, "Hello, world!", null); } @Test(expected = IllegalArgumentException.class) public void cantSendMessageToYourself() { Message testMessage = new Message("me", "me", "me", "me", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void constructorChallengeParamsThrowsExceptionWithNullChallengeType() { Date time = new Date(); Message testMessage = new Message("me", "you", "me", "you", Message.Type.TEXT, "msg", time, 0, 1, null); } @Test(expected = IllegalArgumentException.class) public void constructorScheduleMemoThrowsExceptionWithNullChallengeType() { Date time = new Date(); Message 
testMessage = new Message("me", "you", "me", "you", Message.Type.MEMO, "msg", time, null); } @Test(expected = IllegalArgumentException.class) public void constructorScheduleMemoThrowsExceptionWithIncoherentMessageType() { Date time = new Date(); Message testMessage = new Message("me", "you", "me", "you", Message.Type.TEXT, "msg", time, Challenge.Type.DISTANCE); } @Test public void getters() { String from = "me"; String to = "you"; String sender = "me"; String addressee = "you"; Message.Type type = Message.Type.TEXT; String msgTxt = "Hello, world!"; Date time = new Date(); int firstValue = 0; int secondValue = 1; Challenge.Type challengeType = Challenge.Type.TIME; Message message = new Message(from, to, sender, addressee, type, msgTxt, time, firstValue, secondValue, challengeType); Assert.assertTrue(from.equals(message.getFrom())); Assert.assertTrue(to.equals(message.getTo())); Assert.assertTrue(sender.equals(message.getSender())); Assert.assertTrue(addressee.equals(message.getAddressee())); Assert.assertTrue(type.equals(message.getType())); Assert.assertTrue(msgTxt.equals(message.getMessage())); Assert.assertTrue(time.equals(message.getTime())); Assert.assertEquals(firstValue, message.getFirstValue()); Assert.assertEquals(secondValue, message.getSecondValue()); Assert.assertEquals(challengeType, message.getChallengeType()); String expectedId = from.hashCode() + "_" + time.hashCode(); Assert.assertTrue(expectedId.equals(message.getUid())); } }
app/src/androidTest/java/ch/epfl/sweng/project/MessageTest.java
package ch.epfl.sweng.project; import org.junit.Assert; import org.junit.Test; import java.util.Date; import ch.epfl.sweng.project.Model.Challenge; import ch.epfl.sweng.project.Model.Message; public class MessageTest { @Test public void defaultConstructorDontThrowsException() { Message testMessage = new Message("me", "you", "me", "you", Message.Type.TEXT, "Hello, world!"); Assert.assertTrue(true); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalTo() { Message testMessage = new Message(null, "you", "me", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptyTo() { Message testMessage = new Message("", "you", "me", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalFrom() { Message testMessage = new Message("me", null, "me", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptyFrom() { Message testMessage = new Message("me", "", "me", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalSender() { Message testMessage = new Message("me", "you", null, "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptySender() { Message testMessage = new Message("me", "you", "", "you", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalAddressee() { Message testMessage = new Message("me", "you", "me", null, Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptyAddresseee() { Message testMessage = new Message("me", "you", "me", "", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalType() { Message testMessage = new Message("me", "you", "me", "you", null, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithIllegalMessage() { Message testMessage = new Message("me", "you", "me", "you", Message.Type.CHALLENGE_REQUEST, null); } @Test(expected = IllegalArgumentException.class) public void defaultConstructorThrowsExceptionWithEmptyMessage() { Message testMessage = new Message("me", "you", "me", "you", Message.Type.CHALLENGE_RESPONSE, ""); } @Test(expected = IllegalArgumentException.class) public void constructorWithTimeThrowsExceptionWithNullTime() { Message testMessage = new Message("me", "you", "me", "you", Message.Type.MEMO, "Hello, world!", null); } @Test(expected = IllegalArgumentException.class) public void cantSendMessageToYourself() { Message testMessage = new Message("me", "me", "me", "me", Message.Type.TEXT, "Hello, world!"); } @Test(expected = IllegalArgumentException.class) public void constructorChallengeParamsThrowsExceptionWithNullChallengeType() { Date time = new Date(); Message testMessage = new Message("me", "you", "me", "you", Message.Type.TEXT, "msg", time, 0, 1, null); } @Test(expected = IllegalArgumentException.class) public void constructorScheduleMemoThrowsExceptionWithNullChallengeType() { Date time = new Date(); Message 
testMessage = new Message("me", "you", "me", "you", Message.Type.TEXT, "msg", time, null); } @Test(expected = IllegalArgumentException.class) public void constructorScheduleMemoThrowsExceptionWithIncoherentMessageType() { Date time = new Date(); Message testMessage = new Message("me", "you", "me", "you", Message.Type.TEXT, "msg", time, Challenge.Type.DISTANCE); } @Test public void getters() { String from = "me"; String to = "you"; String sender = "me"; String addressee = "you"; Message.Type type = Message.Type.TEXT; String msgTxt = "Hello, world!"; Date time = new Date(); int firstValue = 0; int secondValue = 1; Challenge.Type challengeType = Challenge.Type.TIME; Message message = new Message(from, to, sender, addressee, type, msgTxt, time, firstValue, secondValue, challengeType); Assert.assertTrue(from.equals(message.getFrom())); Assert.assertTrue(to.equals(message.getTo())); Assert.assertTrue(sender.equals(message.getSender())); Assert.assertTrue(addressee.equals(message.getAddressee())); Assert.assertTrue(type.equals(message.getType())); Assert.assertTrue(msgTxt.equals(message.getMessage())); Assert.assertTrue(time.equals(message.getTime())); Assert.assertEquals(firstValue, message.getFirstValue()); Assert.assertEquals(secondValue, message.getSecondValue()); Assert.assertEquals(challengeType, message.getChallengeType()); String expectedId = from.hashCode() + "_" + time.hashCode(); Assert.assertTrue(expectedId.equals(message.getUid())); } }
Fix Message test that was throwing exception before the one expected
app/src/androidTest/java/ch/epfl/sweng/project/MessageTest.java
Fix Message test that was throwing exception before the one expected
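The fix above passes Message.Type.MEMO to the schedule-memo constructor so that the null Challenge.Type, rather than an incoherent message type, is what triggers the IllegalArgumentException. A sketch of making the failing check explicit is shown below; it is illustrative only, written as a method that would sit inside MessageTest (so it relies on the imports already present there), and assumes both validations throw IllegalArgumentException, as the surrounding tests indicate.

// Illustrative only, not part of the repository; intended to live inside MessageTest.
@Test
public void nullChallengeTypeIsTheCheckThatFails() {
    Date time = new Date();
    try {
        // Type.MEMO satisfies the message-type coherence check, so a failure here can
        // only come from the null Challenge.Type argument.
        new Message("me", "you", "me", "you", Message.Type.MEMO, "msg", time, null);
        Assert.fail("expected IllegalArgumentException for the null challenge type");
    } catch (IllegalArgumentException expected) {
        // expected: the null Challenge.Type was rejected
    }
}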
Java
apache-2.0
99828297f177159473e4bbabfcd103ed1e0f7c26
0
slapperwan/gh4a,slapperwan/gh4a
/* * Copyright 2011 Azwan Adli Abdullah * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gh4a.fragment; import android.app.Activity; import android.content.Intent; import android.graphics.Typeface; import android.os.Bundle; import android.os.Parcelable; import androidx.activity.result.ActivityResultLauncher; import androidx.activity.result.contract.ActivityResultContracts; import androidx.coordinatorlayout.widget.CoordinatorLayout; import androidx.recyclerview.widget.RecyclerView; import android.text.SpannableString; import android.text.style.StyleSpan; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.TextView; import com.gh4a.BaseActivity; import com.gh4a.Gh4Application; import com.gh4a.R; import com.gh4a.ServiceFactory; import com.gh4a.activities.UserActivity; import com.gh4a.adapter.RootAdapter; import com.gh4a.adapter.timeline.TimelineItemAdapter; import com.gh4a.model.TimelineItem; import com.gh4a.utils.ActivityResultHelpers; import com.gh4a.utils.ApiHelpers; import com.gh4a.utils.AvatarHandler; import com.gh4a.utils.HttpImageGetter; import com.gh4a.utils.IntentUtils; import com.gh4a.utils.RxUtils; import com.gh4a.utils.StringUtils; import com.gh4a.utils.UiUtils; import com.gh4a.widget.EditorBottomSheet; import com.gh4a.widget.ReactionBar; import com.meisolsson.githubsdk.model.GitHubCommentBase; import com.meisolsson.githubsdk.model.Issue; import com.meisolsson.githubsdk.model.IssueEventType; import com.meisolsson.githubsdk.model.Label; import com.meisolsson.githubsdk.model.Reaction; import com.meisolsson.githubsdk.model.Reactions; import com.meisolsson.githubsdk.model.User; import com.meisolsson.githubsdk.model.request.CommentRequest; import com.meisolsson.githubsdk.model.request.ReactionRequest; import com.meisolsson.githubsdk.service.reactions.ReactionService; import com.meisolsson.githubsdk.service.issues.IssueCommentService; import java.util.Arrays; import java.util.List; import java.util.Set; import io.reactivex.Single; import retrofit2.Response; public abstract class IssueFragmentBase extends ListDataBaseFragment<TimelineItem> implements View.OnClickListener, TimelineItemAdapter.OnCommentAction, ConfirmationDialogFragment.Callback, EditorBottomSheet.Callback, EditorBottomSheet.Listener, ReactionBar.Callback, ReactionBar.Item, ReactionBar.ReactionDetailsCache.Listener { protected static final List<IssueEventType> INTERESTING_EVENTS = Arrays.asList( IssueEventType.Closed, IssueEventType.Reopened, IssueEventType.Merged, IssueEventType.Referenced, IssueEventType.Assigned, IssueEventType.Unassigned, IssueEventType.Labeled, IssueEventType.Unlabeled, IssueEventType.Locked, IssueEventType.Unlocked, IssueEventType.Milestoned, IssueEventType.Demilestoned, IssueEventType.Renamed, IssueEventType.HeadRefDeleted, IssueEventType.HeadRefRestored, 
IssueEventType.HeadRefForcePushed, IssueEventType.CommentDeleted, IssueEventType.ReviewRequested, IssueEventType.ReviewRequestRemoved, IssueEventType.ConvertToDraft, IssueEventType.ReadyForReview ); protected View mListHeaderView; protected Issue mIssue; protected String mRepoOwner; protected String mRepoName; private IntentUtils.InitialCommentMarker mInitialComment; private boolean mIsCollaborator; private boolean mListShown; private ReactionBar.AddReactionMenuHelper mReactionMenuHelper; private final ReactionBar.ReactionDetailsCache mReactionDetailsCache = new ReactionBar.ReactionDetailsCache(this); private TimelineItemAdapter mAdapter; private HttpImageGetter mImageGetter; private EditorBottomSheet mBottomSheet; protected final ActivityResultLauncher<Intent> mEditLauncher = registerForActivityResult( new ActivityResultContracts.StartActivityForResult(), new ActivityResultHelpers.ActivityResultSuccessCallback(() -> { reloadEvents(true); getActivity().setResult(Activity.RESULT_OK); })); protected static Bundle buildArgs(String repoOwner, String repoName, Issue issue, boolean isCollaborator, IntentUtils.InitialCommentMarker initialComment) { Bundle args = new Bundle(); args.putString("owner", repoOwner); args.putString("repo", repoName); args.putParcelable("issue", issue); args.putBoolean("collaborator", isCollaborator); args.putParcelable("initial_comment", initialComment); return args; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Bundle args = getArguments(); mRepoOwner = args.getString("owner"); mRepoName = args.getString("repo"); mIssue = args.getParcelable("issue"); mIsCollaborator = args.getBoolean("collaborator"); mInitialComment = args.getParcelable("initial_comment"); args.remove("initial_comment"); setHasOptionsMenu(true); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View listContent = super.onCreateView(inflater, container, savedInstanceState); View v = inflater.inflate(R.layout.comment_list, container, false); FrameLayout listContainer = v.findViewById(R.id.list_container); listContainer.addView(listContent); mBottomSheet = v.findViewById(R.id.bottom_sheet); mBottomSheet.setCallback(this); mBottomSheet.setResizingView(listContainer); mBottomSheet.setListener(this); mImageGetter = new HttpImageGetter(inflater.getContext()); updateCommentSectionVisibility(v); updateCommentLockState(); return v; } @Override public void onViewCreated(View view, Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); if (mInitialComment == null) { // We want to make the user able to read the issue/PR while the rest of the conversation is still loading, // but at the same time we want to avoid item pop-in when the conversation loads quickly View contentContainer = view.findViewById(R.id.content_container); contentContainer.postDelayed( () -> UiUtils.updateViewVisibility(contentContainer, isResumed(), true), 800); } BaseActivity activity = getBaseActivity(); activity.addAppBarOffsetListener(mBottomSheet); mBottomSheet.post(() -> { // Fix an issue where the bottom sheet is initially located outside of the visible screen area mBottomSheet.resetPeekHeight(activity.getAppBarTotalScrollRange()); }); } @Override public void onDestroyView() { super.onDestroyView(); mReactionDetailsCache.destroy(); mImageGetter.destroy(); mImageGetter = null; if (mAdapter != null) { mAdapter.destroy(); mAdapter = null; } getBaseActivity().removeAppBarOffsetListener(mBottomSheet); } 
@Override protected void onRecyclerViewInflated(RecyclerView view, LayoutInflater inflater) { super.onRecyclerViewInflated(view, inflater); mListHeaderView = inflater.inflate(R.layout.issue_comment_list_header, view, false); mAdapter.setHeaderView(mListHeaderView); View loadingView = inflater.inflate(R.layout.list_loading_view, view, false); showLoadingIndicator(loadingView); } @Override public void onActivityCreated(Bundle savedInstanceState) { fillData(); fillLabels(mIssue.labels()); updateCommentLockState(); super.onActivityCreated(savedInstanceState); } @Override public boolean onBackPressed() { if (mBottomSheet != null && mBottomSheet.isInAdvancedMode()) { mBottomSheet.setAdvancedMode(false); return true; } return false; } @Override public void onRefresh() { if (mListHeaderView != null) { getActivity().invalidateOptionsMenu(); fillLabels(null); } if (mImageGetter != null) { mImageGetter.clearHtmlCache(); } mReactionDetailsCache.clear(); super.onRefresh(); } @Override public void onResume() { super.onResume(); mImageGetter.resume(); mAdapter.resume(); } @Override public void onPause() { super.onPause(); mImageGetter.pause(); mAdapter.pause(); } @Override public boolean canChildScrollUp() { return (mBottomSheet != null && mBottomSheet.isExpanded()) || super.canChildScrollUp(); } @Override public CoordinatorLayout getRootLayout() { return getBaseActivity().getRootLayout(); } @Override protected void setHighlightColors(int colorAttrId, int statusBarColorAttrId) { super.setHighlightColors(colorAttrId, statusBarColorAttrId); mBottomSheet.setHighlightColor(colorAttrId); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.issue_fragment_menu, menu); MenuItem reactItem = menu.findItem(R.id.react); if (isLocked()) { reactItem.setVisible(false); } else { inflater.inflate(R.menu.reaction_menu, reactItem.getSubMenu()); if (mReactionMenuHelper == null) { mReactionMenuHelper = new ReactionBar.AddReactionMenuHelper(getActivity(), reactItem.getSubMenu(), this, this, mReactionDetailsCache); } else { mReactionMenuHelper.updateFromMenu(reactItem.getSubMenu()); } mReactionMenuHelper.startLoadingIfNeeded(); } } @Override public boolean onOptionsItemSelected(MenuItem item) { if (mReactionMenuHelper != null && mReactionMenuHelper.onItemClick(item)) { return true; } return super.onOptionsItemSelected(item); } public void reloadEvents(boolean alsoClearCaches) { if (mAdapter != null && !alsoClearCaches) { // Don't clear adapter's cache, we're only interested in the new event mAdapter.suppressCacheClearOnNextClear(); } super.onRefresh(); } @Override protected RootAdapter<TimelineItem, ? 
extends RecyclerView.ViewHolder> onCreateAdapter() { mAdapter = new TimelineItemAdapter(getActivity(), mRepoOwner, mRepoName, mIssue.number(), mIssue.pullRequest() != null, true, this); mAdapter.setLocked(isLocked()); return mAdapter; } @Override protected void onAddData(RootAdapter<TimelineItem, ?> adapter, List<TimelineItem> data) { super.onAddData(adapter, data); if (mInitialComment != null) { for (int i = 0; i < data.size(); i++) { TimelineItem item = data.get(i); if (item instanceof TimelineItem.TimelineComment) { TimelineItem.TimelineComment comment = (TimelineItem.TimelineComment) item; if (mInitialComment.matches(comment.comment().id(), comment.getCreatedAt())) { scrollToAndHighlightPosition(i + 1 /* adjust for header view */); break; } } else if (item instanceof TimelineItem.TimelineReview) { TimelineItem.TimelineReview review = (TimelineItem.TimelineReview) item; if (mInitialComment.matches(review.review().id(), review.getCreatedAt())) { scrollToAndHighlightPosition(i + 1 /* adjust for header view */); break; } } } mInitialComment = null; } updateMentionUsers(); removeLoadingIndicator(adapter); } private void showLoadingIndicator(View loadingView) { loadingView.setVisibility(View.VISIBLE); mAdapter.setFooterView(loadingView, null); } private void removeLoadingIndicator(RootAdapter<TimelineItem, ?> adapter) { adapter.setFooterView(null, null); } @Override protected int getEmptyTextResId() { return 0; } @Override protected void updateEmptyState() { // we're never empty -> don't call super } @Override protected void setContentShown(boolean shown) { super.setContentShown(shown); mListShown = shown; updateCommentSectionVisibility(getView()); } private void updateCommentSectionVisibility(View v) { if (v == null) { return; } int commentVisibility = mListShown && Gh4Application.get().isAuthorized() ? 
View.VISIBLE : View.GONE; mBottomSheet.setVisibility(commentVisibility); } private boolean isLocked() { return mIssue.locked() && !mIsCollaborator; } private void updateMentionUsers() { Set<User> users = mAdapter.getUsers(); if (mIssue.user() != null) { users.add(mIssue.user()); } mBottomSheet.setMentionUsers(users); } private void updateCommentLockState() { mBottomSheet.setLocked(isLocked(), R.string.comment_editor_locked_hint); } private void fillData() { ImageView ivGravatar = mListHeaderView.findViewById(R.id.iv_gravatar); AvatarHandler.assignAvatar(ivGravatar, mIssue.user()); ivGravatar.setTag(mIssue.user()); ivGravatar.setOnClickListener(this); TextView tvExtra = mListHeaderView.findViewById(R.id.tv_extra); tvExtra.setText(ApiHelpers.getUserLoginWithType(getActivity(), mIssue.user())); tvExtra.setOnClickListener(this); tvExtra.setTag(mIssue.user()); TextView tvTimestamp = mListHeaderView.findViewById(R.id.tv_timestamp); tvTimestamp.setText(StringUtils.formatRelativeTime(getActivity(), mIssue.createdAt(), true)); String body = mIssue.bodyHtml(); TextView descriptionView = mListHeaderView.findViewById(R.id.tv_desc); if (!StringUtils.isBlank(body)) { mImageGetter.bind(descriptionView, body, mIssue.id()); if (!isLocked()) { descriptionView.setCustomSelectionActionModeCallback( new UiUtils.QuoteActionModeCallback(descriptionView) { @Override public void onTextQuoted(CharSequence text) { quoteText(text); } }); } else { descriptionView.setCustomSelectionActionModeCallback(null); } } else { SpannableString noDescriptionString = new SpannableString(getString(R.string.issue_no_description)); noDescriptionString.setSpan(new StyleSpan(Typeface.ITALIC), 0, noDescriptionString.length(), 0); descriptionView.setText(noDescriptionString); } View milestoneGroup = mListHeaderView.findViewById(R.id.milestone_container); if (mIssue.milestone() != null) { TextView tvMilestone = mListHeaderView.findViewById(R.id.tv_milestone); tvMilestone.setText(mIssue.milestone().title()); milestoneGroup.setVisibility(View.VISIBLE); } else { milestoneGroup.setVisibility(View.GONE); } View assigneeGroup = mListHeaderView.findViewById(R.id.assignee_container); List<User> assignees = mIssue.assignees(); if (assignees != null && !assignees.isEmpty()) { ViewGroup assigneeContainer = mListHeaderView.findViewById(R.id.assignee_list); LayoutInflater inflater = getLayoutInflater(); assigneeContainer.removeAllViews(); for (User assignee : assignees) { View row = inflater.inflate(R.layout.row_assignee, assigneeContainer, false); TextView tvAssignee = row.findViewById(R.id.tv_assignee); tvAssignee.setText(ApiHelpers.getUserLogin(getActivity(), assignee)); ImageView ivAssignee = row.findViewById(R.id.iv_assignee); AvatarHandler.assignAvatar(ivAssignee, assignee); ivAssignee.setTag(assignee); ivAssignee.setOnClickListener(this); assigneeContainer.addView(row); } assigneeGroup.setVisibility(View.VISIBLE); } else { assigneeGroup.setVisibility(View.GONE); } ReactionBar reactions = mListHeaderView.findViewById(R.id.reactions); reactions.setCallback(this, this); reactions.setDetailsCache(mReactionDetailsCache); reactions.setReactions(mIssue.reactions()); assignHighlightColor(); bindSpecialViews(mListHeaderView); } private void fillLabels(List<Label> labels) { View labelGroup = mListHeaderView.findViewById(R.id.label_container); if (labels != null && !labels.isEmpty()) { TextView labelView = mListHeaderView.findViewById(R.id.labels); labelView.setText(UiUtils.formatLabelList(getActivity(), labels)); labelGroup.setVisibility(View.VISIBLE); } 
else { labelGroup.setVisibility(View.GONE); } } @Override public Object getCacheKey() { return mIssue.id(); } @Override public Single<List<Reaction>> loadReactionDetails(ReactionBar.Item item, boolean bypassCache) { final ReactionService service = ServiceFactory.get(ReactionService.class, bypassCache); return ApiHelpers.PageIterator .toSingle(page -> service.getIssueReactions(mRepoOwner, mRepoName, mIssue.number(), page)); } @Override public boolean canAddReaction() { return !isLocked(); } @Override public Single<Reaction> addReaction(ReactionBar.Item item, String content) { ReactionService service = ServiceFactory.get(ReactionService.class, false); ReactionRequest request = ReactionRequest.builder().content(content).build(); return service.createIssueReaction(mRepoOwner, mRepoName, mIssue.number(), request) .map(ApiHelpers::throwOnFailure); } @Override public Single<List<Reaction>> loadReactionDetails(final GitHubCommentBase comment, boolean bypassCache) { final ReactionService service = ServiceFactory.get(ReactionService.class, bypassCache); return ApiHelpers.PageIterator .toSingle(page -> service.getIssueCommentReactions(mRepoOwner, mRepoName, comment.id(), page)); } @Override public Single<Reaction> addReaction(GitHubCommentBase comment, String content) { ReactionService service = ServiceFactory.get(ReactionService.class, false); ReactionRequest request = ReactionRequest.builder().content(content).build(); return service.createIssueCommentReaction(mRepoOwner, mRepoName,comment.id(), request) .map(ApiHelpers::throwOnFailure); } @Override public void onReactionsUpdated(ReactionBar.Item item, Reactions reactions) { mIssue = mIssue.toBuilder().reactions(reactions).build(); if (mListHeaderView != null) { ReactionBar bar = mListHeaderView.findViewById(R.id.reactions); bar.setReactions(reactions); } if (mReactionMenuHelper != null) { mReactionMenuHelper.update(); getActivity().invalidateOptionsMenu(); } } @Override public void onClick(View v) { if (v.getId() == R.id.tv_extra) { User user = (User) v.getTag(); addText(StringUtils.formatMention(getContext(), user)); return; } Intent intent = UserActivity.makeIntent(getActivity(), (User) v.getTag()); if (intent != null) { startActivity(intent); } } @Override public void quoteText(CharSequence text) { mBottomSheet.addQuote(text); } @Override public void addText(CharSequence text) { mBottomSheet.addText(text); } @Override public Single<?> onEditorDoSend(String comment) { IssueCommentService service = ServiceFactory.get(IssueCommentService.class, false); CommentRequest request = CommentRequest.builder().body(comment).build(); return service.createIssueComment(mRepoOwner, mRepoName, mIssue.number(), request) .map(ApiHelpers::throwOnFailure); } @Override public void onEditorTextSent() { // reload comments if (isAdded()) { reloadEvents(false); } getActivity().setResult(Activity.RESULT_OK); } @Override public int getEditorErrorMessageResId() { return R.string.issue_error_comment; } @Override public void deleteComment(final GitHubCommentBase comment) { ConfirmationDialogFragment.show(this, R.string.delete_comment_message, R.string.delete, comment, "deleteconfirm"); } @Override public void onConfirmed(String tag, Parcelable data) { GitHubCommentBase comment = (GitHubCommentBase) data; handleDeleteComment(comment); } @Override public String getShareSubject(GitHubCommentBase comment) { return getString(R.string.share_comment_subject, comment.id(), mIssue.number(), mRepoOwner + "/" + mRepoName); } @Override public void onToggleAdvancedMode(boolean 
advancedMode) { getBaseActivity().collapseAppBar(); getBaseActivity().setAppBarLocked(advancedMode); mBottomSheet.resetPeekHeight(0); } @Override public void onScrollingInBasicEditor(boolean scrolling) { getBaseActivity().setAppBarLocked(scrolling); } @Override public void onReplyCommentSelected(long replyToId) { // Not used in this screen } @Override public long getSelectedReplyCommentId() { // Not used in this screen return 0; } protected abstract void bindSpecialViews(View headerView); protected abstract void assignHighlightColor(); protected abstract Single<Response<Void>> doDeleteComment(GitHubCommentBase comment); private void handleDeleteComment(GitHubCommentBase comment) { doDeleteComment(comment) .map(ApiHelpers::mapToBooleanOrThrowOnFailure) .compose(RxUtils.wrapForBackgroundTask(getBaseActivity(), R.string.deleting_msg, R.string.error_delete_comment)) .subscribe(result -> { reloadEvents(false); getActivity().setResult(Activity.RESULT_OK); }, error -> handleActionFailure("Deleting comment failed", error)); } }
app/src/main/java/com/gh4a/fragment/IssueFragmentBase.java
/* * Copyright 2011 Azwan Adli Abdullah * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gh4a.fragment; import android.app.Activity; import android.content.Intent; import android.graphics.Typeface; import android.os.Bundle; import android.os.Parcelable; import androidx.activity.result.ActivityResultLauncher; import androidx.activity.result.contract.ActivityResultContracts; import androidx.coordinatorlayout.widget.CoordinatorLayout; import androidx.recyclerview.widget.RecyclerView; import android.text.SpannableString; import android.text.style.StyleSpan; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.TextView; import com.gh4a.BaseActivity; import com.gh4a.Gh4Application; import com.gh4a.R; import com.gh4a.ServiceFactory; import com.gh4a.activities.UserActivity; import com.gh4a.adapter.RootAdapter; import com.gh4a.adapter.timeline.TimelineItemAdapter; import com.gh4a.model.TimelineItem; import com.gh4a.utils.ActivityResultHelpers; import com.gh4a.utils.ApiHelpers; import com.gh4a.utils.AvatarHandler; import com.gh4a.utils.HttpImageGetter; import com.gh4a.utils.IntentUtils; import com.gh4a.utils.RxUtils; import com.gh4a.utils.StringUtils; import com.gh4a.utils.UiUtils; import com.gh4a.widget.EditorBottomSheet; import com.gh4a.widget.ReactionBar; import com.meisolsson.githubsdk.model.GitHubCommentBase; import com.meisolsson.githubsdk.model.Issue; import com.meisolsson.githubsdk.model.IssueEventType; import com.meisolsson.githubsdk.model.Label; import com.meisolsson.githubsdk.model.Reaction; import com.meisolsson.githubsdk.model.Reactions; import com.meisolsson.githubsdk.model.User; import com.meisolsson.githubsdk.model.request.CommentRequest; import com.meisolsson.githubsdk.model.request.ReactionRequest; import com.meisolsson.githubsdk.service.reactions.ReactionService; import com.meisolsson.githubsdk.service.issues.IssueCommentService; import java.util.Arrays; import java.util.List; import java.util.Set; import io.reactivex.Single; import retrofit2.Response; public abstract class IssueFragmentBase extends ListDataBaseFragment<TimelineItem> implements View.OnClickListener, TimelineItemAdapter.OnCommentAction, ConfirmationDialogFragment.Callback, EditorBottomSheet.Callback, EditorBottomSheet.Listener, ReactionBar.Callback, ReactionBar.Item, ReactionBar.ReactionDetailsCache.Listener { protected static final List<IssueEventType> INTERESTING_EVENTS = Arrays.asList( IssueEventType.Closed, IssueEventType.Reopened, IssueEventType.Merged, IssueEventType.Referenced, IssueEventType.Assigned, IssueEventType.Unassigned, IssueEventType.Labeled, IssueEventType.Unlabeled, IssueEventType.Locked, IssueEventType.Unlocked, IssueEventType.Milestoned, IssueEventType.Demilestoned, IssueEventType.Renamed, IssueEventType.HeadRefDeleted, IssueEventType.HeadRefRestored, 
IssueEventType.HeadRefForcePushed, IssueEventType.CommentDeleted, IssueEventType.ReviewRequested, IssueEventType.ReviewRequestRemoved, IssueEventType.ConvertToDraft, IssueEventType.ReadyForReview ); protected View mListHeaderView; protected Issue mIssue; protected String mRepoOwner; protected String mRepoName; private IntentUtils.InitialCommentMarker mInitialComment; private boolean mIsCollaborator; private boolean mListShown; private ReactionBar.AddReactionMenuHelper mReactionMenuHelper; private final ReactionBar.ReactionDetailsCache mReactionDetailsCache = new ReactionBar.ReactionDetailsCache(this); private TimelineItemAdapter mAdapter; private HttpImageGetter mImageGetter; private EditorBottomSheet mBottomSheet; protected final ActivityResultLauncher<Intent> mEditLauncher = registerForActivityResult( new ActivityResultContracts.StartActivityForResult(), new ActivityResultHelpers.ActivityResultSuccessCallback(() -> { reloadEvents(true); getActivity().setResult(Activity.RESULT_OK); })); protected static Bundle buildArgs(String repoOwner, String repoName, Issue issue, boolean isCollaborator, IntentUtils.InitialCommentMarker initialComment) { Bundle args = new Bundle(); args.putString("owner", repoOwner); args.putString("repo", repoName); args.putParcelable("issue", issue); args.putBoolean("collaborator", isCollaborator); args.putParcelable("initial_comment", initialComment); return args; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Bundle args = getArguments(); mRepoOwner = args.getString("owner"); mRepoName = args.getString("repo"); mIssue = args.getParcelable("issue"); mIsCollaborator = args.getBoolean("collaborator"); mInitialComment = args.getParcelable("initial_comment"); args.remove("initial_comment"); setHasOptionsMenu(true); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View listContent = super.onCreateView(inflater, container, savedInstanceState); View v = inflater.inflate(R.layout.comment_list, container, false); FrameLayout listContainer = v.findViewById(R.id.list_container); listContainer.addView(listContent); mBottomSheet = v.findViewById(R.id.bottom_sheet); mBottomSheet.setCallback(this); mBottomSheet.setResizingView(listContainer); mBottomSheet.setListener(this); mImageGetter = new HttpImageGetter(inflater.getContext()); updateCommentSectionVisibility(v); updateCommentLockState(); return v; } @Override public void onViewCreated(View view, Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); // We want to make the user able to read the issue/PR while the rest of the conversation is still loading if (mInitialComment == null) { view.findViewById(R.id.content_container).setVisibility(View.VISIBLE); } BaseActivity activity = getBaseActivity(); activity.addAppBarOffsetListener(mBottomSheet); mBottomSheet.post(() -> { // Fix an issue where the bottom sheet is initially located outside of the visible screen area mBottomSheet.resetPeekHeight(activity.getAppBarTotalScrollRange()); }); } @Override public void onDestroyView() { super.onDestroyView(); mReactionDetailsCache.destroy(); mImageGetter.destroy(); mImageGetter = null; if (mAdapter != null) { mAdapter.destroy(); mAdapter = null; } getBaseActivity().removeAppBarOffsetListener(mBottomSheet); } @Override protected void onRecyclerViewInflated(RecyclerView view, LayoutInflater inflater) { super.onRecyclerViewInflated(view, inflater); mListHeaderView = 
inflater.inflate(R.layout.issue_comment_list_header, view, false); mAdapter.setHeaderView(mListHeaderView); View loadingView = inflater.inflate(R.layout.list_loading_view, view, false); showLoadingIndicator(loadingView); } @Override public void onActivityCreated(Bundle savedInstanceState) { fillData(); fillLabels(mIssue.labels()); updateCommentLockState(); super.onActivityCreated(savedInstanceState); } @Override public boolean onBackPressed() { if (mBottomSheet != null && mBottomSheet.isInAdvancedMode()) { mBottomSheet.setAdvancedMode(false); return true; } return false; } @Override public void onRefresh() { if (mListHeaderView != null) { getActivity().invalidateOptionsMenu(); fillLabels(null); } if (mImageGetter != null) { mImageGetter.clearHtmlCache(); } mReactionDetailsCache.clear(); super.onRefresh(); } @Override public void onResume() { super.onResume(); mImageGetter.resume(); mAdapter.resume(); } @Override public void onPause() { super.onPause(); mImageGetter.pause(); mAdapter.pause(); } @Override public boolean canChildScrollUp() { return (mBottomSheet != null && mBottomSheet.isExpanded()) || super.canChildScrollUp(); } @Override public CoordinatorLayout getRootLayout() { return getBaseActivity().getRootLayout(); } @Override protected void setHighlightColors(int colorAttrId, int statusBarColorAttrId) { super.setHighlightColors(colorAttrId, statusBarColorAttrId); mBottomSheet.setHighlightColor(colorAttrId); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.issue_fragment_menu, menu); MenuItem reactItem = menu.findItem(R.id.react); if (isLocked()) { reactItem.setVisible(false); } else { inflater.inflate(R.menu.reaction_menu, reactItem.getSubMenu()); if (mReactionMenuHelper == null) { mReactionMenuHelper = new ReactionBar.AddReactionMenuHelper(getActivity(), reactItem.getSubMenu(), this, this, mReactionDetailsCache); } else { mReactionMenuHelper.updateFromMenu(reactItem.getSubMenu()); } mReactionMenuHelper.startLoadingIfNeeded(); } } @Override public boolean onOptionsItemSelected(MenuItem item) { if (mReactionMenuHelper != null && mReactionMenuHelper.onItemClick(item)) { return true; } return super.onOptionsItemSelected(item); } public void reloadEvents(boolean alsoClearCaches) { if (mAdapter != null && !alsoClearCaches) { // Don't clear adapter's cache, we're only interested in the new event mAdapter.suppressCacheClearOnNextClear(); } super.onRefresh(); } @Override protected RootAdapter<TimelineItem, ? 
extends RecyclerView.ViewHolder> onCreateAdapter() { mAdapter = new TimelineItemAdapter(getActivity(), mRepoOwner, mRepoName, mIssue.number(), mIssue.pullRequest() != null, true, this); mAdapter.setLocked(isLocked()); return mAdapter; } @Override protected void onAddData(RootAdapter<TimelineItem, ?> adapter, List<TimelineItem> data) { super.onAddData(adapter, data); if (mInitialComment != null) { for (int i = 0; i < data.size(); i++) { TimelineItem item = data.get(i); if (item instanceof TimelineItem.TimelineComment) { TimelineItem.TimelineComment comment = (TimelineItem.TimelineComment) item; if (mInitialComment.matches(comment.comment().id(), comment.getCreatedAt())) { scrollToAndHighlightPosition(i + 1 /* adjust for header view */); break; } } else if (item instanceof TimelineItem.TimelineReview) { TimelineItem.TimelineReview review = (TimelineItem.TimelineReview) item; if (mInitialComment.matches(review.review().id(), review.getCreatedAt())) { scrollToAndHighlightPosition(i + 1 /* adjust for header view */); break; } } } mInitialComment = null; } updateMentionUsers(); removeLoadingIndicator(adapter); } private void showLoadingIndicator(View loadingView) { loadingView.setVisibility(View.VISIBLE); mAdapter.setFooterView(loadingView, null); } private void removeLoadingIndicator(RootAdapter<TimelineItem, ?> adapter) { adapter.setFooterView(null, null); } @Override protected int getEmptyTextResId() { return 0; } @Override protected void updateEmptyState() { // we're never empty -> don't call super } @Override protected void setContentShown(boolean shown) { super.setContentShown(shown); mListShown = shown; updateCommentSectionVisibility(getView()); } private void updateCommentSectionVisibility(View v) { if (v == null) { return; } int commentVisibility = mListShown && Gh4Application.get().isAuthorized() ? 
View.VISIBLE : View.GONE; mBottomSheet.setVisibility(commentVisibility); } private boolean isLocked() { return mIssue.locked() && !mIsCollaborator; } private void updateMentionUsers() { Set<User> users = mAdapter.getUsers(); if (mIssue.user() != null) { users.add(mIssue.user()); } mBottomSheet.setMentionUsers(users); } private void updateCommentLockState() { mBottomSheet.setLocked(isLocked(), R.string.comment_editor_locked_hint); } private void fillData() { ImageView ivGravatar = mListHeaderView.findViewById(R.id.iv_gravatar); AvatarHandler.assignAvatar(ivGravatar, mIssue.user()); ivGravatar.setTag(mIssue.user()); ivGravatar.setOnClickListener(this); TextView tvExtra = mListHeaderView.findViewById(R.id.tv_extra); tvExtra.setText(ApiHelpers.getUserLoginWithType(getActivity(), mIssue.user())); tvExtra.setOnClickListener(this); tvExtra.setTag(mIssue.user()); TextView tvTimestamp = mListHeaderView.findViewById(R.id.tv_timestamp); tvTimestamp.setText(StringUtils.formatRelativeTime(getActivity(), mIssue.createdAt(), true)); String body = mIssue.bodyHtml(); TextView descriptionView = mListHeaderView.findViewById(R.id.tv_desc); if (!StringUtils.isBlank(body)) { mImageGetter.bind(descriptionView, body, mIssue.id()); if (!isLocked()) { descriptionView.setCustomSelectionActionModeCallback( new UiUtils.QuoteActionModeCallback(descriptionView) { @Override public void onTextQuoted(CharSequence text) { quoteText(text); } }); } else { descriptionView.setCustomSelectionActionModeCallback(null); } } else { SpannableString noDescriptionString = new SpannableString(getString(R.string.issue_no_description)); noDescriptionString.setSpan(new StyleSpan(Typeface.ITALIC), 0, noDescriptionString.length(), 0); descriptionView.setText(noDescriptionString); } View milestoneGroup = mListHeaderView.findViewById(R.id.milestone_container); if (mIssue.milestone() != null) { TextView tvMilestone = mListHeaderView.findViewById(R.id.tv_milestone); tvMilestone.setText(mIssue.milestone().title()); milestoneGroup.setVisibility(View.VISIBLE); } else { milestoneGroup.setVisibility(View.GONE); } View assigneeGroup = mListHeaderView.findViewById(R.id.assignee_container); List<User> assignees = mIssue.assignees(); if (assignees != null && !assignees.isEmpty()) { ViewGroup assigneeContainer = mListHeaderView.findViewById(R.id.assignee_list); LayoutInflater inflater = getLayoutInflater(); assigneeContainer.removeAllViews(); for (User assignee : assignees) { View row = inflater.inflate(R.layout.row_assignee, assigneeContainer, false); TextView tvAssignee = row.findViewById(R.id.tv_assignee); tvAssignee.setText(ApiHelpers.getUserLogin(getActivity(), assignee)); ImageView ivAssignee = row.findViewById(R.id.iv_assignee); AvatarHandler.assignAvatar(ivAssignee, assignee); ivAssignee.setTag(assignee); ivAssignee.setOnClickListener(this); assigneeContainer.addView(row); } assigneeGroup.setVisibility(View.VISIBLE); } else { assigneeGroup.setVisibility(View.GONE); } ReactionBar reactions = mListHeaderView.findViewById(R.id.reactions); reactions.setCallback(this, this); reactions.setDetailsCache(mReactionDetailsCache); reactions.setReactions(mIssue.reactions()); assignHighlightColor(); bindSpecialViews(mListHeaderView); } private void fillLabels(List<Label> labels) { View labelGroup = mListHeaderView.findViewById(R.id.label_container); if (labels != null && !labels.isEmpty()) { TextView labelView = mListHeaderView.findViewById(R.id.labels); labelView.setText(UiUtils.formatLabelList(getActivity(), labels)); labelGroup.setVisibility(View.VISIBLE); } 
else { labelGroup.setVisibility(View.GONE); } } @Override public Object getCacheKey() { return mIssue.id(); } @Override public Single<List<Reaction>> loadReactionDetails(ReactionBar.Item item, boolean bypassCache) { final ReactionService service = ServiceFactory.get(ReactionService.class, bypassCache); return ApiHelpers.PageIterator .toSingle(page -> service.getIssueReactions(mRepoOwner, mRepoName, mIssue.number(), page)); } @Override public boolean canAddReaction() { return !isLocked(); } @Override public Single<Reaction> addReaction(ReactionBar.Item item, String content) { ReactionService service = ServiceFactory.get(ReactionService.class, false); ReactionRequest request = ReactionRequest.builder().content(content).build(); return service.createIssueReaction(mRepoOwner, mRepoName, mIssue.number(), request) .map(ApiHelpers::throwOnFailure); } @Override public Single<List<Reaction>> loadReactionDetails(final GitHubCommentBase comment, boolean bypassCache) { final ReactionService service = ServiceFactory.get(ReactionService.class, bypassCache); return ApiHelpers.PageIterator .toSingle(page -> service.getIssueCommentReactions(mRepoOwner, mRepoName, comment.id(), page)); } @Override public Single<Reaction> addReaction(GitHubCommentBase comment, String content) { ReactionService service = ServiceFactory.get(ReactionService.class, false); ReactionRequest request = ReactionRequest.builder().content(content).build(); return service.createIssueCommentReaction(mRepoOwner, mRepoName,comment.id(), request) .map(ApiHelpers::throwOnFailure); } @Override public void onReactionsUpdated(ReactionBar.Item item, Reactions reactions) { mIssue = mIssue.toBuilder().reactions(reactions).build(); if (mListHeaderView != null) { ReactionBar bar = mListHeaderView.findViewById(R.id.reactions); bar.setReactions(reactions); } if (mReactionMenuHelper != null) { mReactionMenuHelper.update(); getActivity().invalidateOptionsMenu(); } } @Override public void onClick(View v) { if (v.getId() == R.id.tv_extra) { User user = (User) v.getTag(); addText(StringUtils.formatMention(getContext(), user)); return; } Intent intent = UserActivity.makeIntent(getActivity(), (User) v.getTag()); if (intent != null) { startActivity(intent); } } @Override public void quoteText(CharSequence text) { mBottomSheet.addQuote(text); } @Override public void addText(CharSequence text) { mBottomSheet.addText(text); } @Override public Single<?> onEditorDoSend(String comment) { IssueCommentService service = ServiceFactory.get(IssueCommentService.class, false); CommentRequest request = CommentRequest.builder().body(comment).build(); return service.createIssueComment(mRepoOwner, mRepoName, mIssue.number(), request) .map(ApiHelpers::throwOnFailure); } @Override public void onEditorTextSent() { // reload comments if (isAdded()) { reloadEvents(false); } getActivity().setResult(Activity.RESULT_OK); } @Override public int getEditorErrorMessageResId() { return R.string.issue_error_comment; } @Override public void deleteComment(final GitHubCommentBase comment) { ConfirmationDialogFragment.show(this, R.string.delete_comment_message, R.string.delete, comment, "deleteconfirm"); } @Override public void onConfirmed(String tag, Parcelable data) { GitHubCommentBase comment = (GitHubCommentBase) data; handleDeleteComment(comment); } @Override public String getShareSubject(GitHubCommentBase comment) { return getString(R.string.share_comment_subject, comment.id(), mIssue.number(), mRepoOwner + "/" + mRepoName); } @Override public void onToggleAdvancedMode(boolean 
advancedMode) { getBaseActivity().collapseAppBar(); getBaseActivity().setAppBarLocked(advancedMode); mBottomSheet.resetPeekHeight(0); } @Override public void onScrollingInBasicEditor(boolean scrolling) { getBaseActivity().setAppBarLocked(scrolling); } @Override public void onReplyCommentSelected(long replyToId) { // Not used in this screen } @Override public long getSelectedReplyCommentId() { // Not used in this screen return 0; } protected abstract void bindSpecialViews(View headerView); protected abstract void assignHighlightColor(); protected abstract Single<Response<Void>> doDeleteComment(GitHubCommentBase comment); private void handleDeleteComment(GitHubCommentBase comment) { doDeleteComment(comment) .map(ApiHelpers::mapToBooleanOrThrowOnFailure) .compose(RxUtils.wrapForBackgroundTask(getBaseActivity(), R.string.deleting_msg, R.string.error_delete_comment)) .subscribe(result -> { reloadEvents(false); getActivity().setResult(Activity.RESULT_OK); }, error -> handleActionFailure("Deleting comment failed", error)); } }
Avoid pop-in when issue/PR conversation loads quickly
app/src/main/java/com/gh4a/fragment/IssueFragmentBase.java
Avoid pop-in when issue/PR conversation loads quickly
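A minimal sketch of the pattern behind the "avoid pop-in" commit above, assuming the AndroidX Fragment API and the content_container view id taken from the record; the ConversationFragment class name and the initialCommentMarker field are hypothetical stand-ins for the record's IssueFragmentBase and IntentUtils.InitialCommentMarker, and the app's R class is assumed to be on the classpath:

import android.os.Bundle;
import android.view.View;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;

public class ConversationFragment extends Fragment {
    // Hypothetical stand-in for the deep-link target parsed from the launch intent.
    @Nullable
    private Object initialCommentMarker;

    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        // When there is no deep-linked comment to scroll to, reveal the issue/PR header
        // container immediately instead of waiting for the whole timeline to load,
        // which is what causes the pop-in the commit message refers to.
        if (initialCommentMarker == null) {
            view.findViewById(R.id.content_container).setVisibility(View.VISIBLE);
        }
    }
}

This mirrors the onViewCreated change visible in the record's new_contents field above.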
Java
apache-2.0
5467072253a7a5e46abd2af9e58ebd6c1fe81981
0
balarj/rmend-commons
package com.brajagopal.rmend.data.beans; import com.google.common.collect.HashMultimap; import com.google.common.collect.TreeMultimap; import com.google.gson.*; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.ComparatorUtils; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Logger; import org.joda.time.DateTime; import java.lang.reflect.Type; import java.nio.charset.Charset; import java.util.*; /** * @author <bxr4261> */ public class DocumentBean extends BaseContent { private static Logger logger = Logger.getLogger(DocumentBean.class); private long documentNumber; private String docId; private String title; private String document; private String contentMD5Sum; private Collection<String> topics; private HashMultimap<ContentType, BaseContent> contentBeans; public DocumentBean() { this(ContentType.DOCUMENT_INFO); } private DocumentBean(ContentType _contentType) { super(_contentType); } @Override public void process(Map<String, ? extends Object> _value) { Map<String, String> infoValue = ((Map<String, String>)_value.get("info")); String documentId = infoValue.get("docId"); String docBody = infoValue.get("document"); String[] docElements = docBody.split("\\n", 2); if (docElements.length == 2) { this.title = docElements[0]; this.document = StringUtils.trim(docElements[1]); } else { this.title = ""; this.document = StringUtils.toEncodedString(docBody.getBytes(), Charset.forName("UTF8")); } this.docId = documentId.substring(documentId.lastIndexOf("/") + 1, documentId.length()); this.contentMD5Sum = DigestUtils.md5Hex(this.document); this.documentNumber = System.currentTimeMillis() - new DateTime("2015-06-01").getMillis(); contentBeans = HashMultimap.create(); } @Override public BaseContent getInstance() { return new DocumentBean(); } @Override public double getScore() { throw new UnsupportedOperationException(); } @Override public String toString() { return "DocumentBean {" + "docId='" + getDocId() + '\'' + ", docTitle='" + getTitle() + '\'' + ", topic='" + getTopic() + '\'' + ", documentNumber='" + getDocumentNumber() + '\'' + ", contentMD5Sum='" + getContentMD5Sum() + '\'' + ", contentBeans=" + getContentBeans() + '}'; } public String getDocId() { return docId; } public String getDocument() { return document; } public String getContentMD5Sum() { return contentMD5Sum; } public long getDocumentNumber() { return documentNumber; } public String getTitle() { return title; } @Override public Boolean isForEndUserDisplay() { return null; } public Collection<BaseContent> getContentBeans() { if (contentBeans != null) { return contentBeans.values(); } return null; } public Collection<String> getTopic() { return topics; } public void setContentBeans(Collection<BaseContent> _contentBeans) { for (BaseContent bean : _contentBeans) { contentBeans.put(bean.getContentType(), bean); } Collection<BaseContent> topics = contentBeans.get(ContentType.TOPICS); if (topics.size() > 0) { this.topics = new ArrayList<String>(); for (BaseContent topic : topics) { this.topics.add(topic.getName()); } } else { this.topics = Arrays.asList("NA"); } } public int getEntitySize() { return this.getContentBeans().size(); } public Map<ContentType, Collection<BaseContent>> getContentBeansByType() { return contentBeans.asMap(); } public TreeMultimap<ContentType, BaseContent> getRelevantBeans(int _numResult) { return getTopNRelevantBeans(_numResult); } public TreeMultimap<ContentType, BaseContent> getRelevantBeans() { 
return getTopNRelevantBeans(2); } private TreeMultimap<ContentType, BaseContent> getTopNRelevantBeans(int _numResult) { TreeMultimap<ContentType, BaseContent> retVal = TreeMultimap.create(ComparatorUtils.NATURAL_COMPARATOR, BaseContent.CONTENT_COMPARATOR); for (Map.Entry<ContentType, Collection<BaseContent>> beansByType : getContentBeansByType().entrySet()) { List<BaseContent> sortedValues = new ArrayList<BaseContent>(beansByType.getValue()); Collections.sort(sortedValues, (Comparator<? super BaseContent>) BaseContent.CONTENT_COMPARATOR); if (sortedValues.size() > _numResult) { sortedValues.subList(_numResult, sortedValues.size()).clear(); // trim to top 2 from each kind } retVal.putAll(beansByType.getKey(), sortedValues); } return retVal; } public static class DocumentSerDe implements JsonSerializer<DocumentBean>, JsonDeserializer<DocumentBean> { @Override public JsonElement serialize(final DocumentBean bean, Type type, JsonSerializationContext jsonSerializationContext) { JsonObject root = new JsonObject(); root.addProperty("docId", bean.docId); root.addProperty("title", bean.title); root.addProperty("md5sum", bean.contentMD5Sum); root.addProperty("docBody", bean.document); final JsonArray jsonTopicsArray = new JsonArray(); for (final String topic : bean.getTopic()) { final JsonPrimitive topicPrimitive = new JsonPrimitive(topic); jsonTopicsArray.add(topicPrimitive); } root.add("topics", jsonTopicsArray); root.addProperty("docNum", bean.documentNumber); Collection<BaseContent> filteredContentBeans = new ArrayList<BaseContent>(); for (Map.Entry<ContentType, Collection<BaseContent>> entry : bean.getContentBeansByType().entrySet()) { SortedSet<BaseContent> sortedSet = new TreeSet<BaseContent>((Comparator<? super BaseContent>) BaseContent.CONTENT_COMPARATOR); for (final BaseContent contentBean : entry.getValue()) { try { if (contentBean.isForEndUserDisplay()) { sortedSet.add(contentBean); } } catch (UnsupportedOperationException e) {} } if (sortedSet.size() > 0) { filteredContentBeans.addAll( sortedSet.headSet( CollectionUtils.get( sortedSet, ((sortedSet.size() >= 3) ? 
2 : sortedSet.size()) ) ) ); } } final JsonArray jsonContentBeanArray = new JsonArray(); for (BaseContent contentBean : filteredContentBeans) { jsonContentBeanArray.add(new JsonPrimitive(contentBean.getType() + ":" + contentBean.getName())); } root.add("contentBeans", jsonContentBeanArray); return root; } @Override public DocumentBean deserialize(final JsonElement jsonElement, final Type type, final JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { DocumentBean bean = new DocumentBean(); final JsonObject root = jsonElement.getAsJsonObject(); bean.docId = root.get("docId").getAsString(); bean.title = root.get("title").getAsString(); bean.contentMD5Sum = root.get("md5sum").getAsString(); final Collection<String> topics = new ArrayList<String>(); final JsonArray jsonTopicsArray = root.get("topics").getAsJsonArray(); for (final JsonElement _jsonElement : jsonTopicsArray) { topics.add(_jsonElement.getAsString()); } bean.topics = topics; bean.documentNumber = root.get("docNum").getAsLong(); bean.document = root.get("docBody").getAsString(); final JsonArray jsonContentBeanArray = root.get("contentBeans").getAsJsonArray(); HashMultimap<ContentType, BaseContent> contentBeans = null; if (jsonContentBeanArray.size() > 0) { contentBeans = HashMultimap.create(); for (final JsonElement _jsonElement : jsonContentBeanArray) { Map content = new Gson().fromJson(_jsonElement.getAsString(), Map.class); try { BaseContent baseContent = BaseContent.getChildInstance(content); contentBeans.put(baseContent.getContentType(), baseContent); } catch (Exception e) {} } } bean.contentBeans = contentBeans; return bean; } } }
src/main/java/com/brajagopal/rmend/data/beans/DocumentBean.java
package com.brajagopal.rmend.data.beans; import com.google.common.collect.HashMultimap; import com.google.common.collect.TreeMultimap; import com.google.gson.*; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.collections4.ComparatorUtils; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Logger; import org.joda.time.DateTime; import java.lang.reflect.Type; import java.nio.charset.Charset; import java.util.*; /** * @author <bxr4261> */ public class DocumentBean extends BaseContent { private static Logger logger = Logger.getLogger(DocumentBean.class); private long documentNumber; private String docId; private String title; private String document; private String contentMD5Sum; private Collection<String> topics; private HashMultimap<ContentType, BaseContent> contentBeans; public DocumentBean() { this(ContentType.DOCUMENT_INFO); } private DocumentBean(ContentType _contentType) { super(_contentType); } @Override public void process(Map<String, ? extends Object> _value) { Map<String, String> infoValue = ((Map<String, String>)_value.get("info")); String documentId = infoValue.get("docId"); String docBody = infoValue.get("document"); String[] docElements = docBody.split("\\n", 2); if (docElements.length == 2) { this.title = docElements[0]; this.document = StringUtils.trim(docElements[1]); } else { this.title = ""; this.document = StringUtils.toEncodedString(docBody.getBytes(), Charset.forName("UTF8")); } this.docId = documentId.substring(documentId.lastIndexOf("/") + 1, documentId.length()); this.contentMD5Sum = DigestUtils.md5Hex(this.document); this.documentNumber = System.currentTimeMillis() - new DateTime("2015-06-01").getMillis(); contentBeans = HashMultimap.create(); } @Override public BaseContent getInstance() { return new DocumentBean(); } @Override public double getScore() { throw new UnsupportedOperationException(); } @Override public String toString() { return "DocumentBean {" + "docId='" + getDocId() + '\'' + ", docTitle='" + getTitle() + '\'' + ", topic='" + getTopic() + '\'' + ", documentNumber='" + getDocumentNumber() + '\'' + ", contentMD5Sum='" + getContentMD5Sum() + '\'' + ", contentBeans=" + getContentBeans() + '}'; } public String getDocId() { return docId; } public String getDocument() { return document; } public String getContentMD5Sum() { return contentMD5Sum; } public long getDocumentNumber() { return documentNumber; } public String getTitle() { return title; } @Override public Boolean isForEndUserDisplay() { return null; } public Collection<BaseContent> getContentBeans() { if (contentBeans != null) { return contentBeans.values(); } return null; } public Collection<String> getTopic() { return topics; } public void setContentBeans(Collection<BaseContent> _contentBeans) { for (BaseContent bean : _contentBeans) { contentBeans.put(bean.getContentType(), bean); } Collection<BaseContent> topics = contentBeans.get(ContentType.TOPICS); if (topics.size() > 0) { this.topics = new ArrayList<String>(); for (BaseContent topic : topics) { this.topics.add(topic.getName()); } } else { this.topics = Arrays.asList("NA"); } } public int getEntitySize() { return this.getContentBeans().size(); } public Map<ContentType, Collection<BaseContent>> getContentBeansByType() { return contentBeans.asMap(); } public TreeMultimap<ContentType, BaseContent> getRelevantBeans(int _numResult) { return getTopNRelevantBeans(_numResult); } public TreeMultimap<ContentType, BaseContent> getRelevantBeans() { return getTopNRelevantBeans(2); } private 
TreeMultimap<ContentType, BaseContent> getTopNRelevantBeans(int _numResult) { TreeMultimap<ContentType, BaseContent> retVal = TreeMultimap.create(ComparatorUtils.NATURAL_COMPARATOR, BaseContent.CONTENT_COMPARATOR); for (Map.Entry<ContentType, Collection<BaseContent>> beansByType : getContentBeansByType().entrySet()) { List<BaseContent> sortedValues = new ArrayList<BaseContent>(beansByType.getValue()); Collections.sort(sortedValues, (Comparator<? super BaseContent>) BaseContent.CONTENT_COMPARATOR); if (sortedValues.size() > _numResult) { sortedValues.subList(_numResult, sortedValues.size()).clear(); // trim to top 2 from each kind } retVal.putAll(beansByType.getKey(), sortedValues); } return retVal; } public static class DocumentSerDe implements JsonSerializer<DocumentBean>, JsonDeserializer<DocumentBean> { @Override public JsonElement serialize(final DocumentBean bean, Type type, JsonSerializationContext jsonSerializationContext) { JsonObject root = new JsonObject(); root.addProperty("docId", bean.docId); root.addProperty("title", bean.title); root.addProperty("md5sum", bean.contentMD5Sum); root.addProperty("docBody", bean.document); final JsonArray jsonTopicsArray = new JsonArray(); for (final String topic : bean.getTopic()) { final JsonPrimitive topicPrimitive = new JsonPrimitive(topic); jsonTopicsArray.add(topicPrimitive); } root.add("topics", jsonTopicsArray); root.addProperty("docNum", bean.documentNumber); final JsonArray jsonContentBeanArray = new JsonArray(); for (Map.Entry<ContentType, Collection<BaseContent>> entry : bean.getContentBeansByType().entrySet()) { for (final BaseContent contentBean : entry.getValue()) { try { jsonContentBeanArray.add(new JsonPrimitive(new Gson().toJson(contentBean))); } catch (UnsupportedOperationException e) {} } } root.add("contentBeans", jsonContentBeanArray); return root; } @Override public DocumentBean deserialize(final JsonElement jsonElement, final Type type, final JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { DocumentBean bean = new DocumentBean(); final JsonObject root = jsonElement.getAsJsonObject(); bean.docId = root.get("docId").getAsString(); bean.title = root.get("title").getAsString(); bean.contentMD5Sum = root.get("md5sum").getAsString(); final Collection<String> topics = new ArrayList<String>(); final JsonArray jsonTopicsArray = root.get("topics").getAsJsonArray(); for (final JsonElement _jsonElement : jsonTopicsArray) { topics.add(_jsonElement.getAsString()); } bean.topics = topics; bean.documentNumber = root.get("docNum").getAsLong(); bean.document = root.get("docBody").getAsString(); final JsonArray jsonContentBeanArray = root.get("contentBeans").getAsJsonArray(); HashMultimap<ContentType, BaseContent> contentBeans = null; if (jsonContentBeanArray.size() > 0) { contentBeans = HashMultimap.create(); for (final JsonElement _jsonElement : jsonContentBeanArray) { Map content = new Gson().fromJson(_jsonElement.getAsString(), Map.class); try { BaseContent baseContent = BaseContent.getChildInstance(content); contentBeans.put(baseContent.getContentType(), baseContent); } catch (Exception e) {} } } bean.contentBeans = contentBeans; return bean; } } }
Refactored the DocumentBean to populate the contentBeans property with the top-2 values per entity type.
src/main/java/com/brajagopal/rmend/data/beans/DocumentBean.java
Refactored the DocumentBean to populate the contentBeans property with the top-2 values per entity type.
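A self-contained sketch of the "top-2 values per entity type" selection described by the commit message above, reduced to plain java.util collections; the record's own implementation uses Guava multimaps and a BaseContent comparator, so the TopNPerType class, the Entry record, and its score field are illustrative stand-ins:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public final class TopNPerType {

    // Stand-in for BaseContent: a typed, named item with a relevance score.
    record Entry(String type, String name, double score) {}

    // Group entries by type, sort each group by descending score, and keep the top n per type.
    static Map<String, List<Entry>> topNPerType(List<Entry> entries, int n) {
        Map<String, List<Entry>> byType = new LinkedHashMap<>();
        for (Entry e : entries) {
            byType.computeIfAbsent(e.type(), k -> new ArrayList<>()).add(e);
        }
        for (List<Entry> group : byType.values()) {
            group.sort(Comparator.comparingDouble(Entry::score).reversed());
            if (group.size() > n) {
                // Trim in place, as getTopNRelevantBeans does with subList(...).clear() in the record.
                group.subList(n, group.size()).clear();
            }
        }
        return byType;
    }

    public static void main(String[] args) {
        List<Entry> sample = List.of(
                new Entry("Person", "Alice", 0.9), new Entry("Person", "Bob", 0.4),
                new Entry("Person", "Carol", 0.7), new Entry("Company", "Acme", 0.8));
        // Keeps Alice and Carol for Person, Acme for Company.
        System.out.println(topNPerType(sample, 2));
    }
}

Trimming each per-type list in place with subList(n, size()).clear() matches the approach used by getTopNRelevantBeans in the record's new_contents field.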
Java
apache-2.0
c332d9a3d4d9485cfe60fb8dd9db7dbde971642c
0
mrjoel/gitblit,mystygage/gitblit,gitblit/gitblit,gitblit/gitblit,mrjoel/gitblit,two-ack/gitblit,fuero/gitblit,two-ack/gitblit,vitalif/gitblit,lucamilanesio/gitblit,fzs/gitblit,vitalif/gitblit,vitalif/gitblit,lucamilanesio/gitblit,mrjoel/gitblit,mystygage/gitblit,RainerW/gitblit,RainerW/gitblit,two-ack/gitblit,gitblit/gitblit,mystygage/gitblit,two-ack/gitblit,mystygage/gitblit,lucamilanesio/gitblit,RainerW/gitblit,mrjoel/gitblit,fzs/gitblit,paulsputer/gitblit,fuero/gitblit,paulsputer/gitblit,fuero/gitblit,paulsputer/gitblit,two-ack/gitblit,paladox/gitblit,paladox/gitblit,paladox/gitblit,gitblit/gitblit,vitalif/gitblit,RainerW/gitblit,mystygage/gitblit,lucamilanesio/gitblit,RainerW/gitblit,paladox/gitblit,mrjoel/gitblit,fzs/gitblit,paladox/gitblit,fzs/gitblit,paulsputer/gitblit,gitblit/gitblit,fuero/gitblit,fuero/gitblit,vitalif/gitblit,paulsputer/gitblit,lucamilanesio/gitblit,fzs/gitblit
/* * Copyright 2011 gitblit.com. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gitblit.wicket.pages; import java.io.Serializable; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.wicket.Component; import org.apache.wicket.PageParameters; import org.apache.wicket.RestartResponseException; import org.apache.wicket.behavior.SimpleAttributeModifier; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.DropDownChoice; import org.apache.wicket.markup.html.form.TextField; import org.apache.wicket.markup.html.link.ExternalLink; import org.apache.wicket.markup.html.panel.Fragment; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.request.target.basic.RedirectRequestTarget; import org.eclipse.jgit.diff.DiffEntry.ChangeType; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.gitblit.Constants; import com.gitblit.GitBlitException; import com.gitblit.Keys; import com.gitblit.extensions.RepositoryNavLinkExtension; import com.gitblit.models.NavLink; import com.gitblit.models.NavLink.ExternalNavLink; import com.gitblit.models.NavLink.PageNavLink; import com.gitblit.models.ProjectModel; import com.gitblit.models.RefModel; import com.gitblit.models.RepositoryModel; import com.gitblit.models.SubmoduleModel; import com.gitblit.models.UserModel; import com.gitblit.models.UserRepositoryPreferences; import com.gitblit.servlet.PagesServlet; import com.gitblit.servlet.SyndicationServlet; import com.gitblit.utils.ArrayUtils; import com.gitblit.utils.BugtraqProcessor; import com.gitblit.utils.DeepCopier; import com.gitblit.utils.JGitUtils; import com.gitblit.utils.ModelUtils; import com.gitblit.utils.RefLogUtils; import com.gitblit.utils.StringUtils; import com.gitblit.wicket.CacheControl; import com.gitblit.wicket.GitBlitWebSession; import com.gitblit.wicket.SessionlessForm; import com.gitblit.wicket.WicketUtils; import com.gitblit.wicket.panels.LinkPanel; import com.gitblit.wicket.panels.NavigationPanel; import com.gitblit.wicket.panels.RefsPanel; import com.google.common.base.Optional; public abstract class RepositoryPage extends RootPage { protected final Logger logger = LoggerFactory.getLogger(getClass()); private final String PARAM_STAR = "star"; protected final String projectName; protected final String repositoryName; protected final String objectId; private transient Repository r; private RepositoryModel m; private Map<String, SubmoduleModel> submodules; private boolean showAdmin; private boolean isOwner; public RepositoryPage(PageParameters params) { super(params); repositoryName = WicketUtils.getRepositoryName(params); String root = 
StringUtils.getFirstPathElement(repositoryName); if (StringUtils.isEmpty(root)) { projectName = app().settings().getString(Keys.web.repositoryRootGroupName, "main"); } else { projectName = root; } objectId = WicketUtils.getObject(params); if (StringUtils.isEmpty(repositoryName)) { error(MessageFormat.format(getString("gb.repositoryNotSpecifiedFor"), getPageName()), true); } if (!getRepositoryModel().hasCommits && getClass() != EmptyRepositoryPage.class) { throw new RestartResponseException(EmptyRepositoryPage.class, params); } if (getRepositoryModel().isCollectingGarbage) { error(MessageFormat.format(getString("gb.busyCollectingGarbage"), getRepositoryModel().name), true); } if (objectId != null) { RefModel branch = null; if ((branch = JGitUtils.getBranch(getRepository(), objectId)) != null) { UserModel user = GitBlitWebSession.get().getUser(); if (user == null) { // workaround until get().getUser() is reviewed throughout the app user = UserModel.ANONYMOUS; } boolean canAccess = user.canView(getRepositoryModel(), branch.reference.getName()); if (!canAccess) { error(getString("gb.accessDenied"), true); } } } if (params.containsKey(PARAM_STAR)) { // set starred state boolean star = params.getBoolean(PARAM_STAR); UserModel user = GitBlitWebSession.get().getUser(); if (user != null && user.isAuthenticated) { UserRepositoryPreferences prefs = user.getPreferences().getRepositoryPreferences(getRepositoryModel().name); prefs.starred = star; try { app().gitblit().reviseUser(user.username, user); } catch (GitBlitException e) { logger.error("Failed to update user " + user.username, e); error(getString("gb.failedToUpdateUser"), false); } } } showAdmin = false; if (app().settings().getBoolean(Keys.web.authenticateAdminPages, true)) { boolean allowAdmin = app().settings().getBoolean(Keys.web.allowAdministration, false); showAdmin = allowAdmin && GitBlitWebSession.get().canAdmin(); } else { showAdmin = app().settings().getBoolean(Keys.web.allowAdministration, false); } isOwner = GitBlitWebSession.get().isLoggedIn() && (getRepositoryModel().isOwner(GitBlitWebSession.get().getUsername())); // register the available navigation links for this page and user List<NavLink> navLinks = registerNavLinks(); // standard navigation links NavigationPanel navigationPanel = new NavigationPanel("repositoryNavPanel", getRepoNavPageClass(), navLinks); add(navigationPanel); add(new ExternalLink("syndication", SyndicationServlet.asLink(getRequest() .getRelativePathPrefixToContextRoot(), getRepositoryName(), null, 0))); // add floating search form SearchForm searchForm = new SearchForm("searchForm", getRepositoryName()); add(searchForm); searchForm.setTranslatedAttributes(); // set stateless page preference setStatelessHint(true); } @Override protected Class<? extends BasePage> getRootNavPageClass() { return RepositoriesPage.class; } protected Class<? 
extends BasePage> getRepoNavPageClass() { return getClass(); } protected BugtraqProcessor bugtraqProcessor() { return new BugtraqProcessor(app().settings()); } private List<NavLink> registerNavLinks() { Repository r = getRepository(); RepositoryModel model = getRepositoryModel(); PageParameters params = null; PageParameters objectParams = null; if (!StringUtils.isEmpty(repositoryName)) { params = WicketUtils.newRepositoryParameter(getRepositoryName()); objectParams = params; // preserve the objectid iff the objectid directly (or indirectly) refers to a ref if (isCommitPage() && !StringUtils.isEmpty(objectId)) { RevCommit commit = JGitUtils.getCommit(r, objectId); if (commit != null) { String bestId = getBestCommitId(commit); if (!commit.getName().equals(bestId)) { objectParams = WicketUtils.newObjectParameter(getRepositoryName(), bestId); } } } } List<NavLink> navLinks = new ArrayList<NavLink>(); // standard links if (RefLogUtils.getRefLogBranch(r) == null) { navLinks.add(new PageNavLink("gb.summary", SummaryPage.class, params)); } else { navLinks.add(new PageNavLink("gb.summary", SummaryPage.class, params)); // pages.put("overview", new PageRegistration("gb.overview", OverviewPage.class, params)); navLinks.add(new PageNavLink("gb.reflog", ReflogPage.class, params)); } if (!model.hasCommits) { return navLinks; } navLinks.add(new PageNavLink("gb.commits", LogPage.class, objectParams)); navLinks.add(new PageNavLink("gb.tree", TreePage.class, objectParams)); if (app().tickets().isReady() && (app().tickets().isAcceptingNewTickets(model) || app().tickets().hasTickets(model))) { PageParameters tParams = WicketUtils.newOpenTicketsParameter(getRepositoryName()); navLinks.add(new PageNavLink("gb.tickets", TicketsPage.class, tParams)); } navLinks.add(new PageNavLink("gb.docs", DocsPage.class, objectParams, true)); if (app().settings().getBoolean(Keys.web.allowForking, true)) { navLinks.add(new PageNavLink("gb.forks", ForksPage.class, params, true)); } navLinks.add(new PageNavLink("gb.compare", ComparePage.class, params, true)); // conditional links // per-repository extra navlinks if (JGitUtils.getPagesBranch(r) != null) { ExternalNavLink pagesLink = new ExternalNavLink("gb.pages", PagesServlet.asLink( getRequest().getRelativePathPrefixToContextRoot(), getRepositoryName(), null), true); navLinks.add(pagesLink); } UserModel user = UserModel.ANONYMOUS; if (GitBlitWebSession.get().isLoggedIn()) { user = GitBlitWebSession.get().getUser(); } // add repository nav link extensions List<RepositoryNavLinkExtension> extensions = app().plugins().getExtensions(RepositoryNavLinkExtension.class); for (RepositoryNavLinkExtension ext : extensions) { navLinks.addAll(ext.getNavLinks(user, model)); } return navLinks; } protected boolean allowForkControls() { return app().settings().getBoolean(Keys.web.allowForking, true); } @Override protected void setupPage(String repositoryName, String pageName) { String projectName = StringUtils.getFirstPathElement(repositoryName); ProjectModel project = app().projects().getProjectModel(projectName); if (project.isUserProject()) { // user-as-project add(new LinkPanel("projectTitle", null, project.getDisplayName(), UserPage.class, WicketUtils.newUsernameParameter(project.name.substring(1)))); } else { // project add(new LinkPanel("projectTitle", null, project.name, ProjectPage.class, WicketUtils.newProjectParameter(project.name))); } String name = StringUtils.stripDotGit(repositoryName); if (!StringUtils.isEmpty(projectName) && name.startsWith(projectName)) { name = 
name.substring(projectName.length() + 1); } add(new LinkPanel("repositoryName", null, name, SummaryPage.class, WicketUtils.newRepositoryParameter(repositoryName))); UserModel user = GitBlitWebSession.get().getUser(); if (user == null) { user = UserModel.ANONYMOUS; } // indicate origin repository RepositoryModel model = getRepositoryModel(); if (StringUtils.isEmpty(model.originRepository)) { if (model.isMirror) { add(new Fragment("repoIcon", "mirrorIconFragment", this)); Fragment mirrorFrag = new Fragment("originRepository", "mirrorFragment", this); Label lbl = new Label("originRepository", MessageFormat.format(getString("gb.mirrorOf"), "<b>" + model.origin + "</b>")); mirrorFrag.add(lbl.setEscapeModelStrings(false)); add(mirrorFrag); } else { if (model.isBare) { add(new Fragment("repoIcon", "repoIconFragment", this)); } else { add(new Fragment("repoIcon", "cloneIconFragment", this)); } add(new Label("originRepository", Optional.of(model.description).or(""))); } } else { RepositoryModel origin = app().repositories().getRepositoryModel(model.originRepository); if (origin == null) { // no origin repository, show description if available if (model.isBare) { add(new Fragment("repoIcon", "repoIconFragment", this)); } else { add(new Fragment("repoIcon", "cloneIconFragment", this)); } add(new Label("originRepository", Optional.of(model.description).or(""))); } else if (!user.canView(origin)) { // show origin repository without link add(new Fragment("repoIcon", "forkIconFragment", this)); Fragment forkFrag = new Fragment("originRepository", "originFragment", this); forkFrag.add(new Label("originRepository", StringUtils.stripDotGit(model.originRepository))); add(forkFrag); } else { // link to origin repository add(new Fragment("repoIcon", "forkIconFragment", this)); Fragment forkFrag = new Fragment("originRepository", "originFragment", this); forkFrag.add(new LinkPanel("originRepository", null, StringUtils.stripDotGit(model.originRepository), SummaryPage.class, WicketUtils.newRepositoryParameter(model.originRepository))); add(forkFrag); } } // new ticket button if (user.isAuthenticated && app().tickets().isAcceptingNewTickets(getRepositoryModel())) { String newTicketUrl = getRequestCycle().urlFor(NewTicketPage.class, WicketUtils.newRepositoryParameter(repositoryName)).toString(); addToolbarButton("newTicketLink", "fa fa-ticket", getString("gb.new"), newTicketUrl); } else { add(new Label("newTicketLink").setVisible(false)); } // (un)star link allows a user to star a repository if (user.isAuthenticated && model.hasCommits) { PageParameters starParams = DeepCopier.copy(getPageParameters()); starParams.put(PARAM_STAR, !user.getPreferences().isStarredRepository(model.name)); String toggleStarUrl = getRequestCycle().urlFor(getClass(), starParams).toString(); if (user.getPreferences().isStarredRepository(model.name)) { // show unstar button add(new Label("starLink").setVisible(false)); addToolbarButton("unstarLink", "icon-star-empty", getString("gb.unstar"), toggleStarUrl); } else { // show star button addToolbarButton("starLink", "icon-star", getString("gb.star"), toggleStarUrl); add(new Label("unstarLink").setVisible(false)); } } else { // anonymous user add(new Label("starLink").setVisible(false)); add(new Label("unstarLink").setVisible(false)); } // fork controls if (!allowForkControls() || !user.isAuthenticated) { // must be logged-in to fork, hide all fork controls add(new ExternalLink("forkLink", "").setVisible(false)); add(new ExternalLink("myForkLink", "").setVisible(false)); } else { String fork 
= app().repositories().getFork(user.username, model.name); String userRepo = ModelUtils.getPersonalPath(user.username) + "/" + StringUtils.stripDotGit(StringUtils.getLastPathElement(model.name)); boolean hasUserRepo = app().repositories().hasRepository(userRepo); boolean hasFork = fork != null; boolean canFork = user.canFork(model) && model.hasCommits && !hasUserRepo; if (hasFork || !canFork) { // user not allowed to fork or fork already exists or repo forbids forking add(new ExternalLink("forkLink", "").setVisible(false)); if (hasFork && !fork.equals(model.name)) { // user has fork, view my fork link String url = getRequestCycle().urlFor(SummaryPage.class, WicketUtils.newRepositoryParameter(fork)).toString(); add(new ExternalLink("myForkLink", url)); } else { // no fork, hide view my fork link add(new ExternalLink("myForkLink", "").setVisible(false)); } } else if (canFork) { // can fork and we do not have one add(new ExternalLink("myForkLink", "").setVisible(false)); String url = getRequestCycle().urlFor(ForkPage.class, WicketUtils.newRepositoryParameter(model.name)).toString(); add(new ExternalLink("forkLink", url)); } } if (showAdmin || isOwner) { String url = getRequestCycle().urlFor(EditRepositoryPage.class, WicketUtils.newRepositoryParameter(model.name)).toString(); add(new ExternalLink("editLink", url)); } else { add(new Label("editLink").setVisible(false)); } super.setupPage(repositoryName, pageName); } protected void addToolbarButton(String wicketId, String iconClass, String label, String url) { Fragment button = new Fragment(wicketId, "toolbarLinkFragment", this); Label icon = new Label("icon"); WicketUtils.setCssClass(icon, iconClass); button.add(icon); button.add(new Label("label", label)); button.add(new SimpleAttributeModifier("href", url)); add(button); } protected void addSyndicationDiscoveryLink() { add(WicketUtils.syndicationDiscoveryLink(SyndicationServlet.getTitle(repositoryName, objectId), SyndicationServlet.asLink(getRequest() .getRelativePathPrefixToContextRoot(), repositoryName, objectId, 0))); } protected Repository getRepository() { if (r == null) { Repository r = app().repositories().getRepository(repositoryName); if (r == null) { error(getString("gb.canNotLoadRepository") + " " + repositoryName, true); return null; } this.r = r; } return r; } protected RepositoryModel getRepositoryModel() { if (m == null) { RepositoryModel model = app().repositories().getRepositoryModel( GitBlitWebSession.get().getUser(), repositoryName); if (model == null) { if (app().repositories().hasRepository(repositoryName, true)) { // has repository, but unauthorized authenticationError(getString("gb.unauthorizedAccessForRepository") + " " + repositoryName); } else { // does not have repository error(getString("gb.canNotLoadRepository") + " " + repositoryName, true); } return null; } m = model; } return m; } protected String getRepositoryName() { return getRepositoryModel().name; } protected RevCommit getCommit() { RevCommit commit = JGitUtils.getCommit(r, objectId); if (commit == null) { error(MessageFormat.format(getString("gb.failedToFindCommit"), objectId, repositoryName, getPageName()), null, LogPage.class, WicketUtils.newRepositoryParameter(repositoryName)); } getSubmodules(commit); return commit; } protected String getBestCommitId(RevCommit commit) { String head = null; try { head = r.resolve(getRepositoryModel().HEAD).getName(); } catch (Exception e) { } String id = commit.getName(); if (!StringUtils.isEmpty(head) && head.equals(id)) { // match default branch return 
Repository.shortenRefName(getRepositoryModel().HEAD); } // find first branch match for (RefModel ref : JGitUtils.getLocalBranches(r, false, -1)) { if (ref.getObjectId().getName().equals(id)) { return Repository.shortenRefName(ref.getName()); } } // return sha return id; } protected Map<String, SubmoduleModel> getSubmodules(RevCommit commit) { if (submodules == null) { submodules = new HashMap<String, SubmoduleModel>(); for (SubmoduleModel model : JGitUtils.getSubmodules(r, commit.getTree())) { submodules.put(model.path, model); } } return submodules; } protected SubmoduleModel getSubmodule(String path) { SubmoduleModel model = null; if (submodules != null) { model = submodules.get(path); } if (model == null) { // undefined submodule?! model = new SubmoduleModel(path.substring(path.lastIndexOf('/') + 1), path, path); model.hasSubmodule = false; model.gitblitPath = model.name; return model; } else { // extract the repository name from the clone url List<String> patterns = app().settings().getStrings(Keys.git.submoduleUrlPatterns); String submoduleName = StringUtils.extractRepositoryPath(model.url, patterns.toArray(new String[0])); // determine the current path for constructing paths relative // to the current repository String currentPath = ""; if (repositoryName.indexOf('/') > -1) { currentPath = repositoryName.substring(0, repositoryName.lastIndexOf('/') + 1); } // try to locate the submodule repository // prefer bare to non-bare names List<String> candidates = new ArrayList<String>(); // relative candidates.add(currentPath + StringUtils.stripDotGit(submoduleName)); candidates.add(candidates.get(candidates.size() - 1) + ".git"); // relative, no subfolder if (submoduleName.lastIndexOf('/') > -1) { String name = submoduleName.substring(submoduleName.lastIndexOf('/') + 1); candidates.add(currentPath + StringUtils.stripDotGit(name)); candidates.add(candidates.get(candidates.size() - 1) + ".git"); } // absolute candidates.add(StringUtils.stripDotGit(submoduleName)); candidates.add(candidates.get(candidates.size() - 1) + ".git"); // absolute, no subfolder if (submoduleName.lastIndexOf('/') > -1) { String name = submoduleName.substring(submoduleName.lastIndexOf('/') + 1); candidates.add(StringUtils.stripDotGit(name)); candidates.add(candidates.get(candidates.size() - 1) + ".git"); } // create a unique, ordered set of candidate paths Set<String> paths = new LinkedHashSet<String>(candidates); for (String candidate : paths) { if (app().repositories().hasRepository(candidate)) { model.hasSubmodule = true; model.gitblitPath = candidate; return model; } } // we do not have a copy of the submodule, but we need a path model.gitblitPath = candidates.get(0); return model; } } protected String getShortObjectId(String objectId) { return objectId.substring(0, app().settings().getInteger(Keys.web.shortCommitIdLength, 6)); } protected void addRefs(Repository r, RevCommit c) { add(new RefsPanel("refsPanel", repositoryName, c, JGitUtils.getAllRefs(r, getRepositoryModel().showRemoteBranches))); } protected void addFullText(String wicketId, String text) { RepositoryModel model = getRepositoryModel(); String content = bugtraqProcessor().processCommitMessage(r, model, text); String html; switch (model.commitMessageRenderer) { case MARKDOWN: String safeContent = app().xssFilter().relaxed(content); html = MessageFormat.format("<div class='commit_message'>{0}</div>", safeContent); break; default: html = MessageFormat.format("<pre class='commit_message'>{0}</pre>", content); break; } add(new Label(wicketId, 
html).setEscapeModelStrings(false)); } protected abstract String getPageName(); protected boolean isCommitPage() { return false; } protected Component createPersonPanel(String wicketId, PersonIdent identity, Constants.SearchType searchType) { String name = identity == null ? "" : identity.getName(); String address = identity == null ? "" : identity.getEmailAddress(); name = StringUtils.removeNewlines(name); address = StringUtils.removeNewlines(address); boolean showEmail = app().settings().getBoolean(Keys.web.showEmailAddresses, false); if (!showEmail || StringUtils.isEmpty(name) || StringUtils.isEmpty(address)) { String value = name; if (StringUtils.isEmpty(value)) { if (showEmail) { value = address; } else { value = getString("gb.missingUsername"); } } Fragment partial = new Fragment(wicketId, "partialPersonIdent", this); LinkPanel link = new LinkPanel("personName", "list", value, GitSearchPage.class, WicketUtils.newSearchParameter(repositoryName, objectId, value, searchType)); setPersonSearchTooltip(link, value, searchType); partial.add(link); return partial; } else { Fragment fullPerson = new Fragment(wicketId, "fullPersonIdent", this); LinkPanel nameLink = new LinkPanel("personName", "list", name, GitSearchPage.class, WicketUtils.newSearchParameter(repositoryName, objectId, name, searchType)); setPersonSearchTooltip(nameLink, name, searchType); fullPerson.add(nameLink); LinkPanel addressLink = new LinkPanel("personAddress", "hidden-phone list", "<" + address + ">", GitSearchPage.class, WicketUtils.newSearchParameter(repositoryName, objectId, address, searchType)); setPersonSearchTooltip(addressLink, address, searchType); fullPerson.add(addressLink); return fullPerson; } } protected void setPersonSearchTooltip(Component component, String value, Constants.SearchType searchType) { if (searchType.equals(Constants.SearchType.AUTHOR)) { WicketUtils.setHtmlTooltip(component, getString("gb.searchForAuthor") + " " + value); } else if (searchType.equals(Constants.SearchType.COMMITTER)) { WicketUtils.setHtmlTooltip(component, getString("gb.searchForCommitter") + " " + value); } } protected void setChangeTypeTooltip(Component container, ChangeType type) { switch (type) { case ADD: WicketUtils.setHtmlTooltip(container, getString("gb.addition")); break; case COPY: case RENAME: WicketUtils.setHtmlTooltip(container, getString("gb.rename")); break; case DELETE: WicketUtils.setHtmlTooltip(container, getString("gb.deletion")); break; case MODIFY: WicketUtils.setHtmlTooltip(container, getString("gb.modification")); break; } } @Override protected void onBeforeRender() { // dispose of repository object if (r != null) { r.close(); r = null; } // setup page header and footer setupPage(getRepositoryName(), "/ " + getPageName()); super.onBeforeRender(); } @Override protected void setLastModified() { if (getClass().isAnnotationPresent(CacheControl.class)) { CacheControl cacheControl = getClass().getAnnotation(CacheControl.class); switch (cacheControl.value()) { case REPOSITORY: RepositoryModel repository = getRepositoryModel(); if (repository != null) { setLastModified(repository.lastChange); } break; case COMMIT: RevCommit commit = getCommit(); if (commit != null) { Date commitDate = JGitUtils.getCommitDate(commit); setLastModified(commitDate); } break; default: super.setLastModified(); } } } protected PageParameters newRepositoryParameter() { return WicketUtils.newRepositoryParameter(repositoryName); } protected PageParameters newCommitParameter() { return WicketUtils.newObjectParameter(repositoryName, 
objectId); } protected PageParameters newCommitParameter(String commitId) { return WicketUtils.newObjectParameter(repositoryName, commitId); } public boolean isShowAdmin() { return showAdmin; } public boolean isOwner() { return isOwner; } private class SearchForm extends SessionlessForm<Void> implements Serializable { private static final long serialVersionUID = 1L; private final String repositoryName; private final IModel<String> searchBoxModel = new Model<String>(""); private final IModel<Constants.SearchType> searchTypeModel = new Model<Constants.SearchType>( Constants.SearchType.COMMIT); public SearchForm(String id, String repositoryName) { super(id, RepositoryPage.this.getClass(), RepositoryPage.this.getPageParameters()); this.repositoryName = repositoryName; DropDownChoice<Constants.SearchType> searchType = new DropDownChoice<Constants.SearchType>( "searchType", Arrays.asList(Constants.SearchType.values())); searchType.setModel(searchTypeModel); add(searchType.setVisible(app().settings().getBoolean(Keys.web.showSearchTypeSelection, false))); TextField<String> searchBox = new TextField<String>("searchBox", searchBoxModel); add(searchBox); } void setTranslatedAttributes() { WicketUtils.setHtmlTooltip(get("searchType"), getString("gb.searchTypeTooltip")); WicketUtils.setHtmlTooltip(get("searchBox"), MessageFormat.format(getString("gb.searchTooltip"), repositoryName)); WicketUtils.setInputPlaceholder(get("searchBox"), getString("gb.search")); } @Override public void onSubmit() { Constants.SearchType searchType = searchTypeModel.getObject(); String searchString = searchBoxModel.getObject(); if (StringUtils.isEmpty(searchString)) { // redirect to self to avoid wicket page update bug String absoluteUrl = getCanonicalUrl(); getRequestCycle().setRequestTarget(new RedirectRequestTarget(absoluteUrl)); return; } for (Constants.SearchType type : Constants.SearchType.values()) { if (searchString.toLowerCase().startsWith(type.name().toLowerCase() + ":")) { searchType = type; searchString = searchString.substring(type.name().toLowerCase().length() + 1) .trim(); break; } } Class<? extends BasePage> searchPageClass = GitSearchPage.class; RepositoryModel model = app().repositories().getRepositoryModel(repositoryName); if (app().settings().getBoolean(Keys.web.allowLuceneIndexing, true) && !ArrayUtils.isEmpty(model.indexedBranches)) { // this repository is Lucene-indexed searchPageClass = LuceneSearchPage.class; } // use an absolute url to workaround Wicket-Tomcat problems with // mounted url parameters (issue-111) PageParameters params = WicketUtils.newSearchParameter(repositoryName, null, searchString, searchType); String absoluteUrl = getCanonicalUrl(searchPageClass, params); getRequestCycle().setRequestTarget(new RedirectRequestTarget(absoluteUrl)); } } }
src/main/java/com/gitblit/wicket/pages/RepositoryPage.java
/* * Copyright 2011 gitblit.com. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gitblit.wicket.pages; import java.io.Serializable; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.wicket.Component; import org.apache.wicket.PageParameters; import org.apache.wicket.RestartResponseException; import org.apache.wicket.behavior.SimpleAttributeModifier; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.DropDownChoice; import org.apache.wicket.markup.html.form.TextField; import org.apache.wicket.markup.html.link.ExternalLink; import org.apache.wicket.markup.html.panel.Fragment; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.request.target.basic.RedirectRequestTarget; import org.eclipse.jgit.diff.DiffEntry.ChangeType; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.gitblit.Constants; import com.gitblit.GitBlitException; import com.gitblit.Keys; import com.gitblit.extensions.RepositoryNavLinkExtension; import com.gitblit.models.NavLink; import com.gitblit.models.NavLink.ExternalNavLink; import com.gitblit.models.NavLink.PageNavLink; import com.gitblit.models.ProjectModel; import com.gitblit.models.RefModel; import com.gitblit.models.RepositoryModel; import com.gitblit.models.SubmoduleModel; import com.gitblit.models.UserModel; import com.gitblit.models.UserRepositoryPreferences; import com.gitblit.servlet.PagesServlet; import com.gitblit.servlet.SyndicationServlet; import com.gitblit.utils.ArrayUtils; import com.gitblit.utils.BugtraqProcessor; import com.gitblit.utils.DeepCopier; import com.gitblit.utils.JGitUtils; import com.gitblit.utils.RefLogUtils; import com.gitblit.utils.StringUtils; import com.gitblit.wicket.CacheControl; import com.gitblit.wicket.GitBlitWebSession; import com.gitblit.wicket.SessionlessForm; import com.gitblit.wicket.WicketUtils; import com.gitblit.wicket.panels.LinkPanel; import com.gitblit.wicket.panels.NavigationPanel; import com.gitblit.wicket.panels.RefsPanel; import com.google.common.base.Optional; public abstract class RepositoryPage extends RootPage { protected final Logger logger = LoggerFactory.getLogger(getClass()); private final String PARAM_STAR = "star"; protected final String projectName; protected final String repositoryName; protected final String objectId; private transient Repository r; private RepositoryModel m; private Map<String, SubmoduleModel> submodules; private boolean showAdmin; private boolean isOwner; public RepositoryPage(PageParameters params) { super(params); repositoryName = WicketUtils.getRepositoryName(params); String root = StringUtils.getFirstPathElement(repositoryName); if (StringUtils.isEmpty(root)) { 
projectName = app().settings().getString(Keys.web.repositoryRootGroupName, "main"); } else { projectName = root; } objectId = WicketUtils.getObject(params); if (StringUtils.isEmpty(repositoryName)) { error(MessageFormat.format(getString("gb.repositoryNotSpecifiedFor"), getPageName()), true); } if (!getRepositoryModel().hasCommits && getClass() != EmptyRepositoryPage.class) { throw new RestartResponseException(EmptyRepositoryPage.class, params); } if (getRepositoryModel().isCollectingGarbage) { error(MessageFormat.format(getString("gb.busyCollectingGarbage"), getRepositoryModel().name), true); } if (objectId != null) { RefModel branch = null; if ((branch = JGitUtils.getBranch(getRepository(), objectId)) != null) { UserModel user = GitBlitWebSession.get().getUser(); if (user == null) { // workaround until get().getUser() is reviewed throughout the app user = UserModel.ANONYMOUS; } boolean canAccess = user.canView(getRepositoryModel(), branch.reference.getName()); if (!canAccess) { error(getString("gb.accessDenied"), true); } } } if (params.containsKey(PARAM_STAR)) { // set starred state boolean star = params.getBoolean(PARAM_STAR); UserModel user = GitBlitWebSession.get().getUser(); if (user != null && user.isAuthenticated) { UserRepositoryPreferences prefs = user.getPreferences().getRepositoryPreferences(getRepositoryModel().name); prefs.starred = star; try { app().gitblit().reviseUser(user.username, user); } catch (GitBlitException e) { logger.error("Failed to update user " + user.username, e); error(getString("gb.failedToUpdateUser"), false); } } } showAdmin = false; if (app().settings().getBoolean(Keys.web.authenticateAdminPages, true)) { boolean allowAdmin = app().settings().getBoolean(Keys.web.allowAdministration, false); showAdmin = allowAdmin && GitBlitWebSession.get().canAdmin(); } else { showAdmin = app().settings().getBoolean(Keys.web.allowAdministration, false); } isOwner = GitBlitWebSession.get().isLoggedIn() && (getRepositoryModel().isOwner(GitBlitWebSession.get().getUsername())); // register the available navigation links for this page and user List<NavLink> navLinks = registerNavLinks(); // standard navigation links NavigationPanel navigationPanel = new NavigationPanel("repositoryNavPanel", getRepoNavPageClass(), navLinks); add(navigationPanel); add(new ExternalLink("syndication", SyndicationServlet.asLink(getRequest() .getRelativePathPrefixToContextRoot(), getRepositoryName(), null, 0))); // add floating search form SearchForm searchForm = new SearchForm("searchForm", getRepositoryName()); add(searchForm); searchForm.setTranslatedAttributes(); // set stateless page preference setStatelessHint(true); } @Override protected Class<? extends BasePage> getRootNavPageClass() { return RepositoriesPage.class; } protected Class<? 
extends BasePage> getRepoNavPageClass() { return getClass(); } protected BugtraqProcessor bugtraqProcessor() { return new BugtraqProcessor(app().settings()); } private List<NavLink> registerNavLinks() { Repository r = getRepository(); RepositoryModel model = getRepositoryModel(); PageParameters params = null; PageParameters objectParams = null; if (!StringUtils.isEmpty(repositoryName)) { params = WicketUtils.newRepositoryParameter(getRepositoryName()); objectParams = params; // preserve the objectid iff the objectid directly (or indirectly) refers to a ref if (isCommitPage() && !StringUtils.isEmpty(objectId)) { RevCommit commit = JGitUtils.getCommit(r, objectId); if (commit != null) { String bestId = getBestCommitId(commit); if (!commit.getName().equals(bestId)) { objectParams = WicketUtils.newObjectParameter(getRepositoryName(), bestId); } } } } List<NavLink> navLinks = new ArrayList<NavLink>(); // standard links if (RefLogUtils.getRefLogBranch(r) == null) { navLinks.add(new PageNavLink("gb.summary", SummaryPage.class, params)); } else { navLinks.add(new PageNavLink("gb.summary", SummaryPage.class, params)); // pages.put("overview", new PageRegistration("gb.overview", OverviewPage.class, params)); navLinks.add(new PageNavLink("gb.reflog", ReflogPage.class, params)); } if (!model.hasCommits) { return navLinks; } navLinks.add(new PageNavLink("gb.commits", LogPage.class, objectParams)); navLinks.add(new PageNavLink("gb.tree", TreePage.class, objectParams)); if (app().tickets().isReady() && (app().tickets().isAcceptingNewTickets(model) || app().tickets().hasTickets(model))) { PageParameters tParams = WicketUtils.newOpenTicketsParameter(getRepositoryName()); navLinks.add(new PageNavLink("gb.tickets", TicketsPage.class, tParams)); } navLinks.add(new PageNavLink("gb.docs", DocsPage.class, objectParams, true)); if (app().settings().getBoolean(Keys.web.allowForking, true)) { navLinks.add(new PageNavLink("gb.forks", ForksPage.class, params, true)); } navLinks.add(new PageNavLink("gb.compare", ComparePage.class, params, true)); // conditional links // per-repository extra navlinks if (JGitUtils.getPagesBranch(r) != null) { ExternalNavLink pagesLink = new ExternalNavLink("gb.pages", PagesServlet.asLink( getRequest().getRelativePathPrefixToContextRoot(), getRepositoryName(), null), true); navLinks.add(pagesLink); } UserModel user = UserModel.ANONYMOUS; if (GitBlitWebSession.get().isLoggedIn()) { user = GitBlitWebSession.get().getUser(); } // add repository nav link extensions List<RepositoryNavLinkExtension> extensions = app().plugins().getExtensions(RepositoryNavLinkExtension.class); for (RepositoryNavLinkExtension ext : extensions) { navLinks.addAll(ext.getNavLinks(user, model)); } return navLinks; } protected boolean allowForkControls() { return app().settings().getBoolean(Keys.web.allowForking, true); } @Override protected void setupPage(String repositoryName, String pageName) { String projectName = StringUtils.getFirstPathElement(repositoryName); ProjectModel project = app().projects().getProjectModel(projectName); if (project.isUserProject()) { // user-as-project add(new LinkPanel("projectTitle", null, project.getDisplayName(), UserPage.class, WicketUtils.newUsernameParameter(project.name.substring(1)))); } else { // project add(new LinkPanel("projectTitle", null, project.name, ProjectPage.class, WicketUtils.newProjectParameter(project.name))); } String name = StringUtils.stripDotGit(repositoryName); if (!StringUtils.isEmpty(projectName) && name.startsWith(projectName)) { name = 
name.substring(projectName.length() + 1); } add(new LinkPanel("repositoryName", null, name, SummaryPage.class, WicketUtils.newRepositoryParameter(repositoryName))); UserModel user = GitBlitWebSession.get().getUser(); if (user == null) { user = UserModel.ANONYMOUS; } // indicate origin repository RepositoryModel model = getRepositoryModel(); if (StringUtils.isEmpty(model.originRepository)) { if (model.isMirror) { add(new Fragment("repoIcon", "mirrorIconFragment", this)); Fragment mirrorFrag = new Fragment("originRepository", "mirrorFragment", this); Label lbl = new Label("originRepository", MessageFormat.format(getString("gb.mirrorOf"), "<b>" + model.origin + "</b>")); mirrorFrag.add(lbl.setEscapeModelStrings(false)); add(mirrorFrag); } else { if (model.isBare) { add(new Fragment("repoIcon", "repoIconFragment", this)); } else { add(new Fragment("repoIcon", "cloneIconFragment", this)); } add(new Label("originRepository", Optional.of(model.description).or(""))); } } else { RepositoryModel origin = app().repositories().getRepositoryModel(model.originRepository); if (origin == null) { // no origin repository, show description if available if (model.isBare) { add(new Fragment("repoIcon", "repoIconFragment", this)); } else { add(new Fragment("repoIcon", "cloneIconFragment", this)); } add(new Label("originRepository", Optional.of(model.description).or(""))); } else if (!user.canView(origin)) { // show origin repository without link add(new Fragment("repoIcon", "forkIconFragment", this)); Fragment forkFrag = new Fragment("originRepository", "originFragment", this); forkFrag.add(new Label("originRepository", StringUtils.stripDotGit(model.originRepository))); add(forkFrag); } else { // link to origin repository add(new Fragment("repoIcon", "forkIconFragment", this)); Fragment forkFrag = new Fragment("originRepository", "originFragment", this); forkFrag.add(new LinkPanel("originRepository", null, StringUtils.stripDotGit(model.originRepository), SummaryPage.class, WicketUtils.newRepositoryParameter(model.originRepository))); add(forkFrag); } } // new ticket button if (user.isAuthenticated && app().tickets().isAcceptingNewTickets(getRepositoryModel())) { String newTicketUrl = getRequestCycle().urlFor(NewTicketPage.class, WicketUtils.newRepositoryParameter(repositoryName)).toString(); addToolbarButton("newTicketLink", "fa fa-ticket", getString("gb.new"), newTicketUrl); } else { add(new Label("newTicketLink").setVisible(false)); } // (un)star link allows a user to star a repository if (user.isAuthenticated && model.hasCommits) { PageParameters starParams = DeepCopier.copy(getPageParameters()); starParams.put(PARAM_STAR, !user.getPreferences().isStarredRepository(model.name)); String toggleStarUrl = getRequestCycle().urlFor(getClass(), starParams).toString(); if (user.getPreferences().isStarredRepository(model.name)) { // show unstar button add(new Label("starLink").setVisible(false)); addToolbarButton("unstarLink", "icon-star-empty", getString("gb.unstar"), toggleStarUrl); } else { // show star button addToolbarButton("starLink", "icon-star", getString("gb.star"), toggleStarUrl); add(new Label("unstarLink").setVisible(false)); } } else { // anonymous user add(new Label("starLink").setVisible(false)); add(new Label("unstarLink").setVisible(false)); } // fork controls if (!allowForkControls() || !user.isAuthenticated) { // must be logged-in to fork, hide all fork controls add(new ExternalLink("forkLink", "").setVisible(false)); add(new ExternalLink("myForkLink", "").setVisible(false)); } else { String fork 
= app().repositories().getFork(user.username, model.name); boolean hasFork = fork != null; boolean canFork = user.canFork(model) && model.hasCommits; if (hasFork || !canFork) { // user not allowed to fork or fork already exists or repo forbids forking add(new ExternalLink("forkLink", "").setVisible(false)); if (hasFork && !fork.equals(model.name)) { // user has fork, view my fork link String url = getRequestCycle().urlFor(SummaryPage.class, WicketUtils.newRepositoryParameter(fork)).toString(); add(new ExternalLink("myForkLink", url)); } else { // no fork, hide view my fork link add(new ExternalLink("myForkLink", "").setVisible(false)); } } else if (canFork) { // can fork and we do not have one add(new ExternalLink("myForkLink", "").setVisible(false)); String url = getRequestCycle().urlFor(ForkPage.class, WicketUtils.newRepositoryParameter(model.name)).toString(); add(new ExternalLink("forkLink", url)); } } if (showAdmin || isOwner) { String url = getRequestCycle().urlFor(EditRepositoryPage.class, WicketUtils.newRepositoryParameter(model.name)).toString(); add(new ExternalLink("editLink", url)); } else { add(new Label("editLink").setVisible(false)); } super.setupPage(repositoryName, pageName); } protected void addToolbarButton(String wicketId, String iconClass, String label, String url) { Fragment button = new Fragment(wicketId, "toolbarLinkFragment", this); Label icon = new Label("icon"); WicketUtils.setCssClass(icon, iconClass); button.add(icon); button.add(new Label("label", label)); button.add(new SimpleAttributeModifier("href", url)); add(button); } protected void addSyndicationDiscoveryLink() { add(WicketUtils.syndicationDiscoveryLink(SyndicationServlet.getTitle(repositoryName, objectId), SyndicationServlet.asLink(getRequest() .getRelativePathPrefixToContextRoot(), repositoryName, objectId, 0))); } protected Repository getRepository() { if (r == null) { Repository r = app().repositories().getRepository(repositoryName); if (r == null) { error(getString("gb.canNotLoadRepository") + " " + repositoryName, true); return null; } this.r = r; } return r; } protected RepositoryModel getRepositoryModel() { if (m == null) { RepositoryModel model = app().repositories().getRepositoryModel( GitBlitWebSession.get().getUser(), repositoryName); if (model == null) { if (app().repositories().hasRepository(repositoryName, true)) { // has repository, but unauthorized authenticationError(getString("gb.unauthorizedAccessForRepository") + " " + repositoryName); } else { // does not have repository error(getString("gb.canNotLoadRepository") + " " + repositoryName, true); } return null; } m = model; } return m; } protected String getRepositoryName() { return getRepositoryModel().name; } protected RevCommit getCommit() { RevCommit commit = JGitUtils.getCommit(r, objectId); if (commit == null) { error(MessageFormat.format(getString("gb.failedToFindCommit"), objectId, repositoryName, getPageName()), null, LogPage.class, WicketUtils.newRepositoryParameter(repositoryName)); } getSubmodules(commit); return commit; } protected String getBestCommitId(RevCommit commit) { String head = null; try { head = r.resolve(getRepositoryModel().HEAD).getName(); } catch (Exception e) { } String id = commit.getName(); if (!StringUtils.isEmpty(head) && head.equals(id)) { // match default branch return Repository.shortenRefName(getRepositoryModel().HEAD); } // find first branch match for (RefModel ref : JGitUtils.getLocalBranches(r, false, -1)) { if (ref.getObjectId().getName().equals(id)) { return 
Repository.shortenRefName(ref.getName()); } } // return sha return id; } protected Map<String, SubmoduleModel> getSubmodules(RevCommit commit) { if (submodules == null) { submodules = new HashMap<String, SubmoduleModel>(); for (SubmoduleModel model : JGitUtils.getSubmodules(r, commit.getTree())) { submodules.put(model.path, model); } } return submodules; } protected SubmoduleModel getSubmodule(String path) { SubmoduleModel model = null; if (submodules != null) { model = submodules.get(path); } if (model == null) { // undefined submodule?! model = new SubmoduleModel(path.substring(path.lastIndexOf('/') + 1), path, path); model.hasSubmodule = false; model.gitblitPath = model.name; return model; } else { // extract the repository name from the clone url List<String> patterns = app().settings().getStrings(Keys.git.submoduleUrlPatterns); String submoduleName = StringUtils.extractRepositoryPath(model.url, patterns.toArray(new String[0])); // determine the current path for constructing paths relative // to the current repository String currentPath = ""; if (repositoryName.indexOf('/') > -1) { currentPath = repositoryName.substring(0, repositoryName.lastIndexOf('/') + 1); } // try to locate the submodule repository // prefer bare to non-bare names List<String> candidates = new ArrayList<String>(); // relative candidates.add(currentPath + StringUtils.stripDotGit(submoduleName)); candidates.add(candidates.get(candidates.size() - 1) + ".git"); // relative, no subfolder if (submoduleName.lastIndexOf('/') > -1) { String name = submoduleName.substring(submoduleName.lastIndexOf('/') + 1); candidates.add(currentPath + StringUtils.stripDotGit(name)); candidates.add(candidates.get(candidates.size() - 1) + ".git"); } // absolute candidates.add(StringUtils.stripDotGit(submoduleName)); candidates.add(candidates.get(candidates.size() - 1) + ".git"); // absolute, no subfolder if (submoduleName.lastIndexOf('/') > -1) { String name = submoduleName.substring(submoduleName.lastIndexOf('/') + 1); candidates.add(StringUtils.stripDotGit(name)); candidates.add(candidates.get(candidates.size() - 1) + ".git"); } // create a unique, ordered set of candidate paths Set<String> paths = new LinkedHashSet<String>(candidates); for (String candidate : paths) { if (app().repositories().hasRepository(candidate)) { model.hasSubmodule = true; model.gitblitPath = candidate; return model; } } // we do not have a copy of the submodule, but we need a path model.gitblitPath = candidates.get(0); return model; } } protected String getShortObjectId(String objectId) { return objectId.substring(0, app().settings().getInteger(Keys.web.shortCommitIdLength, 6)); } protected void addRefs(Repository r, RevCommit c) { add(new RefsPanel("refsPanel", repositoryName, c, JGitUtils.getAllRefs(r, getRepositoryModel().showRemoteBranches))); } protected void addFullText(String wicketId, String text) { RepositoryModel model = getRepositoryModel(); String content = bugtraqProcessor().processCommitMessage(r, model, text); String html; switch (model.commitMessageRenderer) { case MARKDOWN: String safeContent = app().xssFilter().relaxed(content); html = MessageFormat.format("<div class='commit_message'>{0}</div>", safeContent); break; default: html = MessageFormat.format("<pre class='commit_message'>{0}</pre>", content); break; } add(new Label(wicketId, html).setEscapeModelStrings(false)); } protected abstract String getPageName(); protected boolean isCommitPage() { return false; } protected Component createPersonPanel(String wicketId, PersonIdent identity, 
Constants.SearchType searchType) { String name = identity == null ? "" : identity.getName(); String address = identity == null ? "" : identity.getEmailAddress(); name = StringUtils.removeNewlines(name); address = StringUtils.removeNewlines(address); boolean showEmail = app().settings().getBoolean(Keys.web.showEmailAddresses, false); if (!showEmail || StringUtils.isEmpty(name) || StringUtils.isEmpty(address)) { String value = name; if (StringUtils.isEmpty(value)) { if (showEmail) { value = address; } else { value = getString("gb.missingUsername"); } } Fragment partial = new Fragment(wicketId, "partialPersonIdent", this); LinkPanel link = new LinkPanel("personName", "list", value, GitSearchPage.class, WicketUtils.newSearchParameter(repositoryName, objectId, value, searchType)); setPersonSearchTooltip(link, value, searchType); partial.add(link); return partial; } else { Fragment fullPerson = new Fragment(wicketId, "fullPersonIdent", this); LinkPanel nameLink = new LinkPanel("personName", "list", name, GitSearchPage.class, WicketUtils.newSearchParameter(repositoryName, objectId, name, searchType)); setPersonSearchTooltip(nameLink, name, searchType); fullPerson.add(nameLink); LinkPanel addressLink = new LinkPanel("personAddress", "hidden-phone list", "<" + address + ">", GitSearchPage.class, WicketUtils.newSearchParameter(repositoryName, objectId, address, searchType)); setPersonSearchTooltip(addressLink, address, searchType); fullPerson.add(addressLink); return fullPerson; } } protected void setPersonSearchTooltip(Component component, String value, Constants.SearchType searchType) { if (searchType.equals(Constants.SearchType.AUTHOR)) { WicketUtils.setHtmlTooltip(component, getString("gb.searchForAuthor") + " " + value); } else if (searchType.equals(Constants.SearchType.COMMITTER)) { WicketUtils.setHtmlTooltip(component, getString("gb.searchForCommitter") + " " + value); } } protected void setChangeTypeTooltip(Component container, ChangeType type) { switch (type) { case ADD: WicketUtils.setHtmlTooltip(container, getString("gb.addition")); break; case COPY: case RENAME: WicketUtils.setHtmlTooltip(container, getString("gb.rename")); break; case DELETE: WicketUtils.setHtmlTooltip(container, getString("gb.deletion")); break; case MODIFY: WicketUtils.setHtmlTooltip(container, getString("gb.modification")); break; } } @Override protected void onBeforeRender() { // dispose of repository object if (r != null) { r.close(); r = null; } // setup page header and footer setupPage(getRepositoryName(), "/ " + getPageName()); super.onBeforeRender(); } @Override protected void setLastModified() { if (getClass().isAnnotationPresent(CacheControl.class)) { CacheControl cacheControl = getClass().getAnnotation(CacheControl.class); switch (cacheControl.value()) { case REPOSITORY: RepositoryModel repository = getRepositoryModel(); if (repository != null) { setLastModified(repository.lastChange); } break; case COMMIT: RevCommit commit = getCommit(); if (commit != null) { Date commitDate = JGitUtils.getCommitDate(commit); setLastModified(commitDate); } break; default: super.setLastModified(); } } } protected PageParameters newRepositoryParameter() { return WicketUtils.newRepositoryParameter(repositoryName); } protected PageParameters newCommitParameter() { return WicketUtils.newObjectParameter(repositoryName, objectId); } protected PageParameters newCommitParameter(String commitId) { return WicketUtils.newObjectParameter(repositoryName, commitId); } public boolean isShowAdmin() { return showAdmin; } public boolean 
isOwner() { return isOwner; } private class SearchForm extends SessionlessForm<Void> implements Serializable { private static final long serialVersionUID = 1L; private final String repositoryName; private final IModel<String> searchBoxModel = new Model<String>(""); private final IModel<Constants.SearchType> searchTypeModel = new Model<Constants.SearchType>( Constants.SearchType.COMMIT); public SearchForm(String id, String repositoryName) { super(id, RepositoryPage.this.getClass(), RepositoryPage.this.getPageParameters()); this.repositoryName = repositoryName; DropDownChoice<Constants.SearchType> searchType = new DropDownChoice<Constants.SearchType>( "searchType", Arrays.asList(Constants.SearchType.values())); searchType.setModel(searchTypeModel); add(searchType.setVisible(app().settings().getBoolean(Keys.web.showSearchTypeSelection, false))); TextField<String> searchBox = new TextField<String>("searchBox", searchBoxModel); add(searchBox); } void setTranslatedAttributes() { WicketUtils.setHtmlTooltip(get("searchType"), getString("gb.searchTypeTooltip")); WicketUtils.setHtmlTooltip(get("searchBox"), MessageFormat.format(getString("gb.searchTooltip"), repositoryName)); WicketUtils.setInputPlaceholder(get("searchBox"), getString("gb.search")); } @Override public void onSubmit() { Constants.SearchType searchType = searchTypeModel.getObject(); String searchString = searchBoxModel.getObject(); if (StringUtils.isEmpty(searchString)) { // redirect to self to avoid wicket page update bug String absoluteUrl = getCanonicalUrl(); getRequestCycle().setRequestTarget(new RedirectRequestTarget(absoluteUrl)); return; } for (Constants.SearchType type : Constants.SearchType.values()) { if (searchString.toLowerCase().startsWith(type.name().toLowerCase() + ":")) { searchType = type; searchString = searchString.substring(type.name().toLowerCase().length() + 1) .trim(); break; } } Class<? extends BasePage> searchPageClass = GitSearchPage.class; RepositoryModel model = app().repositories().getRepositoryModel(repositoryName); if (app().settings().getBoolean(Keys.web.allowLuceneIndexing, true) && !ArrayUtils.isEmpty(model.indexedBranches)) { // this repository is Lucene-indexed searchPageClass = LuceneSearchPage.class; } // use an absolute url to workaround Wicket-Tomcat problems with // mounted url parameters (issue-111) PageParameters params = WicketUtils.newSearchParameter(repositoryName, null, searchString, searchType); String absoluteUrl = getCanonicalUrl(searchPageClass, params); getRequestCycle().setRequestTarget(new RedirectRequestTarget(absoluteUrl)); } } }
Do not display fork button if target repo already exists (fixes #944)
src/main/java/com/gitblit/wicket/pages/RepositoryPage.java
Do not display fork button if target repo already exists (fixes #944)
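The Gitblit record above carries the commit message "Do not display fork button if target repo already exists (fixes #944)". The following is a minimal, self-contained sketch of that visibility rule only; the class and method names are illustrative and are not part of the Gitblit API.

public class ForkVisibilitySketch {

    // The fork link is shown only when the user may fork this repository,
    // the repository has commits, and the user's fork does not already exist.
    static boolean showForkLink(boolean userCanFork, boolean repoHasCommits,
            boolean forkAlreadyExists) {
        boolean canFork = userCanFork && repoHasCommits;
        return canFork && !forkAlreadyExists;
    }

    public static void main(String[] args) {
        System.out.println(showForkLink(true, true, false));  // true  -> show the fork button
        System.out.println(showForkLink(true, true, true));   // false -> fork already exists, hide it
        System.out.println(showForkLink(false, true, false)); // false -> user is not allowed to fork
    }
}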
Java
apache-2.0
74e3ead9b2896cff3030e89b70b12cdbb8139cdd
0
chenrui2014/okio,v7lin/okio,angcyo/okio,tempbottle/okio,tejasmanohar/okio,tomkdir/okio,seven332/okio,cgdecker/okio,angcyo/okio,square/okio,square/okio,sunshinecoast/okio,jpxiong/okio,02110917/okio,xubuhang/okio,seven332/okio,patds/okio,kilink/okio,msdgwzhy6/okio,sunshinecoast/okio,tomkdir/okio,tejasmanohar/okio,xubuhang/okio,tempbottle/okio,nfuller/okio,jpxiong/okio,juoni/okio,palaniyappanBala/okio,CyanogenMod/android_external_square_okio,palaniyappanBala/okio,cgdecker/okio,msdgwzhy6/okio,Shedings/okio,Shedings/okio,juoni/okio,02110917/okio,kilink/okio,v7lin/okio,CyanogenMod/android_external_square_okio,nfuller/okio,patds/okio,chenrui2014/okio,square/okio
/* * Copyright (C) 2014 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package okio; import java.io.Closeable; import java.io.IOException; /** * Supplies a stream of bytes. Use this interface to read data from wherever * it's located: from the network, storage, or a buffer in memory. Sources may * be layered to transform supplied data, such as to decompress, decrypt, or * remove protocol framing. * * <p>Most applications shouldn't operate on a source directly, but rather * {@link BufferedSource} which is both more efficient and more convenient. Use * {@link Okio#buffer(Source)} to wrap any source with a buffer. * * <p>Sources are easy to test: just use an {@link Buffer} in your tests, and * fill it with the data your application is to read. * * <h3>Comparison with InputStream</h3> * This interface is functionally equivalent to {@link java.io.InputStream}. * * <p>{@code InputStream} requires multiple layers when consumed data is * heterogeneous: a {@code DataInputStream} for primitive values, a {@code * BufferedInputStream} for buffering, and {@code InputStreamReader} for * strings. This class uses {@code BufferedSource} for all of the above. * * <p>Source avoids the impossible-to-implement {@linkplain * java.io.InputStream#available available()} method. Instead callers specify * how many bytes they {@link BufferedSource#require require}. * * <p>Source omits the unsafe-to-compose {@linkplain java.io.InputStream#mark * mark and reset} state that's tracked by {@code InputStream}; callers instead * just buffer what they need. * * <p>When implementing a source, you need not worry about the {@linkplain * java.io.InputStream#read single-byte read} method that is awkward to * implement efficiently and that returns one of 257 possible values. * * <p>And source has a stronger {@code skip} method: {@link BufferedSource#skip} * won't return prematurely. * * <h3>Interop with InputStream</h3> * Use {@link Okio#source} to adapt an {@code InputStream} to a source. Use * {@link BufferedSource#inputStream} to adapt a source to an {@code * InputStream}. */ public interface Source extends Closeable { /** * Removes at least 1, and up to {@code byteCount} bytes from this and appends * them to {@code sink}. Returns the number of bytes read, or -1 if this * source is exhausted. */ long read(Buffer sink, long byteCount) throws IOException; /** Returns the timeout for this source. */ Timeout timeout(); /** * Closes this source and releases the resources held by this source. It is an * error to read a closed source. It is safe to close a source more than once. */ @Override void close() throws IOException; }
okio/src/main/java/okio/Source.java
/* * Copyright (C) 2014 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package okio; import java.io.Closeable; import java.io.IOException; /** * Supplies a stream of bytes. Use this interface to read data from wherever * it's located: from the network, storage, or a buffer in memory. Sources may * be layered to transform supplied data, such as to decompress, decrypt, or * remove protocol framing. * * <p>Most applications shouldn't operate on a source directly, but rather * {@link BufferedSource} which is both more efficient and more convenient. Use * {@link Okio#buffer(Source)} to wrap any source with a buffer. * * <p>Sources are easy to test: just use an {@link Buffer} in your tests, and * fill it with the data your application is to read. * * <h3>Comparison with InputStream</h3> * This interface is functionally equivalent to {@link java.io.InputStream}. * * <p>{@code InputStream} requires multiple layers when consumed data is * heterogeneous: a {@code DataOutputStream} for primitive values, a {@code * BufferedInputStream} for buffering, and {@code InputStreamReader} for * strings. This class uses {@code BufferedSource} for all of the above. * * <p>Source avoids the impossible-to-implement {@linkplain * java.io.InputStream#available available()} method. Instead callers specify * how many bytes they {@link BufferedSource#require require}. * * <p>Source omits the unsafe-to-compose {@linkplain java.io.InputStream#mark * mark and reset} state that's tracked by {@code InputStream}; callers instead * just buffer what they need. * * <p>When implementing a source, you need not worry about the {@linkplain * java.io.InputStream#read single-byte read} method that is awkward to * implement efficiently and that returns one of 257 possible values. * * <p>And source has a stronger {@code skip} method: {@link BufferedSource#skip} * won't return prematurely. * * <h3>Interop with InputStream</h3> * Use {@link Okio#source} to adapt an {@code InputStream} to a source. Use * {@link BufferedSource#inputStream} to adapt a source to an {@code * InputStream}. */ public interface Source extends Closeable { /** * Removes at least 1, and up to {@code byteCount} bytes from this and appends * them to {@code sink}. Returns the number of bytes read, or -1 if this * source is exhausted. */ long read(Buffer sink, long byteCount) throws IOException; /** Returns the timeout for this source. */ Timeout timeout(); /** * Closes this source and releases the resources held by this source. It is an * error to read a closed source. It is safe to close a source more than once. */ @Override void close() throws IOException; }
Javadoc fix
okio/src/main/java/okio/Source.java
Javadoc fix
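The Source Javadoc in the okio record above documents adapting an InputStream with Okio.source(...) and wrapping any Source with Okio.buffer(...) to obtain a BufferedSource. Below is a small usage sketch based on that documented okio 1.x API; the class name and sample data are made up for illustration.

import java.io.ByteArrayInputStream;
import java.io.IOException;

import okio.BufferedSource;
import okio.Okio;

public class SourceUsageSketch {
    public static void main(String[] args) throws IOException {
        byte[] data = "hello okio".getBytes("UTF-8");
        // Adapt the InputStream to a Source, then buffer it for convenient reads.
        try (BufferedSource source = Okio.buffer(Okio.source(new ByteArrayInputStream(data)))) {
            System.out.println(source.readUtf8()); // prints "hello okio"
        }
    }
}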
Java
apache-2.0
a82d5d46cd161dc8618540157e2f2bc41ba7d2f7
0
Khyzad/PID-webservice,HawaiiStateDigitalArchives/PID-webservice,Khyzad/PID-webservice,HawaiiStateDigitalArchives/PID-webservice
package com.hida.controller; import com.hida.model.BadParameterException; import com.hida.model.DefaultSetting; import com.hida.model.NotEnoughPermutationsException; import com.hida.model.Pid; import com.hida.model.TokenType; import com.hida.service.MinterService; import org.springframework.stereotype.Controller; import org.springframework.ui.ModelMap; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import java.io.IOException; import java.sql.SQLException; import java.util.Map; import java.util.Set; import org.springframework.web.bind.annotation.ExceptionHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.locks.ReentrantLock; import javax.json.Json; import javax.json.JsonArray; import javax.json.JsonArrayBuilder; import javax.json.JsonBuilderFactory; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.codehaus.jackson.map.ObjectMapper; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; /** * A controller class that paths the user to all jsp files in WEB_INF/jsp. * * @author lruffin */ @Controller @RequestMapping("/") public class MinterController { /* * Logger; logfile to be stored in resource folder */ private static final Logger Logger = LoggerFactory.getLogger(MinterController.class); /** * Creates a fair reentrant RequestLock to serialize each request * sequentially instead of concurrently. pids */ private static final ReentrantLock RequestLock = new ReentrantLock(true); /** * create a database to be used to create and count number of ids */ @Autowired private MinterService MinterService; /** * Redirects to the index after retrieving updated settings from the * administration panel. * * @param request HTTP request from the administration panel * @param response HTTP response that redirects to the administration panel * after updating the new settings. * @return The name of the page to redirect. * @throws SQLException * @throws BadParameterException Thrown whenever a bad parameter is * detected. * @throws ClassNotFoundException Thrown whenever a class does not exist. 
*/ @RequestMapping(value = {"/confirmation"}, method = {RequestMethod.POST}) public String handleForm(HttpServletRequest request, HttpServletResponse response) throws ClassNotFoundException, SQLException, BadParameterException { try { // prevents other clients from accessing the database whenever the form is submitted RequestLock.lock(); DefaultSetting oldSetting = MinterService.getCurrentSetting(); DefaultSetting newSetting; Logger.info("in handleForm"); String prepend = request.getParameter("prepend"); String prefix = request.getParameter("idprefix"); String isAuto = request.getParameter("mintType"); String isRandom = request.getParameter("mintOrder"); String sansVowels = request.getParameter("vowels"); String rootLength = request.getParameter("idlength"); String digitToken; String lowerToken; String upperToken; String charMap; boolean auto = isAuto.equals("auto"); boolean random = isRandom.equals("random"); boolean vowels = sansVowels == null; // assign a non-null value to prepend, prefix, and rootLength if (prepend == null) { prepend = ""; } if (prefix == null) { prefix = ""; } if ((rootLength == null || rootLength.isEmpty()) && !auto) { rootLength = "1"; } int length = Integer.parseInt(rootLength); // assign values based on which minter type was selected if (auto) { digitToken = request.getParameter("digits"); lowerToken = request.getParameter("lowercase"); upperToken = request.getParameter("uppercase"); TokenType tokenType; // gets the tokenmap value if (digitToken != null && lowerToken == null && upperToken == null) { tokenType = TokenType.DIGIT; } else if (digitToken == null && lowerToken != null && upperToken == null) { tokenType = TokenType.LOWERCASE; } else if (digitToken == null && lowerToken == null && upperToken != null) { tokenType = TokenType.UPPERCASE; } else if (digitToken == null && lowerToken != null && upperToken != null) { tokenType = TokenType.MIXEDCASE; } else if (digitToken != null && lowerToken != null && upperToken == null) { tokenType = TokenType.LOWER_EXTENDED; } else if (digitToken == null && lowerToken == null && upperToken != null) { tokenType = TokenType.UPPER_EXTENDED; } else if (digitToken != null && lowerToken != null && upperToken != null) { tokenType = TokenType.MIXED_EXTENDED; } else { throw new BadParameterException(); } // create new defaultsetting bject newSetting = new DefaultSetting(prepend, prefix, tokenType, oldSetting.getCharMap(), length, vowels, auto, random); } else { charMap = request.getParameter("charmapping"); if (charMap == null || charMap.isEmpty()) { throw new BadParameterException(); } newSetting = new DefaultSetting(prepend, prefix, oldSetting.getTokenType(), charMap, oldSetting.getRootLength(), vowels, auto, random); } MinterService.updateCurrentSetting(newSetting); } finally { // unlocks RequestLock and gives access to longest waiting thread RequestLock.unlock(); Logger.warn("Request to update default settings finished, UNLOCKING MINTER"); } // redirect to the administration panel located at http://[domain]/ return "redirect:"; } /** * Creates a path to mint ids. If parameters aren't given then printPids * will resort to using the default values found in minter_config.properties * * @param requestedAmount requested number of ids to mint * @param model serves as a holder for the model so that attributes can be * added. 
* @param parameters parameters given by user to instill variety in ids * @return paths user to mint.jsp * @throws Exception catches all sorts of exceptions that may be thrown by * any methods */ @RequestMapping(value = {"/mint/{requestedAmount}"}, method = {RequestMethod.GET}) public String printPids(@PathVariable long requestedAmount, ModelMap model, @RequestParam Map<String, String> parameters) throws Exception { // ensure that only one thread access the minter at any given time RequestLock.lock(); Logger.warn("Request to Minter made, LOCKING MINTER"); // message variable to be sent to mint.jsp String message; try { // override default settings where applicable DefaultSetting tempSetting = overrideDefaultSetting(parameters, MinterService.getCurrentSetting()); // create the set of ids Set<Pid> idList = MinterService.mint(requestedAmount, tempSetting); // convert the set of ids into a json array message = convertListToJson(idList, tempSetting.getPrepend()); //Logger.info("Message from Minter: "+message); // print list of ids to screen model.addAttribute("message", message); } finally { // unlocks RequestLock and gives access to longest waiting thread RequestLock.unlock(); Logger.warn("Request to Minter Finished, UNLOCKING MINTER"); } // return to mint.jsp return "mint"; } /** * Maps to the admin panel on the home page. * * @return name of the index page */ @RequestMapping(value = {""}, method = {RequestMethod.GET}) public ModelAndView displayIndex() { ModelAndView model = new ModelAndView(); // retrieve default values stored in the database DefaultSetting defaultSetting = MinterService.getCurrentSetting(); // add the values to the settings page so that they can be displayed Logger.info("index page called"); model.addObject("prepend", defaultSetting.getPrepend()); model.addObject("prefix", defaultSetting.getPrefix()); model.addObject("charMap", defaultSetting.getCharMap()); model.addObject("tokenType", defaultSetting.getTokenType()); model.addObject("rootLength", defaultSetting.getRootLength()); model.addObject("isAuto", defaultSetting.isAuto()); model.addObject("isRandom", defaultSetting.isRandom()); model.addObject("sansVowel", defaultSetting.isSansVowels()); model.setViewName("settings"); return model; } /** * Returns a view that displays the error message of * NotEnoughPermutationsException. * * @param req The HTTP request. * @param exception NotEnoughPermutationsException. * @return The view of the error message in json format. */ @ExceptionHandler(NotEnoughPermutationsException.class) public ModelAndView handlePermutationError(HttpServletRequest req, Exception exception) { Logger.error("Request: " + req.getRequestURL() + " raised " + exception + " with message " + exception.getMessage()); ModelAndView mav = new ModelAndView(); mav.addObject("status", 400); mav.addObject("exception", exception.getClass().getSimpleName()); mav.addObject("message", exception.getMessage()); mav.setViewName("error"); return mav; } /** * Returns a view that displays the error message of BadParameterException. * * @param req The HTTP request. * @param exception BadParameterException. * @return The view of the error message in json format. 
*/ @ExceptionHandler(BadParameterException.class) public ModelAndView handleBadParameterError(HttpServletRequest req, Exception exception) { Logger.error("Request: " + req.getRequestURL() + " raised " + exception); ModelAndView mav = new ModelAndView(); mav.addObject("status", 400); mav.addObject("exception", exception.getClass().getSimpleName()); mav.addObject("message", exception.getMessage()); Logger.error("Error with bad parameter: " + exception.getMessage()); mav.setViewName("error"); return mav; } /** * Throws any exception that may be caught within the program * * @param req the HTTP request * @param exception the caught exception * @return The view of the error message */ @ExceptionHandler(Exception.class) public ModelAndView handleGeneralError(HttpServletRequest req, Exception exception) { ModelAndView mav = new ModelAndView(); mav.addObject("status", 500); mav.addObject("exception", exception.getClass().getSimpleName()); mav.addObject("message", exception.getMessage()); Logger.error("General Error: " + exception.getMessage()); // display stack trace StackTraceElement[] stacktrace = exception.getStackTrace(); String traceList = ""; for (StackTraceElement s : stacktrace) { traceList += s.toString() + "\n"; } mav.addObject("stacktrace", traceList); mav.setViewName("error"); return mav; } /** * Overrides the default value of cached value with values given in the * parameter. If the parameters do not contain any of the valid parameters, * the default values are maintained. * * @param parameters List of parameters given by the client. * @param entity * @return The settings used for the particular session it was called. * @throws BadParameterException */ private DefaultSetting overrideDefaultSetting(final Map<String, String> parameters, final DefaultSetting entity) throws BadParameterException { String prepend = (parameters.containsKey("prepend")) ? parameters.get("prepend") : entity.getPrepend(); String prefix = (parameters.containsKey("prefix")) ? parameters.get("prefix") : entity.getPrefix(); int rootLength = (parameters.containsKey("rootLength")) ? Integer.parseInt(parameters.get("rootLength")) : entity.getRootLength(); String charMap = (parameters.containsKey("charMap")) ? parameters.get("charMap") : entity.getCharMap(); TokenType tokenType = (parameters.containsKey("tokenType")) ? getValidTokenType(parameters.get("tokenType")) : entity.getTokenType(); boolean isAuto = (parameters.containsKey("auto")) ? convertBoolean(parameters.get("auto"), "auto") : entity.isAuto(); boolean isRandom = (parameters.containsKey("random")) ? convertBoolean(parameters.get("random"), "random") : entity.isRandom(); boolean isSansVowels = (parameters.containsKey("sansVowels")) ? convertBoolean(parameters.get("sansVowels"), "sansVowels") : entity.isSansVowels(); return new DefaultSetting(prepend, prefix, tokenType, charMap, rootLength, isSansVowels, isAuto, isRandom); } /** * This method is used to check to see whether or not the given parameter is * explicitly equivalent to "true" or "false" and returns them respectively. * The method provided by the Boolean wrapper class converts all Strings * that do no explictly contain true to false. * * @param parameter the given string to convert. * @param parameterType the type of the parameter. * @throws BadParameterException Thrown whenever a malformed parameter is * formed or passed * @return the equivalent version of true or false. 
*/ private boolean convertBoolean(String parameter, String parameterType) throws BadParameterException { if (parameter.equals("true")) { return true; } else if (parameter.equals("false")) { return false; } else { throw new BadParameterException(parameter, parameterType); } } /** * Creates a Json object based off a set of ids given in the parameter * * @param set A set of ids to display into JSON * @param prepend A value to attach to the beginning of every id. Typically * used to determine the format of the id. For example, ARK or DOI. * @return A reference to a String that contains Json set of ids * @throws IOException thrown whenever a file could not be found */ private String convertListToJson(Set<Pid> set, String prepend) throws IOException { // Jackson objects to format JSON strings String jsonString; ObjectMapper mapper = new ObjectMapper(); Object formattedJson; // Javax objects to create JSON strings JsonBuilderFactory factory = Json.createBuilderFactory(null); JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); JsonArray jsonArray; // convert the set of ids into a json array int counter = 0; for (Pid id : set) { arrayBuilder.add(factory.createObjectBuilder() .add("id", counter) .add("name", id.toString())); counter++; } jsonArray = arrayBuilder.build(); // format json array formattedJson = mapper.readValue(jsonArray.toString(), Object.class); jsonString = mapper.writerWithDefaultPrettyPrinter(). writeValueAsString(formattedJson); return jsonString; } /** * Attempts to convert a string into an equivalent enum TokenType. * * @param tokenType Designates what characters are contained in the id's * root. * @return Returns the enum type if succesful, throws BadParameterException * otherwise. * @throws BadParameterException thrown whenever a malformed or invalid * parameter is passed */ private TokenType getValidTokenType(String tokenType) throws BadParameterException { switch (tokenType) { case "DIGIT": return TokenType.DIGIT; case "LOWERCASE": return TokenType.LOWERCASE; case "UPPERCASE": return TokenType.UPPERCASE; case "MIXEDCASE": return TokenType.MIXEDCASE; case "LOWER_EXTENDED": return TokenType.LOWER_EXTENDED; case "UPPER_EXTENDED": return TokenType.UPPER_EXTENDED; case "MIXED_EXTENDED": return TokenType.MIXED_EXTENDED; default: throw new BadParameterException(tokenType, "TokenType"); } } }
Minter/src/main/java/com/hida/controller/MinterController.java
package com.hida.controller; import com.hida.model.BadParameterException; import com.hida.model.DefaultSetting; import com.hida.model.NotEnoughPermutationsException; import com.hida.model.Pid; import com.hida.model.TokenType; import com.hida.service.MinterService; import org.springframework.stereotype.Controller; import org.springframework.ui.ModelMap; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import java.io.IOException; import java.sql.SQLException; import java.util.Map; import java.util.Set; import org.springframework.web.bind.annotation.ExceptionHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.locks.ReentrantLock; import javax.json.Json; import javax.json.JsonArray; import javax.json.JsonArrayBuilder; import javax.json.JsonBuilderFactory; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.codehaus.jackson.map.ObjectMapper; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.servlet.ModelAndView; /** * A controller class that paths the user to all jsp files in WEB_INF/jsp. * * @author lruffin */ @Controller @RequestMapping("/") public class MinterController { /* * Logger; logfile to be stored in resource folder */ private static final Logger Logger = LoggerFactory.getLogger(MinterController.class); /** * Creates a fair reentrant RequestLock to serialize each request * sequentially instead of concurrently. pids */ private static final ReentrantLock RequestLock = new ReentrantLock(true); /** * create a database to be used to create and count number of ids */ @Autowired private MinterService MinterService; /** * Redirects to the index after retrieving updated settings from the * administration panel. * * @param request HTTP request from the administration panel * @param response HTTP response that redirects to the administration panel * after updating the new settings. * @return The name of the page to redirect. * @throws SQLException * @throws BadParameterException Thrown whenever a bad parameter is * detected. * @throws ClassNotFoundException Thrown whenever a class does not exist. 
*/ @RequestMapping(value = {"/confirmation"}, method = {org.springframework.web.bind.annotation.RequestMethod.POST}) public String handleForm(HttpServletRequest request, HttpServletResponse response) throws ClassNotFoundException, SQLException, BadParameterException { try { // prevents other clients from accessing the database whenever the form is submitted RequestLock.lock(); DefaultSetting oldSetting = MinterService.getCurrentSetting(); DefaultSetting newSetting; // Logger.info("in handleForm"); String prepend = request.getParameter("prepend"); String prefix = request.getParameter("idprefix"); String isAuto = request.getParameter("mintType"); String isRandom = request.getParameter("mintOrder"); String sansVowels = request.getParameter("vowels"); String rootLength = request.getParameter("idlength"); String digitToken; String lowerToken; String upperToken; String charMap; boolean auto = isAuto.equals("auto"); boolean random = isRandom.equals("random"); boolean vowels = sansVowels == null; // assign a non-null value to prepend, prefix, and rootLength if (prepend == null) { prepend = ""; } if (prefix == null) { prefix = ""; } if ((rootLength == null || rootLength.isEmpty()) && !auto) { rootLength = "1"; } int length = Integer.parseInt(rootLength); // assign values based on which minter type was selected if (auto) { digitToken = request.getParameter("digits"); lowerToken = request.getParameter("lowercase"); upperToken = request.getParameter("uppercase"); TokenType tokenType; // gets the tokenmap value if (digitToken != null && lowerToken == null && upperToken == null) { tokenType = TokenType.DIGIT; } else if (digitToken == null && lowerToken != null && upperToken == null) { tokenType = TokenType.LOWERCASE; } else if (digitToken == null && lowerToken == null && upperToken != null) { tokenType = TokenType.UPPERCASE; } else if (digitToken == null && lowerToken != null && upperToken != null) { tokenType = TokenType.MIXEDCASE; } else if (digitToken != null && lowerToken != null && upperToken == null) { tokenType = TokenType.LOWER_EXTENDED; } else if (digitToken == null && lowerToken == null && upperToken != null) { tokenType = TokenType.UPPER_EXTENDED; } else if (digitToken != null && lowerToken != null && upperToken != null) { tokenType = TokenType.MIXED_EXTENDED; } else { throw new BadParameterException(); } newSetting = new DefaultSetting(prepend, prefix, tokenType, oldSetting.getCharMap(), length, vowels, auto, random); } else { charMap = request.getParameter("charmapping"); if (charMap == null || charMap.isEmpty()) { throw new BadParameterException(); } newSetting = new DefaultSetting(prepend, prefix, oldSetting.getTokenType(), charMap, oldSetting.getRootLength(), vowels, auto, random); } MinterService.updateCurrentSetting(newSetting); } finally { // unlocks RequestLock and gives access to longest waiting thread RequestLock.unlock(); Logger.warn("Request to update default settings finished, UNLOCKING MINTER"); } // redirect to the administration panel located at http://[domain]/ return "redirect:"; } /** * Creates a path to mint ids. If parameters aren't given then printPids * will resort to using the default values found in minter_config.properties * * @param requestedAmount requested number of ids to mint * @param model serves as a holder for the model so that attributes can be * added. 
* @param parameters parameters given by user to instill variety in ids * @return paths user to mint.jsp * @throws Exception catches all sorts of exceptions that may be thrown by * any methods */ @RequestMapping(value = {"/mint/{requestedAmount}"}, method = {org.springframework.web.bind.annotation.RequestMethod.GET}) public String printPids(@PathVariable long requestedAmount, ModelMap model, @RequestParam Map<String, String> parameters) throws Exception { // ensure that only one thread access the minter at any given time RequestLock.lock(); Logger.warn("Request to Minter made, LOCKING MINTER"); // message variable to be sent to mint.jsp String message; try { // override default settings where applicable DefaultSetting tempSetting = overrideDefaultSetting(parameters, MinterService.getCurrentSetting()); // create the set of ids Set<Pid> idList = MinterService.mint(requestedAmount, tempSetting); // convert the set of ids into a json array message = convertListToJson(idList, tempSetting.getPrepend()); //Logger.info("Message from Minter: "+message); // print list of ids to screen model.addAttribute("message", message); } finally { // unlocks RequestLock and gives access to longest waiting thread RequestLock.unlock(); Logger.warn("Request to Minter Finished, UNLOCKING MINTER"); } // return to mint.jsp return "mint"; } /** * Maps to the admin panel on the home page. * * @return name of the index page */ @RequestMapping(value = {""}, method = {org.springframework.web.bind.annotation.RequestMethod.GET}) public ModelAndView displayIndex() { ModelAndView model = new ModelAndView(); DefaultSetting defaultSetting = MinterService.getCurrentSetting(); Logger.info("index page called"); model.addObject("prepend", defaultSetting.getPrepend()); model.addObject("prefix", defaultSetting.getPrefix()); model.addObject("charMap", defaultSetting.getCharMap()); model.addObject("tokenType", defaultSetting.getTokenType()); model.addObject("rootLength", defaultSetting.getRootLength()); model.addObject("isAuto", defaultSetting.isAuto()); model.addObject("isRandom", defaultSetting.isRandom()); model.addObject("sansVowel", defaultSetting.isSansVowels()); model.setViewName("settings"); return model; } /** * Overrides the default value of cached value with values given in the * parameter. If the parameters do not contain any of the valid parameters, * the default values are maintained. * * @param parameters List of parameters given by the client. * @param entity * @return The settings used for the particular session it was called. * @throws BadParameterException */ private DefaultSetting overrideDefaultSetting(final Map<String, String> parameters, final DefaultSetting entity) throws BadParameterException { String prepend = (parameters.containsKey("prepend")) ? parameters.get("prepend") : entity.getPrepend(); String prefix = (parameters.containsKey("prefix")) ? parameters.get("prefix") : entity.getPrefix(); int rootLength = (parameters.containsKey("rootLength")) ? Integer.parseInt(parameters.get("rootLength")) : entity.getRootLength(); String charMap = (parameters.containsKey("charMap")) ? parameters.get("charMap") : entity.getCharMap(); TokenType tokenType = (parameters.containsKey("tokenType")) ? getValidTokenType(parameters.get("tokenType")) : entity.getTokenType(); boolean isAuto = (parameters.containsKey("auto")) ? convertBoolean(parameters.get("auto"), "auto") : entity.isAuto(); boolean isRandom = (parameters.containsKey("random")) ? 
convertBoolean(parameters.get("random"), "random") : entity.isRandom(); boolean isSansVowels = (parameters.containsKey("sansVowels")) ? convertBoolean(parameters.get("sansVowels"), "sansVowels") : entity.isSansVowels(); return new DefaultSetting(prepend, prefix, tokenType, charMap, rootLength, isSansVowels, isAuto, isRandom); } /** * This method is used to check to see whether or not the given parameter is * explicitly equivalent to "true" or "false" and returns them respectively. * The method provided by the Boolean wrapper class converts all Strings * that do no explictly contain true to false. * * @param parameter the given string to convert. * @param parameterType the type of the parameter. * @throws BadParameterException Thrown whenever a malformed parameter is * formed or passed * @return the equivalent version of true or false. */ private boolean convertBoolean(String parameter, String parameterType) throws BadParameterException { if (parameter.equals("true")) { return true; } else if (parameter.equals("false")) { return false; } else { throw new BadParameterException(parameter, parameterType); } } /** * Returns a view that displays the error message of * NotEnoughPermutationsException. * * @param req The HTTP request. * @param exception NotEnoughPermutationsException. * @return The view of the error message in json format. */ @ExceptionHandler(NotEnoughPermutationsException.class) public ModelAndView handlePermutationError(HttpServletRequest req, Exception exception) { //logger.error("Request: " + req.getRequestURL() + " raised " + exception); ModelAndView mav = new ModelAndView(); mav.addObject("status", 400); mav.addObject("exception", exception.getClass().getSimpleName()); mav.addObject("message", exception.getMessage()); Logger.error("Error with permutation: " + exception.getMessage()); mav.setViewName("error"); return mav; } /** * Returns a view that displays the error message of BadParameterException. * * @param req The HTTP request. * @param exception BadParameterException. * @return The view of the error message in json format. 
*/ @ExceptionHandler(BadParameterException.class) public ModelAndView handleBadParameterError(HttpServletRequest req, Exception exception) { //logger.error("Request: " + req.getRequestURL() + " raised " + exception); ModelAndView mav = new ModelAndView(); mav.addObject("status", 400); mav.addObject("exception", exception.getClass().getSimpleName()); mav.addObject("message", exception.getMessage()); Logger.error("Error with bad parameter: " + exception.getMessage()); mav.setViewName("error"); return mav; } /** * Throws any exception that may be caught within the program * * @param req the HTTP request * @param exception the caught exception * @return The view of the error message */ @ExceptionHandler(Exception.class) public ModelAndView handleGeneralError(HttpServletRequest req, Exception exception) { ModelAndView mav = new ModelAndView(); mav.addObject("status", 500); mav.addObject("exception", exception.getClass().getSimpleName()); mav.addObject("message", exception.getMessage()); Logger.error("Error " + "General Error: " + exception.getMessage()); StackTraceElement[] stacktrace = exception.getStackTrace(); String traceList = ""; for(StackTraceElement s : stacktrace){ traceList += s.toString() + "\n"; } mav.addObject("stacktrace", traceList); mav.setViewName("error"); return mav; } /** * Creates a Json object based off a set of ids given in the parameter * * @param set A set of ids to display into JSON * @param prepend A value to attach to the beginning of every id. Typically * used to determine the format of the id. For example, ARK or DOI. * @return A reference to a String that contains Json set of ids * @throws IOException thrown whenever a file could not be found */ public String convertListToJson(Set<Pid> set, String prepend) throws IOException { // Jackson objects to format JSON strings String jsonString; ObjectMapper mapper = new ObjectMapper(); Object formattedJson; // Javax objects to create JSON strings JsonBuilderFactory factory = Json.createBuilderFactory(null); JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); JsonArray jsonArray; // convert the set of ids into a json array int counter = 0; for (Pid id : set) { arrayBuilder.add(factory.createObjectBuilder() .add("id", counter) .add("name", id.toString())); counter++; } jsonArray = arrayBuilder.build(); // format json array formattedJson = mapper.readValue(jsonArray.toString(), Object.class); jsonString = mapper.writerWithDefaultPrettyPrinter(). writeValueAsString(formattedJson); return jsonString; } /** * Attempts to convert a string into an equivalent enum TokenType. * * @param tokenType Designates what characters are contained in the id's * root. * @return Returns the enum type if succesful, throws BadParameterException * otherwise. * @throws BadParameterException thrown whenever a malformed or invalid * parameter is passed */ public final TokenType getValidTokenType(String tokenType) throws BadParameterException { switch (tokenType) { case "DIGIT": return TokenType.DIGIT; case "LOWERCASE": return TokenType.LOWERCASE; case "UPPERCASE": return TokenType.UPPERCASE; case "MIXEDCASE": return TokenType.MIXEDCASE; case "LOWER_EXTENDED": return TokenType.LOWER_EXTENDED; case "UPPER_EXTENDED": return TokenType.UPPER_EXTENDED; case "MIXED_EXTENDED": return TokenType.MIXED_EXTENDED; default: throw new BadParameterException(tokenType, "TokenType"); } } }
Clean MinterController summary: - moved the getters and setters to the bottom of the class - removed all instances of logging in the class - imported RequestMethod - changed the convertListToJson method's access modifier to private - changed the getValidTokenType method's access modifier to private
Minter/src/main/java/com/hida/controller/MinterController.java
Clean MinterController
Java
bsd-2-clause
fd84f1a91bac873fdfba922b08d52882ededf58c
0
scifio/scifio
/* * #%L * OME Bio-Formats package for reading and converting biological file formats. * %% * Copyright (C) 2005 - 2012 Open Microscopy Environment: * - Board of Regents of the University of Wisconsin-Madison * - Glencoe Software, Inc. * - University of Dundee * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ package loci.formats.in; import java.io.File; import java.io.IOException; import java.math.BigInteger; import java.util.Arrays; import java.util.Hashtable; import java.util.List; import java.util.Vector; import loci.common.DataTools; import loci.common.DateTools; import loci.common.Location; import loci.common.RandomAccessInputStream; import loci.common.Region; import loci.formats.CoreMetadata; import loci.formats.FilePattern; import loci.formats.FormatException; import loci.formats.FormatReader; import loci.formats.FormatTools; import loci.formats.ImageTools; import loci.formats.MetadataTools; import loci.formats.codec.BitWriter; import loci.formats.meta.MetadataStore; import loci.formats.tiff.IFD; import loci.formats.tiff.TiffParser; import ome.xml.model.primitives.Color; import ome.xml.model.primitives.NonNegativeInteger; import ome.xml.model.primitives.PositiveFloat; import ome.xml.model.primitives.PositiveInteger; import ome.xml.model.primitives.Timestamp; /** * MIASReader is the file format reader for Maia Scientific MIAS-2 datasets. * * <dl><dt><b>Source code:</b></dt> * <dd><a href="http://trac.openmicroscopy.org.uk/ome/browser/bioformats.git/components/bio-formats/src/loci/formats/in/MIASReader.java">Trac</a>, * <a href="http://git.openmicroscopy.org/?p=bioformats.git;a=blob;f=components/bio-formats/src/loci/formats/in/MIASReader.java;hb=HEAD">Gitweb</a></dd></dl> * * @author Melissa Linkert melissa at glencoesoftware.com */ public class MIASReader extends FormatReader { // -- Fields -- /** TIFF files - indexed by well and file. */ private String[][] tiffs; /** Delegate readers. */ private MinimalTiffReader[][] readers; /** Path to file containing analysis results for all plates. */ private String resultFile = null; private Vector<AnalysisFile> analysisFiles; private int[] wellNumber; private int tileRows, tileCols; private int tileWidth, tileHeight; private int wellColumns; private int[] bpp; private String templateFile; private Hashtable<String, String> overlayFiles = new Hashtable<String, String>(); private Hashtable<String, Integer> overlayPlanes = new Hashtable<String, Integer>(); /** Whether or not mask pixel data should be parsed in setId. */ private boolean parseMasks = false; /** Cached tile buffer to avoid re-allocations when reading tiles. */ private byte[] cachedTileBuffer; // -- Constructor -- /** Constructs a new MIAS reader. 
*/ public MIASReader() { super("MIAS", new String[] {"tif", "tiff", "txt"}); suffixSufficient = false; domains = new String[] {FormatTools.HCS_DOMAIN}; hasCompanionFiles = true; datasetDescription = "One directory per plate containing one directory " + "per well, each with one or more .tif/.tiff files"; } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#isSingleFile(String) */ public boolean isSingleFile(String id) throws FormatException, IOException { return false; } /* @see loci.formats.IFormatReader#isThisType(String, boolean) */ public boolean isThisType(String filename, boolean open) { if (!open) return super.isThisType(filename, open); // no file system access Location baseFile = new Location(filename).getAbsoluteFile(); Location wellDir = baseFile.getParentFile(); String wellName = wellDir.getName(); if (checkSuffix(filename, "txt")) { String name = baseFile.getName(); return wellName.equals("results") || wellName.equals("Batchresults") || name.equals("Nugenesistemplate.txt") || name.startsWith("mode"); } Location experiment = null; try { experiment = wellDir.getParentFile().getParentFile(); } catch (NullPointerException e) { } if (experiment == null) return false; boolean validName = wellName.startsWith("Well") || wellName.equals("results") || (wellName.length() == 1 && wellName.replaceAll("\\d", "").length() == 0); return validName && super.isThisType(filename, open); } /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { TiffParser tp = new TiffParser(stream); IFD ifd = tp.getFirstIFD(); if (ifd == null) return false; Object s = ifd.getIFDValue(IFD.SOFTWARE); if (s == null) return false; String software = null; if (s instanceof String[]) software = ((String[]) s)[0]; else software = s.toString(); return software.startsWith("eaZYX") || software.startsWith("SCIL_Image") || software.startsWith("IDL"); } /* @see loci.formats.IFormatReader#fileGroupOption(String) */ public int fileGroupOption(String id) throws FormatException, IOException { return FormatTools.MUST_GROUP; } /* @see loci.formats.IFormatReader#get8BitLookupTable() */ public byte[][] get8BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (readers == null || readers[0][0].getCurrentFile() == null) { return null; } return readers[0][0].get8BitLookupTable(); } /* @see loci.formats.IFormatReader#get16BitLookupTable() */ public short[][] get16BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (readers == null || readers[0][0].getCurrentFile() == null) { return null; } return readers[0][0].get16BitLookupTable(); } /** * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); if (tileRows == 1 && tileCols == 1) { readers[getSeries()][no].setId(tiffs[getSeries()][no]); readers[getSeries()][no].openBytes(0, buf, x, y, w, h); readers[getSeries()][no].close(); return buf; } int outputRowLen = w * bpp[getSeries()]; Region image = new Region(x, y, w, h); int outputRow = 0, outputCol = 0; Region intersection = null; byte[] tileBuf = null; for (int row=0; row<tileRows; row++) { for (int col=0; col<tileCols; col++) { Region tile = new Region(col * tileWidth, row * tileHeight, tileWidth, tileHeight); if 
(!tile.intersects(image)) continue; intersection = tile.intersection(image); int tileIndex = (no * tileRows + row) * tileCols + col; tileBuf = getTile(getSeries(), no, row, col, intersection); int rowLen = tileBuf.length / intersection.height; // copy tile into output image int outputOffset = outputRow * outputRowLen + outputCol; for (int trow=0; trow<intersection.height; trow++) { System.arraycopy(tileBuf, trow * rowLen, buf, outputOffset, rowLen); outputOffset += outputRowLen; } outputCol += rowLen; } if (intersection != null) { outputRow += intersection.height; outputCol = 0; } } return buf; } /* @see loci.formats.IFormatReader#getSeriesUsedFiles(boolean) */ public String[] getSeriesUsedFiles(boolean noPixels) { FormatTools.assertId(currentId, true, 1); Vector<String> files = new Vector<String>(); if (!noPixels && tiffs != null) { String[] f = new String[tiffs[getSeries()].length]; System.arraycopy(tiffs[getSeries()], 0, f, 0, f.length); Arrays.sort(f); files.addAll(Arrays.asList(f)); } if (analysisFiles != null) { for (AnalysisFile file : analysisFiles) { if (file.plate <= 0 && (file.well == getSeries() || file.well < 0 || wellNumber[getSeries()] == file.well)) { files.add(file.filename); } } } if (templateFile != null) files.add(templateFile); return files.toArray(new String[files.size()]); } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (readers != null) { for (MinimalTiffReader[] images : readers) { for (MinimalTiffReader r : images) { if (r != null) r.close(fileOnly); } } } if (!fileOnly) { readers = null; tiffs = null; tileRows = tileCols = 0; resultFile = null; analysisFiles = null; wellNumber = null; tileWidth = tileHeight = 0; wellColumns = 0; bpp = null; cachedTileBuffer = null; templateFile = null; overlayFiles.clear(); overlayPlanes.clear(); } } /* @see loci.formats.IFormatReader#getOptimalTileWidth() */ public int getOptimalTileWidth() { FormatTools.assertId(currentId, true, 1); return readers[0][0].getOptimalTileWidth(); } /* @see loci.formats.IFormatReader#getOptimalTileHeight() */ public int getOptimalTileHeight() { FormatTools.assertId(currentId, true, 1); return readers[0][0].getOptimalTileHeight(); } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { super.initFile(id); if (checkSuffix(id, "txt")) { // first need to find a relevant TIFF file Location base = new Location(id).getAbsoluteFile(); Location plate = null; if (base.getParentFile().getName().equals("Batchresults")) { Location experiment = base.getParentFile().getParentFile(); String[] plates = experiment.list(true); Arrays.sort(plates); plate = new Location(experiment, plates[0]); } else { plate = base.getParentFile(); if (plate.getName().equals("results")) plate = plate.getParentFile(); } String[] list = plate.list(true); for (String f : list) { if (f.startsWith("Well")) { Location well = new Location(plate, f); String[] wellList = well.list(true); for (String file : wellList) { String path = new Location(well, file).getAbsolutePath(); if (isThisType(path) && checkSuffix(path, new String[] {"tif", "tiff"})) { initFile(path); return; } } } } throw new FormatException("Could not locate an appropriate TIFF file."); } if (!isGroupFiles()) { tiffs = new String[][] {{id}}; readers = new MinimalTiffReader[1][1]; readers[0][0] = new MinimalTiffReader(); TiffReader r = new TiffReader(); 
r.setMetadataStore(getMetadataStore()); r.setId(tiffs[0][0]); core = r.getCoreMetadata(); metadataStore = r.getMetadataStore(); Hashtable globalMetadata = r.getGlobalMetadata(); for (Object key : globalMetadata.keySet()) { addGlobalMeta(key.toString(), globalMetadata.get(key)); } r.close(); tileRows = 1; tileCols = 1; return; } analysisFiles = new Vector<AnalysisFile>(); // MIAS is a high content screening format which supports multiple plates, // wells and fields. // Most of the metadata comes from the directory hierarchy, as very little // metadata is present in the actual files. // // The directory hierarchy is either: // // <experiment name> top level experiment directory // Batchresults analysis results for experiment // <plate number>_<plate barcode> one directory for each plate // results analysis results for plate // Well<xxxx> one directory for each well // mode<x>_z<xxx>_t<xxx>_im<x>_<x>.tif // // or: // // <experiment name> top level experiment directory // <plate number> plate directory (3 digits) // <well number> well directory (4 digits) // <channel number> channel directory (1 digit) // <tile row>_<tile col>_<Z>_<T>.tif // // Each TIFF file contains a single grayscale plane. The "mode" block // refers to the channel number; the "z" and "t" blocks refer to the // Z section and timepoint, respectively. The "im<x>_<x>" block gives // the row and column coordinates of the image within a mosaic. // // We are initially given one of these TIFF files; from there, we need // to find the top level experiment directory and work our way down to // determine how many plates and wells are present. LOGGER.info("Building list of TIFF files"); Location baseFile = new Location(id).getAbsoluteFile(); Location plate = baseFile.getParentFile().getParentFile(); String plateName = plate.getName(); if (!(plateName.length() == 3 || (plateName.length() > 3 && plateName.replaceAll("\\d", "").startsWith("-")))) { plate = plate.getParentFile(); plateName = plate.getName(); } int plateNumber = Integer.parseInt(plateName.substring(0, 3)); Location experiment = plate.getParentFile(); String[] directories = experiment.list(true); Arrays.sort(directories); for (String dir : directories) { Location f = new Location(experiment, dir); if (dir.equals("Batchresults")) { String[] results = f.list(true); for (String result : results) { Location file = new Location(f, result); if (result.startsWith("NEO_Results")) { resultFile = file.getAbsolutePath(); AnalysisFile af = new AnalysisFile(); af.filename = resultFile; analysisFiles.add(af); } else if (result.startsWith("NEO_PlateOutput_")) { int plateIndex = Integer.parseInt(result.substring(16, 19)); if (plateIndex == plateNumber) { AnalysisFile af = new AnalysisFile(); af.filename = file.getAbsolutePath(); af.plate = 0; analysisFiles.add(af); } } } } } String[] list = plate.list(true); Arrays.sort(list); Vector<String> wellDirectories = new Vector<String>(); for (String dir : list) { Location f = new Location(plate, dir); if (f.getName().startsWith("Well") || f.getName().length() == 4) { // directory name is valid, but we need to make sure that the // directory contains a TIFF or a subdirectory String[] wellList = f.list(true); if (wellList != null) { boolean validWell = false; for (String potentialTIFF : wellList) { if (potentialTIFF.toLowerCase().endsWith(".tif") || new Location(f, potentialTIFF).isDirectory()) { validWell = true; break; } } if (validWell) wellDirectories.add(f.getAbsolutePath()); } } else if (f.getName().equals("results")) { String[] resultsList = 
f.list(true); for (String result : resultsList) { // exclude proprietary program state files if (!result.endsWith(".sav") && !result.endsWith(".dsv") && !result.endsWith(".dat")) { Location r = new Location(f, result); AnalysisFile af = new AnalysisFile(); af.filename = r.getAbsolutePath(); af.plate = 0; if (result.toLowerCase().startsWith("well")) { af.well = Integer.parseInt(result.substring(4, 8)) - 1; } analysisFiles.add(af); } } } else if (f.getName().equals("Nugenesistemplate.txt")) { templateFile = f.getAbsolutePath(); } } int nWells = wellDirectories.size(); LOGGER.debug("Found {} wells.", nWells); readers = new MinimalTiffReader[nWells][]; tiffs = new String[nWells][]; int[] zCount = new int[nWells]; int[] cCount = new int[nWells]; int[] tCount = new int[nWells]; String[] order = new String[nWells]; wellNumber = new int[nWells]; String[] wells = wellDirectories.toArray(new String[nWells]); Arrays.sort(wells); for (int j=0; j<nWells; j++) { Location well = new Location(wells[j]); String wellName = well.getName().replaceAll("Well", ""); wellNumber[j] = Integer.parseInt(wellName) - 1; String[] tiffFiles = well.list(true); Vector<String> tmpFiles = new Vector<String>(); for (String tiff : tiffFiles) { String name = tiff.toLowerCase(); if (name.endsWith(".tif") || name.endsWith(".tiff")) { tmpFiles.add(new Location(well, tiff).getAbsolutePath()); } } if (tmpFiles.size() == 0) { LOGGER.debug("No TIFFs in well directory {}", wells[j]); // no TIFFs in the well directory, so there are probably channel // directories which contain the TIFFs for (String dir : tiffFiles) { Location file = new Location(well, dir); if (dir.length() == 1 && file.isDirectory()) { cCount[j]++; String[] tiffs = file.list(true); for (String tiff : tiffs) { String name = tiff.toLowerCase(); if (name.endsWith(".tif") || name.endsWith(".tiff")) { tmpFiles.add(new Location(file, tiff).getAbsolutePath()); } } } } } tiffFiles = tmpFiles.toArray(new String[0]); Location firstTiff = new Location(tiffFiles[0]); FilePattern fp = new FilePattern( firstTiff.getName(), firstTiff.getParentFile().getAbsolutePath()); String[] blocks = fp.getPrefixes(); order[j] = "XY"; int[] count = fp.getCount(); for (int block=blocks.length - 1; block>=0; block--) { blocks[block] = blocks[block].toLowerCase(); blocks[block] = blocks[block].substring(blocks[block].lastIndexOf("_") + 1); if (blocks[block].equals("z")) { zCount[j] = count[block]; order[j] += "Z"; } else if (blocks[block].equals("t")) { tCount[j] = count[block]; order[j] += "T"; } else if (blocks[block].equals("mode")) { cCount[j] = count[block]; order[j] += "C"; } else if (blocks[block].equals("im")) tileRows = count[block]; else if (blocks[block].equals("")) tileCols = count[block]; else if (blocks[block].replaceAll("\\d", "").length() == 0) { if (block == 3) tileRows = count[block]; else if (block == 2) tileCols = count[block]; else if (block == 0) { zCount[j] = count[block]; order[j] += "Z"; } else if (block == 1) { tCount[j] = count[block]; order[j] += "T"; } } else { throw new FormatException("Unsupported block '" + blocks[block]); } } Arrays.sort(tiffFiles); tiffs[j] = tiffFiles; LOGGER.debug("Well {} has {} files.", j, tiffFiles.length); readers[j] = new MinimalTiffReader[tiffFiles.length]; for (int k=0; k<tiffFiles.length; k++) { readers[j][k] = new MinimalTiffReader(); } } // Populate core metadata LOGGER.info("Populating core metadata"); int nSeries = tiffs.length; core = new CoreMetadata[nSeries]; bpp = new int[nSeries]; if (readers.length == 0) { throw new 
FormatException("No wells were found."); } // assume that all wells have the same width, height, and pixel type readers[0][0].setId(tiffs[0][0]); tileWidth = readers[0][0].getSizeX(); tileHeight = readers[0][0].getSizeY(); if (tileCols == 0) tileCols = 1; if (tileRows == 0) tileRows = 1; for (int i=0; i<core.length; i++) { core[i] = new CoreMetadata(); core[i].sizeZ = zCount[i]; core[i].sizeC = cCount[i]; core[i].sizeT = tCount[i]; if (core[i].sizeZ == 0) core[i].sizeZ = 1; if (core[i].sizeC == 0) core[i].sizeC = 1; if (core[i].sizeT == 0) core[i].sizeT = 1; core[i].sizeX = tileWidth * tileCols; core[i].sizeY = tileHeight * tileRows; core[i].pixelType = readers[0][0].getPixelType(); core[i].sizeC *= readers[0][0].getSizeC(); core[i].rgb = readers[0][0].isRGB(); core[i].littleEndian = readers[0][0].isLittleEndian(); core[i].interleaved = readers[0][0].isInterleaved(); core[i].indexed = readers[0][0].isIndexed(); core[i].falseColor = readers[0][0].isFalseColor(); core[i].dimensionOrder = order[i]; if (core[i].dimensionOrder.indexOf("Z") == -1) { core[i].dimensionOrder += "Z"; } if (core[i].dimensionOrder.indexOf("C") == -1) { core[i].dimensionOrder += "C"; } if (core[i].dimensionOrder.indexOf("T") == -1) { core[i].dimensionOrder += "T"; } core[i].imageCount = core[i].sizeZ * core[i].sizeT * cCount[i]; if (core[i].imageCount == 0) { core[i].imageCount = 1; } bpp[i] = FormatTools.getBytesPerPixel(core[i].pixelType); } // Populate metadata hashtable LOGGER.info("Populating metadata hashtable"); if (resultFile != null && getMetadataOptions().getMetadataLevel() != MetadataLevel.MINIMUM) { String[] cols = null; Vector<String> rows = new Vector<String>(); boolean doKeyValue = true; int nStarLines = 0; String analysisResults = DataTools.readFile(resultFile); String[] lines = analysisResults.split("\n"); for (String line : lines) { line = line.trim(); if (line.length() == 0) continue; if (line.startsWith("******") && line.endsWith("******")) nStarLines++; if (doKeyValue) { String[] n = line.split("\t"); if (n[0].endsWith(":")) n[0] = n[0].substring(0, n[0].length() - 1); if (n.length >= 2) addGlobalMeta(n[0], n[1]); } else { if (cols == null) cols = line.split("\t"); else rows.add(line); } if (nStarLines == 2) doKeyValue = false; } for (String row : rows) { String[] d = row.split("\t"); for (int col=3; col<cols.length; col++) { addGlobalMeta("Plate " + d[0] + ", Well " + d[2] + " " + cols[col], d[col]); if (cols[col].equals("AreaCode")) { String wellID = d[col].replaceAll("\\D", ""); wellColumns = Integer.parseInt(wellID); } } } } // Populate MetadataStore LOGGER.info("Populating MetadataStore"); MetadataStore store = makeFilterMetadata(); MetadataTools.populatePixels(store, this, true); // HACK: if we don't have the analysis file, we don't how many // rows/columns are in the plate // // assume that a 96 well plate is 8x12, and a 384 well plate is 16x24 if (wellColumns == 0) { if (nWells == 96) { wellColumns = 12; } else if (nWells == 384) { wellColumns = 24; } else { LOGGER.warn("Could not determine the plate dimensions."); wellColumns = 24; } } store.setPlateID(MetadataTools.createLSID("Plate", 0), 0); String plateAcqId = MetadataTools.createLSID("PlateAcquisition", 0, 0); store.setPlateAcquisitionID(plateAcqId, 0, 0); store.setPlateAcquisitionMaximumFieldCount(new PositiveInteger(1), 0, 0); for (int well=0; well<nWells; well++) { int wellIndex = wellNumber[well]; int row = wellIndex / wellColumns; int wellCol = (wellIndex % wellColumns) + 1; char wellRow = (char) ('A' + row); 
store.setWellID(MetadataTools.createLSID("Well", 0, well), 0, well); store.setWellRow(new NonNegativeInteger(row), 0, well); store.setWellColumn(new NonNegativeInteger(wellCol - 1), 0, well); String imageID = MetadataTools.createLSID("Image", well); String wellSampleID = MetadataTools.createLSID("WellSample", 0, well, 0); store.setWellSampleID(wellSampleID, 0, well, 0); store.setWellSampleIndex(new NonNegativeInteger(well), 0, well, 0); store.setImageID(imageID, well); store.setImageName("Well " + wellRow + wellCol, well); store.setWellSampleImageRef(imageID, 0, well, 0); store.setPlateAcquisitionWellSampleRef(wellSampleID, 0, 0, well); } MetadataLevel level = getMetadataOptions().getMetadataLevel(); if (level != MetadataLevel.MINIMUM) { String experimentID = MetadataTools.createLSID("Experiment", 0); store.setExperimentID(experimentID, 0); store.setExperimentType(getExperimentType("Other"), 0); store.setExperimentDescription(experiment.getName(), 0); // populate SPW metadata store.setPlateColumnNamingConvention(getNamingConvention("Number"), 0); store.setPlateRowNamingConvention(getNamingConvention("Letter"), 0); parseTemplateFile(store); plateName = plateName.substring(plateName.indexOf("-") + 1); store.setPlateName(plateName, 0); store.setPlateExternalIdentifier(plateName, 0); for (int well=0; well<nWells; well++) { // populate Image/Pixels metadata store.setImageExperimentRef(experimentID, well); String instrumentID = MetadataTools.createLSID("Instrument", 0); store.setInstrumentID(instrumentID, 0); store.setImageInstrumentRef(instrumentID, well); } if (level != MetadataLevel.NO_OVERLAYS) { // populate image-level ROIs Color[] colors = new Color[getSizeC()]; int nextROI = 0; for (AnalysisFile af : analysisFiles) { String file = af.filename; String name = new Location(file).getName(); if (!name.startsWith("Well")) continue; int[] position = getPositionFromFile(file); int well = position[0]; if (name.endsWith("detail.txt")) { String data = DataTools.readFile(file); String[] lines = data.split("\n"); int start = 0; while (start < lines.length && !lines[start].startsWith("Label")) { start++; } if (start >= lines.length) continue; String[] columns = lines[start].split("\t"); List<String> columnNames = Arrays.asList(columns); for (int j=start+1; j<lines.length; j++) { populateROI(columnNames, lines[j].split("\t"), well, nextROI++, position[1], position[2], store); } } else if (name.endsWith("AllModesOverlay.tif")) { // original color for each channel is stored in // results/Well<nnnn>_mode<n>_z<nnn>_t<nnn>_AllModesOverlay.tif if (colors[position[3]] != null) continue; try { colors[position[3]] = getChannelColorFromFile(file); } catch (IOException e) { } if (colors[position[3]] == null) continue; for (int s=0; s<getSeriesCount(); s++) { store.setChannelColor(colors[position[3]], s, position[3]); } if (position[3] == 0) { nextROI += parseMasks(store, well, nextROI, file); } } else if (name.endsWith("overlay.tif")) { nextROI += parseMasks(store, well, nextROI, file); } } } } } // -- Helper methods -- /** * Get the color associated with the given file's channel. 
* The file must be one of the * Well<nnnn>_mode<n>_z<nnn>_t<nnn>_AllModesOverlay.tif * files in <experiment>/<plate>/results/ */ private Color getChannelColorFromFile(String file) throws FormatException, IOException { RandomAccessInputStream s = new RandomAccessInputStream(file); TiffParser tp = new TiffParser(s); IFD ifd = tp.getFirstIFD(); s.close(); if (ifd == null) return null; int[] colorMap = ifd.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null) return null; int nEntries = colorMap.length / 3; int max = Integer.MIN_VALUE; int maxIndex = -1; for (int c=0; c<3; c++) { int v = (colorMap[c * nEntries] >> 8) & 0xff; if (v > max) { max = v; maxIndex = c; } else if (v == max) { return new Color(0, 0, 0, 255); } } switch (maxIndex) { case 0: // red return new Color(255, 0, 0, 255); case 1: // green return new Color(0, 255, 0, 255); case 2: // blue return new Color(0, 0, 255, 255); } return null; } /** * Returns an array of length 5 that contains the well, time point, * Z and channel indices corresponding to the given analysis file. */ private int[] getPositionFromFile(String file) { int[] position = new int[4]; file = file.substring(file.lastIndexOf(File.separator) + 1); String wellIndex = file.substring(4, file.indexOf("_")); position[0] = Integer.parseInt(wellIndex) - 1; int tIndex = file.indexOf("_t") + 2; String t = file.substring(tIndex, file.indexOf("_", tIndex)); position[1] = Integer.parseInt(t); int zIndex = file.indexOf("_z") + 2; String zValue = file.substring(zIndex, file.indexOf("_", zIndex)); position[2] = Integer.parseInt(zValue); int cIndex = file.indexOf("mode") + 4; String cValue = file.substring(cIndex, file.indexOf("_", cIndex)); position[3] = Integer.parseInt(cValue) - 1; return position; } private void populateROI(List<String> columns, String[] data, int series, int roi, int time, int z, MetadataStore store) { Integer tv = new Integer(time); Integer zv = new Integer(z); String roiID = MetadataTools.createLSID("ROI", roi, 0); store.setROIID(roiID, roi); store.setImageROIRef(roiID, series, roi); store.setEllipseID(MetadataTools.createLSID("Shape", roi, 0), roi, 0); store.setEllipseTheT(new NonNegativeInteger(tv), roi, 0); store.setEllipseTheZ(new NonNegativeInteger(zv), roi, 0); store.setEllipseX(new Double(data[columns.indexOf("Col")]), roi, 0); store.setEllipseY(new Double(data[columns.indexOf("Row")]), roi, 0); store.setEllipseText(data[columns.indexOf("Label")], roi, 0); double diam = Double.parseDouble(data[columns.indexOf("Cell Diam.")]); double radius = diam / 2; store.setEllipseRadiusX(radius, roi, 0); store.setEllipseRadiusY(radius, roi, 0); // NB: other attributes are "Nucleus Area", "Cell Type", and // "Mean Nucleus Intens." } private byte[] getTile(int well, int no, int row, int col, Region intersection) throws FormatException, IOException { intersection.x %= tileWidth; intersection.y %= tileHeight; int tileIndex = (no * tileRows + row) * tileCols + col; readers[well][tileIndex].setId(tiffs[well][tileIndex]); int bpp = FormatTools.getBytesPerPixel(getPixelType()); int ch = getRGBChannelCount(); int bufferSize = intersection.width * intersection.height * ch * bpp; if (cachedTileBuffer == null || cachedTileBuffer.length != bufferSize) { cachedTileBuffer = new byte[bufferSize]; } byte[] buf = readers[well][tileIndex].openBytes(0, cachedTileBuffer, intersection.x, intersection.y, intersection.width, intersection.height); readers[well][tileIndex].close(); return buf; } /** Parse metadata from the Nugenesistemplate.txt file. 
*/ private void parseTemplateFile(MetadataStore store) throws IOException { if (templateFile == null) return; Double physicalSizeX = null, physicalSizeY = null, exposure = null; Vector<String> channelNames = new Vector<String>(); String date = null; String data = DataTools.readFile(templateFile); String[] lines = data.split("\r\n"); for (String line : lines) { int eq = line.indexOf("="); if (eq != -1) { String key = line.substring(0, eq); String value = line.substring(eq + 1); if (key.equals("Barcode")) { store.setPlateExternalIdentifier(value, 0); } else if (key.equals("Carrier")) { store.setPlateName(value, 0); } else if (key.equals("Pixel_X")) { physicalSizeX = new Double(value); } else if (key.equals("Pixel_Y")) { physicalSizeY = new Double(value); } else if (key.equals("Objective_ID")) { store.setObjectiveID( MetadataTools.createLSID("Objective", 0, 0), 0, 0); store.setObjectiveModel(value, 0, 0); } else if (key.equals("Magnification")) { int mag = (int) Double.parseDouble(value); if (mag > 0) { store.setObjectiveNominalMagnification( new PositiveInteger(mag), 0, 0); } else { LOGGER.warn( "Expected positive value for NominalMagnification; got {}", mag); } } else if (key.startsWith("Mode_")) { channelNames.add(value); } else if (key.equals("Date")) { date = value; } else if (key.equals("Time")) { date += " " + value; } else if (key.equals("Exposure")) { exposure = new Double(value); } } } for (int well=0; well<tiffs.length; well++) { if (physicalSizeX != null && physicalSizeX > 0) { store.setPixelsPhysicalSizeX(new PositiveFloat(physicalSizeX), well); } else { LOGGER.warn("Expected positive value for PhysicalSizeX; got {}", physicalSizeX); } if (physicalSizeY != null && physicalSizeY > 0) { store.setPixelsPhysicalSizeY(new PositiveFloat(physicalSizeY), well); } else { LOGGER.warn("Expected positive value for PhysicalSizeY; got {}", physicalSizeY); } for (int c=0; c<channelNames.size(); c++) { if (c < getEffectiveSizeC()) { store.setChannelName(channelNames.get(c), well, c); } } date = DateTools.formatDate(date, "dd/MM/yyyy HH:mm:ss"); if (date != null) { store.setImageAcquisitionDate(new Timestamp(date), well); } for (int i=0; i<getImageCount(); i++) { store.setPlaneExposureTime(exposure, well, i); } } } /** * Parse Mask ROIs from the given TIFF and place them in the given * MetadataStore. 
* @return the number of masks parsed */ private int parseMasks(MetadataStore store, int series, int roi, String overlayTiff) throws FormatException, IOException { if (!parseMasks || series >= getSeriesCount()) return 0; int nOverlays = 0; for (int i=0; i<3; i++) { String roiId = MetadataTools.createLSID("ROI", series, roi + nOverlays); String maskId = MetadataTools.createLSID("Mask", series, roi + nOverlays, 0); overlayFiles.put(maskId, overlayTiff); overlayPlanes.put(maskId, new Integer(i)); boolean validMask = populateMaskPixels(series, roi + nOverlays, 0); if (validMask) { store.setROIID(roiId, roi + nOverlays); String maskID = MetadataTools.createLSID("Shape", roi + nOverlays, 0); store.setMaskID(maskID, roi + nOverlays, 0); store.setMaskX(new Double(0), roi + nOverlays, 0); store.setMaskY(new Double(0), roi + nOverlays, 0); store.setMaskWidth(new Double(getSizeX()), roi + nOverlays, 0); store.setMaskHeight(new Double(getSizeY()), roi + nOverlays, 0); int color = 0xff000000 | (0xff << (8 * (2 - i))); store.setMaskStrokeColor(new Color(color), roi + nOverlays, 0); store.setMaskFillColor(new Color(color), roi + nOverlays, 0); store.setImageROIRef(roiId, series, roi + nOverlays); nOverlays++; } } return nOverlays; } // -- MIASReader API methods -- /** * Populate the MaskPixels.BinData attribute for the Mask identified by the * given Image index, ROI index, and Shape index. * @return true if the mask was populated successfully. */ public boolean populateMaskPixels(int imageIndex, int roiIndex, int shapeIndex) throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); String id = MetadataTools.createLSID("Mask", imageIndex, roiIndex, shapeIndex); String maskFile = overlayFiles.get(id); if (maskFile == null) { LOGGER.warn("Could not find an overlay file matching {}", id); return false; } MinimalTiffReader r = new MinimalTiffReader(); r.setId(maskFile); int index = overlayPlanes.get(id).intValue(); byte[] plane = r.openBytes(0); byte[][] planes = null; if (r.isIndexed()) { planes = ImageTools.indexedToRGB(r.get8BitLookupTable(), plane); } else { int bpp = FormatTools.getBytesPerPixel(r.getPixelType()); planes = new byte[r.getRGBChannelCount()][]; for (int c=0; c<planes.length; c++) { planes[c] = ImageTools.splitChannels(plane, c, r.getRGBChannelCount(), bpp, false, r.isInterleaved()); } } r.close(); for (int i=0; i<planes[0].length; i++) { boolean channelsEqual = true; for (int c=1; c<planes.length; c++) { if (planes[c][i] != planes[0][i]) { channelsEqual = false; break; } } if (channelsEqual) { for (int c=0; c<planes.length; c++) { planes[c][i] = 0; } } } // threshold and binary encode the pixel data boolean validMask = false; BitWriter bits = null; if (planes.length > index) { bits = new BitWriter(planes[index].length / 8); for (int p=0; p<planes[index].length; p++) { bits.write(planes[index][p] == 0 ? 0 : 1, 1); if (planes[index][p] != 0) { validMask = true; } } } if (validMask) { MetadataStore store = makeFilterMetadata(); store.setMaskBinData(bits.toByteArray(), roiIndex, shapeIndex); } else LOGGER.debug("Did not populate MaskPixels.BinData for {}", id); return validMask; } /** * Toggle whether or not Mask pixel data should be parsed in setId. * By default, it is not parsed. */ public void setAutomaticallyParseMasks(boolean parse) throws FormatException { FormatTools.assertId(currentId, false, 1); this.parseMasks = parse; } // -- Helper class -- class AnalysisFile { public String filename; public int plate = -1, well = -1; } }
components/bio-formats/src/loci/formats/in/MIASReader.java
/* * #%L * OME Bio-Formats package for reading and converting biological file formats. * %% * Copyright (C) 2005 - 2012 Open Microscopy Environment: * - Board of Regents of the University of Wisconsin-Madison * - Glencoe Software, Inc. * - University of Dundee * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ package loci.formats.in; import java.io.File; import java.io.IOException; import java.math.BigInteger; import java.util.Arrays; import java.util.Hashtable; import java.util.List; import java.util.Vector; import loci.common.DataTools; import loci.common.DateTools; import loci.common.Location; import loci.common.RandomAccessInputStream; import loci.common.Region; import loci.formats.CoreMetadata; import loci.formats.FilePattern; import loci.formats.FormatException; import loci.formats.FormatReader; import loci.formats.FormatTools; import loci.formats.ImageTools; import loci.formats.MetadataTools; import loci.formats.codec.BitWriter; import loci.formats.meta.MetadataStore; import loci.formats.tiff.IFD; import loci.formats.tiff.TiffParser; import ome.xml.model.primitives.Color; import ome.xml.model.primitives.NonNegativeInteger; import ome.xml.model.primitives.PositiveFloat; import ome.xml.model.primitives.PositiveInteger; import ome.xml.model.primitives.Timestamp; /** * MIASReader is the file format reader for Maia Scientific MIAS-2 datasets. * * <dl><dt><b>Source code:</b></dt> * <dd><a href="http://trac.openmicroscopy.org.uk/ome/browser/bioformats.git/components/bio-formats/src/loci/formats/in/MIASReader.java">Trac</a>, * <a href="http://git.openmicroscopy.org/?p=bioformats.git;a=blob;f=components/bio-formats/src/loci/formats/in/MIASReader.java;hb=HEAD">Gitweb</a></dd></dl> * * @author Melissa Linkert melissa at glencoesoftware.com */ public class MIASReader extends FormatReader { // -- Fields -- /** TIFF files - indexed by well and file. */ private String[][] tiffs; /** Delegate readers. */ private MinimalTiffReader[][] readers; /** Path to file containing analysis results for all plates. */ private String resultFile = null; private Vector<AnalysisFile> analysisFiles; private int[] wellNumber; private int tileRows, tileCols; private int tileWidth, tileHeight; private int wellColumns; private int[] bpp; private String templateFile; private Hashtable<String, String> overlayFiles = new Hashtable<String, String>(); private Hashtable<String, Integer> overlayPlanes = new Hashtable<String, Integer>(); /** Whether or not mask pixel data should be parsed in setId. */ private boolean parseMasks = false; /** Cached tile buffer to avoid re-allocations when reading tiles. */ private byte[] cachedTileBuffer; // -- Constructor -- /** Constructs a new MIAS reader. 
*/ public MIASReader() { super("MIAS", new String[] {"tif", "tiff", "txt"}); suffixSufficient = false; domains = new String[] {FormatTools.HCS_DOMAIN}; hasCompanionFiles = true; datasetDescription = "One directory per plate containing one directory " + "per well, each with one or more .tif/.tiff files"; } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#isSingleFile(String) */ public boolean isSingleFile(String id) throws FormatException, IOException { return false; } /* @see loci.formats.IFormatReader#isThisType(String, boolean) */ public boolean isThisType(String filename, boolean open) { if (!open) return super.isThisType(filename, open); // no file system access Location baseFile = new Location(filename).getAbsoluteFile(); Location wellDir = baseFile.getParentFile(); String wellName = wellDir.getName(); if (checkSuffix(filename, "txt")) { String name = baseFile.getName(); return wellName.equals("results") || wellName.equals("Batchresults") || name.equals("Nugenesistemplate.txt") || name.startsWith("mode"); } Location experiment = null; try { experiment = wellDir.getParentFile().getParentFile(); } catch (NullPointerException e) { } if (experiment == null) return false; boolean validName = wellName.startsWith("Well") || wellName.equals("results") || (wellName.length() == 1 && wellName.replaceAll("\\d", "").length() == 0); return validName && super.isThisType(filename, open); } /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { TiffParser tp = new TiffParser(stream); IFD ifd = tp.getFirstIFD(); if (ifd == null) return false; Object s = ifd.getIFDValue(IFD.SOFTWARE); if (s == null) return false; String software = null; if (s instanceof String[]) software = ((String[]) s)[0]; else software = s.toString(); return software.startsWith("eaZYX") || software.startsWith("SCIL_Image") || software.startsWith("IDL"); } /* @see loci.formats.IFormatReader#fileGroupOption(String) */ public int fileGroupOption(String id) throws FormatException, IOException { return FormatTools.MUST_GROUP; } /* @see loci.formats.IFormatReader#get8BitLookupTable() */ public byte[][] get8BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (readers == null || readers[0][0].getCurrentFile() == null) { return null; } return readers[0][0].get8BitLookupTable(); } /* @see loci.formats.IFormatReader#get16BitLookupTable() */ public short[][] get16BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (readers == null || readers[0][0].getCurrentFile() == null) { return null; } return readers[0][0].get16BitLookupTable(); } /** * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); if (tileRows == 1 && tileCols == 1) { readers[getSeries()][no].setId(tiffs[getSeries()][no]); readers[getSeries()][no].openBytes(0, buf, x, y, w, h); readers[getSeries()][no].close(); return buf; } int outputRowLen = w * bpp[getSeries()]; Region image = new Region(x, y, w, h); int outputRow = 0, outputCol = 0; Region intersection = null; byte[] tileBuf = null; for (int row=0; row<tileRows; row++) { for (int col=0; col<tileCols; col++) { Region tile = new Region(col * tileWidth, row * tileHeight, tileWidth, tileHeight); if 
(!tile.intersects(image)) continue; intersection = tile.intersection(image); int tileIndex = (no * tileRows + row) * tileCols + col; tileBuf = getTile(getSeries(), no, row, col, intersection); int rowLen = tileBuf.length / intersection.height; // copy tile into output image int outputOffset = outputRow * outputRowLen + outputCol; for (int trow=0; trow<intersection.height; trow++) { System.arraycopy(tileBuf, trow * rowLen, buf, outputOffset, rowLen); outputOffset += outputRowLen; } outputCol += rowLen; } if (intersection != null) { outputRow += intersection.height; outputCol = 0; } } return buf; } /* @see loci.formats.IFormatReader#getSeriesUsedFiles(boolean) */ public String[] getSeriesUsedFiles(boolean noPixels) { FormatTools.assertId(currentId, true, 1); Vector<String> files = new Vector<String>(); if (!noPixels && tiffs != null) { String[] f = new String[tiffs[getSeries()].length]; System.arraycopy(tiffs[getSeries()], 0, f, 0, f.length); Arrays.sort(f); files.addAll(Arrays.asList(f)); } if (analysisFiles != null) { for (AnalysisFile file : analysisFiles) { if (file.plate <= 0 && (file.well == getSeries() || file.well < 0 || wellNumber[getSeries()] == file.well)) { files.add(file.filename); } } } if (templateFile != null) files.add(templateFile); return files.toArray(new String[files.size()]); } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (readers != null) { for (MinimalTiffReader[] images : readers) { for (MinimalTiffReader r : images) { if (r != null) r.close(fileOnly); } } } if (!fileOnly) { readers = null; tiffs = null; tileRows = tileCols = 0; resultFile = null; analysisFiles = null; wellNumber = null; tileWidth = tileHeight = 0; wellColumns = 0; bpp = null; cachedTileBuffer = null; templateFile = null; overlayFiles.clear(); overlayPlanes.clear(); } } /* @see loci.formats.IFormatReader#getOptimalTileWidth() */ public int getOptimalTileWidth() { FormatTools.assertId(currentId, true, 1); return readers[0][0].getOptimalTileWidth(); } /* @see loci.formats.IFormatReader#getOptimalTileHeight() */ public int getOptimalTileHeight() { FormatTools.assertId(currentId, true, 1); return readers[0][0].getOptimalTileHeight(); } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { super.initFile(id); if (checkSuffix(id, "txt")) { // first need to find a relevant TIFF file Location base = new Location(id).getAbsoluteFile(); Location plate = null; if (base.getParentFile().getName().equals("Batchresults")) { Location experiment = base.getParentFile().getParentFile(); String[] plates = experiment.list(true); Arrays.sort(plates); plate = new Location(experiment, plates[0]); } else { plate = base.getParentFile(); if (plate.getName().equals("results")) plate = plate.getParentFile(); } String[] list = plate.list(true); for (String f : list) { if (f.startsWith("Well")) { Location well = new Location(plate, f); String[] wellList = well.list(true); for (String file : wellList) { String path = new Location(well, file).getAbsolutePath(); if (isThisType(path) && checkSuffix(path, new String[] {"tif", "tiff"})) { initFile(path); return; } } } } throw new FormatException("Could not locate an appropriate TIFF file."); } if (!isGroupFiles()) { tiffs = new String[][] {{id}}; readers = new MinimalTiffReader[1][1]; readers[0][0] = new MinimalTiffReader(); TiffReader r = new TiffReader(); 
r.setMetadataStore(getMetadataStore()); r.setId(tiffs[0][0]); core = r.getCoreMetadata(); metadataStore = r.getMetadataStore(); Hashtable globalMetadata = r.getGlobalMetadata(); for (Object key : globalMetadata.keySet()) { addGlobalMeta(key.toString(), globalMetadata.get(key)); } r.close(); tileRows = 1; tileCols = 1; return; } analysisFiles = new Vector<AnalysisFile>(); // MIAS is a high content screening format which supports multiple plates, // wells and fields. // Most of the metadata comes from the directory hierarchy, as very little // metadata is present in the actual files. // // The directory hierarchy is either: // // <experiment name> top level experiment directory // Batchresults analysis results for experiment // <plate number>_<plate barcode> one directory for each plate // results analysis results for plate // Well<xxxx> one directory for each well // mode<x>_z<xxx>_t<xxx>_im<x>_<x>.tif // // or: // // <experiment name> top level experiment directory // <plate number> plate directory (3 digits) // <well number> well directory (4 digits) // <channel number> channel directory (1 digit) // <tile row>_<tile col>_<Z>_<T>.tif // // Each TIFF file contains a single grayscale plane. The "mode" block // refers to the channel number; the "z" and "t" blocks refer to the // Z section and timepoint, respectively. The "im<x>_<x>" block gives // the row and column coordinates of the image within a mosaic. // // We are initially given one of these TIFF files; from there, we need // to find the top level experiment directory and work our way down to // determine how many plates and wells are present. LOGGER.info("Building list of TIFF files"); Location baseFile = new Location(id).getAbsoluteFile(); Location plate = baseFile.getParentFile().getParentFile(); String plateName = plate.getName(); if (!(plateName.length() == 3 || (plateName.length() > 3 && plateName.replaceAll("\\d", "").startsWith("-")))) { plate = plate.getParentFile(); plateName = plate.getName(); } int plateNumber = Integer.parseInt(plateName.substring(0, 3)); Location experiment = plate.getParentFile(); String[] directories = experiment.list(true); Arrays.sort(directories); for (String dir : directories) { Location f = new Location(experiment, dir); if (dir.equals("Batchresults")) { String[] results = f.list(true); for (String result : results) { Location file = new Location(f, result); if (result.startsWith("NEO_Results")) { resultFile = file.getAbsolutePath(); AnalysisFile af = new AnalysisFile(); af.filename = resultFile; analysisFiles.add(af); } else if (result.startsWith("NEO_PlateOutput_")) { int plateIndex = Integer.parseInt(result.substring(16, 19)); if (plateIndex == plateNumber) { AnalysisFile af = new AnalysisFile(); af.filename = file.getAbsolutePath(); af.plate = 0; analysisFiles.add(af); } } } } } String[] list = plate.list(true); Arrays.sort(list); Vector<String> wellDirectories = new Vector<String>(); for (String dir : list) { Location f = new Location(plate, dir); if (f.getName().startsWith("Well") || f.getName().length() == 4) { // directory name is valid, but we need to make sure that the // directory contains a TIFF or a subdirectory String[] wellList = f.list(true); if (wellList != null) { boolean validWell = false; for (String potentialTIFF : wellList) { if (potentialTIFF.toLowerCase().endsWith(".tif") || new Location(f, potentialTIFF).isDirectory()) { validWell = true; break; } } if (validWell) wellDirectories.add(f.getAbsolutePath()); } } else if (f.getName().equals("results")) { String[] resultsList = 
f.list(true); for (String result : resultsList) { // exclude proprietary program state files if (!result.endsWith(".sav") && !result.endsWith(".dsv") && !result.endsWith(".dat")) { Location r = new Location(f, result); AnalysisFile af = new AnalysisFile(); af.filename = r.getAbsolutePath(); af.plate = 0; if (result.toLowerCase().startsWith("well")) { af.well = Integer.parseInt(result.substring(4, 8)) - 1; } analysisFiles.add(af); } } } else if (f.getName().equals("Nugenesistemplate.txt")) { templateFile = f.getAbsolutePath(); } } int nWells = wellDirectories.size(); LOGGER.debug("Found {} wells.", nWells); readers = new MinimalTiffReader[nWells][]; tiffs = new String[nWells][]; int[] zCount = new int[nWells]; int[] cCount = new int[nWells]; int[] tCount = new int[nWells]; String[] order = new String[nWells]; wellNumber = new int[nWells]; String[] wells = wellDirectories.toArray(new String[nWells]); Arrays.sort(wells); for (int j=0; j<nWells; j++) { Location well = new Location(wells[j]); String wellName = well.getName().replaceAll("Well", ""); wellNumber[j] = Integer.parseInt(wellName) - 1; String[] tiffFiles = well.list(true); Vector<String> tmpFiles = new Vector<String>(); for (String tiff : tiffFiles) { String name = tiff.toLowerCase(); if (name.endsWith(".tif") || name.endsWith(".tiff")) { tmpFiles.add(new Location(well, tiff).getAbsolutePath()); } } if (tmpFiles.size() == 0) { LOGGER.debug("No TIFFs in well directory {}", wells[j]); // no TIFFs in the well directory, so there are probably channel // directories which contain the TIFFs for (String dir : tiffFiles) { Location file = new Location(well, dir); if (dir.length() == 1 && file.isDirectory()) { cCount[j]++; String[] tiffs = file.list(true); for (String tiff : tiffs) { String name = tiff.toLowerCase(); if (name.endsWith(".tif") || name.endsWith(".tiff")) { tmpFiles.add(new Location(file, tiff).getAbsolutePath()); } } } } } tiffFiles = tmpFiles.toArray(new String[0]); Location firstTiff = new Location(tiffFiles[0]); FilePattern fp = new FilePattern( firstTiff.getName(), firstTiff.getParentFile().getAbsolutePath()); String[] blocks = fp.getPrefixes(); order[j] = "XY"; int[] count = fp.getCount(); for (int block=blocks.length - 1; block>=0; block--) { blocks[block] = blocks[block].toLowerCase(); blocks[block] = blocks[block].substring(blocks[block].lastIndexOf("_") + 1); if (blocks[block].equals("z")) { zCount[j] = count[block]; order[j] += "Z"; } else if (blocks[block].equals("t")) { tCount[j] = count[block]; order[j] += "T"; } else if (blocks[block].equals("mode")) { cCount[j] = count[block]; order[j] += "C"; } else if (blocks[block].equals("im")) tileRows = count[block]; else if (blocks[block].equals("")) tileCols = count[block]; else if (blocks[block].replaceAll("\\d", "").length() == 0) { if (block == 3) tileRows = count[block]; else if (block == 2) tileCols = count[block]; else if (block == 0) { zCount[j] = count[block]; order[j] += "Z"; } else if (block == 1) { tCount[j] = count[block]; order[j] += "T"; } } else { throw new FormatException("Unsupported block '" + blocks[block]); } } Arrays.sort(tiffFiles); tiffs[j] = tiffFiles; LOGGER.debug("Well {} has {} files.", j, tiffFiles.length); readers[j] = new MinimalTiffReader[tiffFiles.length]; for (int k=0; k<tiffFiles.length; k++) { readers[j][k] = new MinimalTiffReader(); } } // Populate core metadata LOGGER.info("Populating core metadata"); int nSeries = tiffs.length; core = new CoreMetadata[nSeries]; bpp = new int[nSeries]; if (readers.length == 0) { throw new 
FormatException("No wells were found."); } // assume that all wells have the same width, height, and pixel type readers[0][0].setId(tiffs[0][0]); tileWidth = readers[0][0].getSizeX(); tileHeight = readers[0][0].getSizeY(); if (tileCols == 0) tileCols = 1; if (tileRows == 0) tileRows = 1; for (int i=0; i<core.length; i++) { core[i] = new CoreMetadata(); core[i].sizeZ = zCount[i]; core[i].sizeC = cCount[i]; core[i].sizeT = tCount[i]; if (core[i].sizeZ == 0) core[i].sizeZ = 1; if (core[i].sizeC == 0) core[i].sizeC = 1; if (core[i].sizeT == 0) core[i].sizeT = 1; core[i].sizeX = tileWidth * tileCols; core[i].sizeY = tileHeight * tileRows; core[i].pixelType = readers[0][0].getPixelType(); core[i].sizeC *= readers[0][0].getSizeC(); core[i].rgb = readers[0][0].isRGB(); core[i].littleEndian = readers[0][0].isLittleEndian(); core[i].interleaved = readers[0][0].isInterleaved(); core[i].indexed = readers[0][0].isIndexed(); core[i].falseColor = readers[0][0].isFalseColor(); core[i].dimensionOrder = order[i]; if (core[i].dimensionOrder.indexOf("Z") == -1) { core[i].dimensionOrder += "Z"; } if (core[i].dimensionOrder.indexOf("C") == -1) { core[i].dimensionOrder += "C"; } if (core[i].dimensionOrder.indexOf("T") == -1) { core[i].dimensionOrder += "T"; } core[i].imageCount = core[i].sizeZ * core[i].sizeT * cCount[i]; if (core[i].imageCount == 0) { core[i].imageCount = 1; } bpp[i] = FormatTools.getBytesPerPixel(core[i].pixelType); } // Populate metadata hashtable LOGGER.info("Populating metadata hashtable"); if (resultFile != null && getMetadataOptions().getMetadataLevel() != MetadataLevel.MINIMUM) { String[] cols = null; Vector<String> rows = new Vector<String>(); boolean doKeyValue = true; int nStarLines = 0; String analysisResults = DataTools.readFile(resultFile); String[] lines = analysisResults.split("\n"); for (String line : lines) { line = line.trim(); if (line.length() == 0) continue; if (line.startsWith("******") && line.endsWith("******")) nStarLines++; if (doKeyValue) { String[] n = line.split("\t"); if (n[0].endsWith(":")) n[0] = n[0].substring(0, n[0].length() - 1); if (n.length >= 2) addGlobalMeta(n[0], n[1]); } else { if (cols == null) cols = line.split("\t"); else rows.add(line); } if (nStarLines == 2) doKeyValue = false; } for (String row : rows) { String[] d = row.split("\t"); for (int col=3; col<cols.length; col++) { addGlobalMeta("Plate " + d[0] + ", Well " + d[2] + " " + cols[col], d[col]); if (cols[col].equals("AreaCode")) { String wellID = d[col].replaceAll("\\D", ""); wellColumns = Integer.parseInt(wellID); } } } } // Populate MetadataStore LOGGER.info("Populating MetadataStore"); MetadataStore store = makeFilterMetadata(); MetadataTools.populatePixels(store, this, true); // HACK: if we don't have the analysis file, we don't how many // rows/columns are in the plate // // assume that a 96 well plate is 8x12, and a 384 well plate is 16x24 if (wellColumns == 0) { if (nWells == 96) { wellColumns = 12; } else if (nWells == 384) { wellColumns = 24; } else { LOGGER.warn("Could not determine the plate dimensions."); wellColumns = 24; } } store.setPlateID(MetadataTools.createLSID("Plate", 0), 0); String plateAcqId = MetadataTools.createLSID("PlateAcquisition", 0, 0); store.setPlateAcquisitionID(plateAcqId, 0, 0); store.setPlateAcquisitionMaximumFieldCount(new PositiveInteger(1), 0, 0); for (int well=0; well<nWells; well++) { int wellIndex = wellNumber[well]; int row = wellIndex / wellColumns; int wellCol = (wellIndex % wellColumns) + 1; char wellRow = (char) ('A' + row); 
store.setWellID(MetadataTools.createLSID("Well", 0, well), 0, well); store.setWellRow(new NonNegativeInteger(row), 0, well); store.setWellColumn(new NonNegativeInteger(wellCol - 1), 0, well); String imageID = MetadataTools.createLSID("Image", well); String wellSampleID = MetadataTools.createLSID("WellSample", 0, well, 0); store.setWellSampleID(wellSampleID, 0, well, 0); store.setWellSampleIndex(new NonNegativeInteger(well), 0, well, 0); store.setImageID(imageID, well); store.setImageName("Well " + wellRow + wellCol, well); store.setWellSampleImageRef(imageID, 0, well, 0); store.setPlateAcquisitionWellSampleRef(wellSampleID, 0, 0, well); } MetadataLevel level = getMetadataOptions().getMetadataLevel(); if (level != MetadataLevel.MINIMUM) { String experimentID = MetadataTools.createLSID("Experiment", 0); store.setExperimentID(experimentID, 0); store.setExperimentType(getExperimentType("Other"), 0); store.setExperimentDescription(experiment.getName(), 0); // populate SPW metadata store.setPlateColumnNamingConvention(getNamingConvention("Number"), 0); store.setPlateRowNamingConvention(getNamingConvention("Letter"), 0); parseTemplateFile(store); plateName = plateName.substring(plateName.indexOf("-") + 1); store.setPlateName(plateName, 0); store.setPlateExternalIdentifier(plateName, 0); for (int well=0; well<nWells; well++) { // populate Image/Pixels metadata store.setImageExperimentRef(experimentID, well); String instrumentID = MetadataTools.createLSID("Instrument", 0); store.setInstrumentID(instrumentID, 0); store.setImageInstrumentRef(instrumentID, well); } if (level != MetadataLevel.NO_OVERLAYS) { // populate image-level ROIs Color[] colors = new Color[getSizeC()]; int nextROI = 0; for (AnalysisFile af : analysisFiles) { String file = af.filename; String name = new Location(file).getName(); if (!name.startsWith("Well")) continue; int[] position = getPositionFromFile(file); int well = position[0]; if (name.endsWith("detail.txt")) { String data = DataTools.readFile(file); String[] lines = data.split("\n"); int start = 0; while (start < lines.length && !lines[start].startsWith("Label")) { start++; } if (start >= lines.length) continue; String[] columns = lines[start].split("\t"); List<String> columnNames = Arrays.asList(columns); for (int j=start+1; j<lines.length; j++) { populateROI(columnNames, lines[j].split("\t"), well, nextROI++, position[1], position[2], store); } } else if (name.endsWith("AllModesOverlay.tif")) { // original color for each channel is stored in // results/Well<nnnn>_mode<n>_z<nnn>_t<nnn>_AllModesOverlay.tif if (colors[position[3]] != null) continue; try { colors[position[3]] = getChannelColorFromFile(file); } catch (IOException e) { } if (colors[position[3]] == null) continue; for (int s=0; s<getSeriesCount(); s++) { store.setChannelColor(colors[position[3]], s, position[3]); } if (position[3] == 0) { nextROI += parseMasks(store, well, nextROI, file); } } else if (name.endsWith("overlay.tif")) { nextROI += parseMasks(store, well, nextROI, file); } } } } } // -- Helper methods -- /** * Get the color associated with the given file's channel. 
* The file must be one of the * Well<nnnn>_mode<n>_z<nnn>_t<nnn>_AllModesOverlay.tif * files in <experiment>/<plate>/results/ */ private Color getChannelColorFromFile(String file) throws FormatException, IOException { RandomAccessInputStream s = new RandomAccessInputStream(file); TiffParser tp = new TiffParser(s); IFD ifd = tp.getFirstIFD(); s.close(); if (ifd == null) return null; int[] colorMap = ifd.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null) return null; int nEntries = colorMap.length / 3; int max = Integer.MIN_VALUE; int maxIndex = -1; for (int c=0; c<3; c++) { int v = (colorMap[c * nEntries] >> 8) & 0xff; if (v > max) { max = v; maxIndex = c; } else if (v == max) { return new Color(0, 0, 0, 255); } } switch (maxIndex) { case 0: // red return new Color(255, 0, 0, 255); case 1: // green return new Color(0, 255, 0, 255); case 2: // blue return new Color(0, 0, 255, 255); } return null; } /** * Returns an array of length 5 that contains the well, time point, * Z and channel indices corresponding to the given analysis file. */ private int[] getPositionFromFile(String file) { int[] position = new int[4]; file = file.substring(file.lastIndexOf(File.separator) + 1); String wellIndex = file.substring(4, file.indexOf("_")); position[0] = Integer.parseInt(wellIndex) - 1; int tIndex = file.indexOf("_t") + 2; String t = file.substring(tIndex, file.indexOf("_", tIndex)); position[1] = Integer.parseInt(t); int zIndex = file.indexOf("_z") + 2; String zValue = file.substring(zIndex, file.indexOf("_", zIndex)); position[2] = Integer.parseInt(zValue); int cIndex = file.indexOf("mode") + 4; String cValue = file.substring(cIndex, file.indexOf("_", cIndex)); position[3] = Integer.parseInt(cValue) - 1; return position; } private void populateROI(List<String> columns, String[] data, int series, int roi, int time, int z, MetadataStore store) { Integer tv = new Integer(time); Integer zv = new Integer(z); String roiID = MetadataTools.createLSID("ROI", roi, 0); store.setROIID(roiID, roi); store.setImageROIRef(roiID, series, roi); store.setEllipseID(MetadataTools.createLSID("Shape", roi, 0), roi, 0); store.setEllipseTheT(new NonNegativeInteger(tv), roi, 0); store.setEllipseTheZ(new NonNegativeInteger(zv), roi, 0); store.setEllipseX(new Double(data[columns.indexOf("Col")]), roi, 0); store.setEllipseY(new Double(data[columns.indexOf("Row")]), roi, 0); store.setEllipseText(data[columns.indexOf("Label")], roi, 0); double diam = Double.parseDouble(data[columns.indexOf("Cell Diam.")]); double radius = diam / 2; store.setEllipseRadiusX(radius, roi, 0); store.setEllipseRadiusY(radius, roi, 0); // NB: other attributes are "Nucleus Area", "Cell Type", and // "Mean Nucleus Intens." } private byte[] getTile(int well, int no, int row, int col, Region intersection) throws FormatException, IOException { intersection.x %= tileWidth; intersection.y %= tileHeight; int tileIndex = (no * tileRows + row) * tileCols + col; readers[well][tileIndex].setId(tiffs[well][tileIndex]); int bpp = FormatTools.getBytesPerPixel(getPixelType()); int ch = getRGBChannelCount(); int bufferSize = intersection.width * intersection.height * ch * bpp; if (cachedTileBuffer == null || cachedTileBuffer.length != bufferSize) { cachedTileBuffer = new byte[bufferSize]; } byte[] buf = readers[well][tileIndex].openBytes(0, cachedTileBuffer, intersection.x, intersection.y, intersection.width, intersection.height); readers[well][tileIndex].close(); return buf; } /** Parse metadata from the Nugenesistemplate.txt file. 
*/ private void parseTemplateFile(MetadataStore store) throws IOException { if (templateFile == null) return; Double physicalSizeX = null, physicalSizeY = null, exposure = null; Vector<String> channelNames = new Vector<String>(); String date = null; String data = DataTools.readFile(templateFile); String[] lines = data.split("\r\n"); for (String line : lines) { int eq = line.indexOf("="); if (eq != -1) { String key = line.substring(0, eq); String value = line.substring(eq + 1); if (key.equals("Barcode")) { store.setPlateExternalIdentifier(value, 0); } else if (key.equals("Carrier")) { store.setPlateName(value, 0); } else if (key.equals("Pixel_X")) { physicalSizeX = new Double(value); } else if (key.equals("Pixel_Y")) { physicalSizeY = new Double(value); } else if (key.equals("Objective_ID")) { store.setObjectiveID( MetadataTools.createLSID("Objective", 0, 0), 0, 0); store.setObjectiveModel(value, 0, 0); } else if (key.equals("Magnification")) { int mag = (int) Double.parseDouble(value); if (mag > 0) { store.setObjectiveNominalMagnification( new PositiveInteger(mag), 0, 0); } else { LOGGER.warn( "Expected positive value for NominalMagnification; got {}", mag); } } else if (key.startsWith("Mode_")) { channelNames.add(value); } else if (key.equals("Date")) { date = value; } else if (key.equals("Time")) { date += " " + value; } else if (key.equals("Exposure")) { exposure = new Double(value); } } } for (int well=0; well<tiffs.length; well++) { if (physicalSizeX != null && physicalSizeX > 0) { store.setPixelsPhysicalSizeX(new PositiveFloat(physicalSizeX), well); } else { LOGGER.warn("Expected positive value for PhysicalSizeX; got {}", physicalSizeX); } if (physicalSizeY != null && physicalSizeY > 0) { store.setPixelsPhysicalSizeY(new PositiveFloat(physicalSizeY), well); } else { LOGGER.warn("Expected positive value for PhysicalSizeY; got {}", physicalSizeY); } for (int c=0; c<channelNames.size(); c++) { if (c < getEffectiveSizeC()) { store.setChannelName(channelNames.get(c), well, c); } } date = DateTools.formatDate(date, "dd/MM/yyyy HH:mm:ss"); if (date != null) { store.setImageAcquisitionDate(new Timestamp(date), well); } for (int i=0; i<getImageCount(); i++) { store.setPlaneExposureTime(exposure, well, i); } } } /** * Parse Mask ROIs from the given TIFF and place them in the given * MetadataStore. 
* @return the number of masks parsed */ private int parseMasks(MetadataStore store, int series, int roi, String overlayTiff) throws FormatException, IOException { if (!parseMasks) return 0; int nOverlays = 0; for (int i=0; i<3; i++) { String roiId = MetadataTools.createLSID("ROI", series, roi + nOverlays); String maskId = MetadataTools.createLSID("Mask", series, roi + nOverlays, 0); overlayFiles.put(maskId, overlayTiff); overlayPlanes.put(maskId, new Integer(i)); boolean validMask = populateMaskPixels(series, roi + nOverlays, 0); if (validMask) { store.setROIID(roiId, roi + nOverlays); String maskID = MetadataTools.createLSID("Shape", roi + nOverlays, 0); store.setMaskID(maskID, roi + nOverlays, 0); store.setMaskX(new Double(0), roi + nOverlays, 0); store.setMaskY(new Double(0), roi + nOverlays, 0); store.setMaskWidth(new Double(getSizeX()), roi + nOverlays, 0); store.setMaskHeight(new Double(getSizeY()), roi + nOverlays, 0); int color = 0xff000000 | (0xff << (8 * (2 - i))); store.setMaskStrokeColor(new Color(color), roi + nOverlays, 0); store.setMaskFillColor(new Color(color), roi + nOverlays, 0); store.setImageROIRef(roiId, series, roi + nOverlays); nOverlays++; } } return nOverlays; } // -- MIASReader API methods -- /** * Populate the MaskPixels.BinData attribute for the Mask identified by the * given Image index, ROI index, and Shape index. * @return true if the mask was populated successfully. */ public boolean populateMaskPixels(int imageIndex, int roiIndex, int shapeIndex) throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); String id = MetadataTools.createLSID("Mask", imageIndex, roiIndex, shapeIndex); String maskFile = overlayFiles.get(id); if (maskFile == null) { LOGGER.warn("Could not find an overlay file matching {}", id); return false; } MinimalTiffReader r = new MinimalTiffReader(); r.setId(maskFile); int index = overlayPlanes.get(id).intValue(); byte[] plane = r.openBytes(0); byte[][] planes = null; if (r.isIndexed()) { planes = ImageTools.indexedToRGB(r.get8BitLookupTable(), plane); } else { int bpp = FormatTools.getBytesPerPixel(r.getPixelType()); planes = new byte[r.getRGBChannelCount()][]; for (int c=0; c<planes.length; c++) { planes[c] = ImageTools.splitChannels(plane, c, r.getRGBChannelCount(), bpp, false, r.isInterleaved()); } } r.close(); for (int i=0; i<planes[0].length; i++) { boolean channelsEqual = true; for (int c=1; c<planes.length; c++) { if (planes[c][i] != planes[0][i]) { channelsEqual = false; break; } } if (channelsEqual) { for (int c=0; c<planes.length; c++) { planes[c][i] = 0; } } } // threshold and binary encode the pixel data boolean validMask = false; BitWriter bits = null; if (planes.length > index) { bits = new BitWriter(planes[index].length / 8); for (int p=0; p<planes[index].length; p++) { bits.write(planes[index][p] == 0 ? 0 : 1, 1); if (planes[index][p] != 0) { validMask = true; } } } if (validMask) { MetadataStore store = makeFilterMetadata(); store.setMaskBinData(bits.toByteArray(), roiIndex, shapeIndex); } else LOGGER.debug("Did not populate MaskPixels.BinData for {}", id); return validMask; } /** * Toggle whether or not Mask pixel data should be parsed in setId. * By default, it is not parsed. */ public void setAutomaticallyParseMasks(boolean parse) throws FormatException { FormatTools.assertId(currentId, false, 1); this.parseMasks = parse; } // -- Helper class -- class AnalysisFile { public String filename; public int plate = -1, well = -1; } }
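An aside on the mask handling above: populateMaskPixels effectively thresholds one overlay channel into a 1-bit mask and only reports it when at least one pixel is set. The following standalone sketch illustrates that idea under stated assumptions; it uses java.util.BitSet rather than Bio-Formats' BitWriter (so the exact bit packing order may differ), and the class name is purely illustrative.

import java.util.BitSet;

// Sketch: threshold an 8-bit image plane into a packed binary mask.
// Any non-zero sample becomes 1; an all-zero plane yields no mask at all,
// mirroring the reader's "validMask" check before MaskBinData is populated.
public final class MaskThresholdSketch {
    public static byte[] threshold(byte[] plane) {
        BitSet bits = new BitSet(plane.length);
        boolean anySet = false;
        for (int i = 0; i < plane.length; i++) {
            if (plane[i] != 0) {
                bits.set(i);
                anySet = true;
            }
        }
        // Return packed bits only for a non-empty mask, otherwise signal "no mask".
        return anySet ? bits.toByteArray() : null;
    }

    public static void main(String[] args) {
        byte[] plane = {0, 0, 5, 0, 0, 0, 9, 0};
        byte[] mask = threshold(plane);
        System.out.println(mask == null ? "empty mask" : "mask bytes: " + mask.length);
    }
}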
MIAS: don't link Masks to Images that do not exist Closes #9082.
components/bio-formats/src/loci/formats/in/MIASReader.java
MIAS: don't link Masks to Images that do not exist
Java
bsd-3-clause
794c332e774ee508a845cbd43539e4aa1def0b9b
0
NCIP/cananolab,NCIP/cananolab,NCIP/cananolab
package gov.nih.nci.calab.ui.core; import gov.nih.nci.calab.domain.nano.characterization.Characterization; import gov.nih.nci.calab.domain.nano.characterization.DerivedBioAssayData; import gov.nih.nci.calab.domain.nano.characterization.physical.Morphology; import gov.nih.nci.calab.domain.nano.characterization.physical.Shape; import gov.nih.nci.calab.domain.nano.characterization.physical.Solubility; import gov.nih.nci.calab.domain.nano.characterization.physical.Surface; import gov.nih.nci.calab.dto.characterization.CharacterizationBean; import gov.nih.nci.calab.dto.characterization.DatumBean; import gov.nih.nci.calab.dto.characterization.DerivedBioAssayDataBean; import gov.nih.nci.calab.dto.characterization.invitro.CytotoxicityBean; import gov.nih.nci.calab.dto.characterization.physical.MorphologyBean; import gov.nih.nci.calab.dto.characterization.physical.ShapeBean; import gov.nih.nci.calab.dto.characterization.physical.SolubilityBean; import gov.nih.nci.calab.dto.characterization.physical.SurfaceBean; import gov.nih.nci.calab.dto.common.LabFileBean; import gov.nih.nci.calab.dto.common.UserBean; import gov.nih.nci.calab.exception.CalabException; import gov.nih.nci.calab.service.common.FileService; import gov.nih.nci.calab.service.common.LookupService; import gov.nih.nci.calab.service.search.SearchNanoparticleService; import gov.nih.nci.calab.service.security.UserService; import gov.nih.nci.calab.service.submit.SubmitNanoparticleService; import gov.nih.nci.calab.service.util.CaNanoLabConstants; import gov.nih.nci.calab.service.util.PropertyReader; import gov.nih.nci.calab.service.util.StringUtils; import java.io.File; import java.io.FileInputStream; import java.util.ArrayList; import java.util.Date; import java.util.Enumeration; import java.util.List; import java.util.SortedSet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.apache.struts.action.ActionMessages; import org.apache.struts.validator.DynaValidatorForm; /** * This action serves as the base action for all characterization related action * classes. It includes common operations such as download, updateManufacturers. 
* * @author pansu */ /* * CVS $Id: BaseCharacterizationAction.java,v 1.27 2007/05/15 13:33:05 chenhang * Exp $ */ public abstract class BaseCharacterizationAction extends AbstractDispatchAction { protected CharacterizationBean prepareCreate(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { HttpSession session = request.getSession(); CharacterizationBean charBean = (CharacterizationBean) theForm .get("achar"); // retrieve file content FileService fileService = new FileService(); for (DerivedBioAssayDataBean derivedDataFileBean : charBean .getDerivedBioAssayDataList()) { byte[] content = fileService.getFileContent(new Long( derivedDataFileBean.getId())); if (content != null) { derivedDataFileBean.setFileContent(content); } } // set createdBy and createdDate for the characterization UserBean user = (UserBean) session.getAttribute("user"); Date date = new Date(); charBean.setCreatedBy(user.getLoginName()); charBean.setCreatedDate(date); return charBean; } protected void postCreate(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); request.getSession().setAttribute("newCharacterizationCreated", "true"); request.getSession().setAttribute("newCharacterizationSourceCreated", "true"); request.getSession().setAttribute("newInstrumentCreated", "true"); request.getSession().setAttribute("newCharacterizationFileTypeCreated", "true"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); } protected CharacterizationBean[] prepareCopy(HttpServletRequest request, DynaValidatorForm theForm, SubmitNanoparticleService service) throws Exception { CharacterizationBean charBean = (CharacterizationBean) theForm .get("achar"); String origParticleName = theForm.getString("particleName"); charBean.setParticleName(origParticleName); String[] otherParticles = (String[]) theForm.get("otherParticles"); Boolean copyData = (Boolean) theForm.get("copyData"); CharacterizationBean[] charBeans = new CharacterizationBean[otherParticles.length]; int i = 0; for (String particleName : otherParticles) { CharacterizationBean newCharBean = charBean.copy(copyData .booleanValue()); newCharBean.setParticleName(particleName); // reset view title String timeStamp = StringUtils.convertDateToString(new Date(), "MMddyyHHmmssSSS"); String autoTitle = CaNanoLabConstants.AUTO_COPY_CHARACTERIZATION_VIEW_TITLE_PREFIX + timeStamp; newCharBean.setViewTitle(autoTitle); List<DerivedBioAssayDataBean> dataList = newCharBean .getDerivedBioAssayDataList(); // replace particleName in path and uri with new particleName for (DerivedBioAssayDataBean derivedBioAssayData : dataList) { String origUri = derivedBioAssayData.getUri(); if (origUri != null) derivedBioAssayData.setUri(origUri.replace( origParticleName, particleName)); } charBeans[i] = newCharBean; i++; } return charBeans; } /** * clear session data from the input form * * @param session * @param theForm * @param mapping * @throws Exception */ protected void clearMap(HttpSession session, DynaValidatorForm theForm) throws Exception { // reset achar and otherParticles theForm.set("otherParticles", new String[0]); theForm.set("copyData", false); theForm.set("achar", new CharacterizationBean()); theForm.set("morphology", new MorphologyBean()); theForm.set("shape", new ShapeBean()); theForm.set("surface", new SurfaceBean()); theForm.set("solubility", new SolubilityBean()); theForm.set("cytotoxicity", new 
CytotoxicityBean()); cleanSessionAttributes(session); } /** * Prepopulate data for the input form * * @param request * @param theForm * @throws Exception */ protected void initSetup(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { HttpSession session = request.getSession(); clearMap(session, theForm); String submitType = (String) request.getParameter("submitType"); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); String particleSource = theForm.getString("particleSource"); String charName = request.getParameter("charName"); InitSessionSetup.getInstance().setApplicationOwner(session); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); InitSessionSetup.getInstance().setAllInstruments(session); InitSessionSetup.getInstance().setAllDerivedDataFileTypes(session); InitSessionSetup.getInstance().setAllPhysicalDropdowns(session); InitSessionSetup.getInstance().setAllInvitroDropdowns(session); InitSessionSetup.getInstance().setAllCharacterizationMeasureUnitsTypes( session, charName); // TODO If there are more types of charactizations, add their // corresponding // protocol type here. if (submitType.equalsIgnoreCase("physical")) InitSessionSetup.getInstance().setAllProtocolNameVersionsByType( session, "Physical assay"); else InitSessionSetup.getInstance().setAllProtocolNameVersionsByType( session, "In vitro assay"); // set up other particle names from the same source LookupService service = new LookupService(); UserBean user = (UserBean) request.getSession().getAttribute("user"); SortedSet<String> allOtherParticleNames = service.getOtherParticles( particleSource, particleName, user); session.setAttribute("allOtherParticleNames", allOtherParticleNames); InitSessionSetup.getInstance().setDerivedDataCategoriesDatumNames( session, charName); InitSessionSetup.getInstance().setAllCharacterizationDropdowns(session); } /** * Clean the session attribture * * @param sessioin * @throws Exception */ protected void cleanSessionAttributes(HttpSession session) throws Exception { for (Enumeration e = session.getAttributeNames(); e.hasMoreElements();) { String element = (String) e.nextElement(); if (element.startsWith(CaNanoLabConstants.CHARACTERIZATION_FILE)) { session.removeAttribute(element); } } } /** * Set up the input form for adding new characterization * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward setup(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; initSetup(request, theForm); return mapping.getInputForward(); } public ActionForward input(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; // update editable dropdowns CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); ShapeBean shape = (ShapeBean) theForm.get("shape"); MorphologyBean morphology = (MorphologyBean) theForm.get("morphology"); CytotoxicityBean cyto = (CytotoxicityBean) theForm.get("cytotoxicity"); HttpSession session = request.getSession(); updateAllCharEditables(session, achar); updateShapeEditable(session, shape); updateMorphologyEditable(session, morphology); updateCytotoxicityEditable(session, cyto); return mapping.findForward("setup"); } /** * Set up the form for updating 
existing characterization * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward setupUpdate(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; initSetup(request, theForm); String characterizationId = request.getParameter("characterizationId"); SearchNanoparticleService service = new SearchNanoparticleService(); Characterization aChar = service .getCharacterizationAndDerivedDataBy(characterizationId); if (aChar == null) { throw new Exception( "This characterization no longer exists in the database. Please log in again to refresh."); } CharacterizationBean charBean = new CharacterizationBean(aChar); theForm.set("achar", charBean); // set characterizations with additional information if (aChar instanceof Shape) { theForm.set("shape", new ShapeBean((Shape) aChar)); } else if (aChar instanceof Morphology) { theForm.set("morphology", new MorphologyBean((Morphology) aChar)); } else if (aChar instanceof Solubility) { theForm.set("solubility", new SolubilityBean((Solubility) aChar)); } else if (aChar instanceof Surface) { theForm.set("surface", new SurfaceBean((Surface) aChar)); } UserService userService = new UserService( CaNanoLabConstants.CSM_APP_NAME); UserBean user = (UserBean) request.getSession().getAttribute("user"); // set up charaterization files in the session int fileNumber = 0; for (DerivedBioAssayData obj : aChar.getDerivedBioAssayDataCollection()) { DerivedBioAssayDataBean fileBean = new DerivedBioAssayDataBean(obj); boolean status = userService.checkReadPermission(user, fileBean .getId()); if (status) { List<String> accessibleGroups = userService .getAccessibleGroups(fileBean.getId(), CaNanoLabConstants.CSM_READ_ROLE); String[] visibilityGroups = accessibleGroups .toArray(new String[0]); fileBean.setVisibilityGroups(visibilityGroups); request.getSession().setAttribute( "characterizationFile" + fileNumber, fileBean); } fileNumber++; } return mapping.findForward("setup"); } /** * Prepare the form for viewing existing characterization * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward setupView(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { return setupUpdate(mapping, form, request, response); } /** * Load file action for characterization file loading. 
* * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward loadFile(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { request.setAttribute("characterizationName", request .getParameter("charName")); DynaValidatorForm theForm = (DynaValidatorForm) form; String particleName = theForm.getString("particleName"); request.setAttribute("particleName", particleName); request.setAttribute("loadFileForward", mapping.findForward("setup") .getPath()); CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); int fileNum = Integer.parseInt(request.getParameter("fileNumber")); DerivedBioAssayDataBean derivedBioAssayDataBean = achar .getDerivedBioAssayDataList().get(fileNum); request.setAttribute("file", derivedBioAssayDataBean); return mapping.findForward("loadFile"); } /** * Download action to handle characterization file download and viewing * * @param * @return */ public ActionForward download(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { String fileId = request.getParameter("fileId"); SubmitNanoparticleService service = new SubmitNanoparticleService(); LabFileBean fileBean = service.getFile(fileId); String fileRoot = PropertyReader.getProperty( CaNanoLabConstants.FILEUPLOAD_PROPERTY, "fileRepositoryDir"); File dFile = new File(fileRoot + File.separator + fileBean.getUri()); if (dFile.exists()) { response.setContentType("application/octet-stream"); response.setHeader("Content-disposition", "attachment;filename=" + fileBean.getName()); response.setHeader("cache-control", "Private"); java.io.InputStream in = new FileInputStream(dFile); java.io.OutputStream out = response.getOutputStream(); byte[] bytes = new byte[32768]; int numRead = 0; while ((numRead = in.read(bytes)) > 0) { out.write(bytes, 0, numRead); } out.close(); } else { throw new CalabException( "File to download doesn't exist on the server"); } return null; } public ActionForward addFile(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); List<DerivedBioAssayDataBean> origTables = achar .getDerivedBioAssayDataList(); int origNum = (origTables == null) ? 0 : origTables.size(); List<DerivedBioAssayDataBean> tables = new ArrayList<DerivedBioAssayDataBean>(); for (int i = 0; i < origNum; i++) { tables.add((DerivedBioAssayDataBean) origTables.get(i)); } // add a new one tables.add(new DerivedBioAssayDataBean()); achar.setDerivedBioAssayDataList(tables); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); return input(mapping, form, request, response); } public ActionForward removeFile(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { String findIndexStr = (String) request.getParameter("fileInd"); int fileInd = Integer.parseInt(findIndexStr); DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); List<DerivedBioAssayDataBean> origTables = achar .getDerivedBioAssayDataList(); int origNum = (origTables == null) ? 
0 : origTables.size(); List<DerivedBioAssayDataBean> tables = new ArrayList<DerivedBioAssayDataBean>(); for (int i = 0; i < origNum; i++) { tables.add((DerivedBioAssayDataBean) origTables.get(i)); } // remove the one at findInd if (origNum > 0) { tables.remove(fileInd); } achar.setDerivedBioAssayDataList(tables); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); return input(mapping, form, request, response); } public ActionForward addData(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); String fileIndexStr = (String) request.getParameter("fileInd"); int fileInd = Integer.parseInt(fileIndexStr); DerivedBioAssayDataBean derivedBioAssayDataBean = (DerivedBioAssayDataBean) achar .getDerivedBioAssayDataList().get(fileInd); List<DatumBean> origDataList = derivedBioAssayDataBean.getDatumList(); int origNum = (origDataList == null) ? 0 : origDataList.size(); List<DatumBean> dataList = new ArrayList<DatumBean>(); for (int i = 0; i < origNum; i++) { DatumBean dataPoint = (DatumBean) origDataList.get(i); dataList.add(dataPoint); } dataList.add(new DatumBean()); derivedBioAssayDataBean.setDatumList(dataList); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); return input(mapping, form, request, response); } public ActionForward removeData(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); String fileIndexStr = (String) request.getParameter("fileInd"); int fileInd = Integer.parseInt(fileIndexStr); String dataIndexStr = (String) request.getParameter("dataInd"); int dataInd = Integer.parseInt(dataIndexStr); DerivedBioAssayDataBean derivedBioAssayDataBean = (DerivedBioAssayDataBean) achar .getDerivedBioAssayDataList().get(fileInd); List<DatumBean> origDataList = derivedBioAssayDataBean.getDatumList(); int origNum = (origDataList == null) ? 
0 : origDataList.size(); List<DatumBean> dataList = new ArrayList<DatumBean>(); for (int i = 0; i < origNum; i++) { DatumBean dataPoint = (DatumBean) origDataList.get(i); dataList.add(dataPoint); } if (origNum > 0) dataList.remove(dataInd); derivedBioAssayDataBean.setDatumList(dataList); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); return input(mapping, form, request, response); // return mapping.getInputForward(); this gives an // IndexOutOfBoundException in the jsp page } /** * Pepopulate data for the form * * @param request * @param theForm * @throws Exception */ public ActionForward deleteConfirmed(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); String strCharId = theForm.getString("characterizationId"); SubmitNanoparticleService service = new SubmitNanoparticleService(); service.deleteCharacterizations(particleName, particleType, new String[] { strCharId }); // signal the session that characterization has been changed request.getSession().setAttribute("newCharacterizationCreated", "true"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); ActionMessages msgs = new ActionMessages(); ActionMessage msg = new ActionMessage("message.delete.characterization"); msgs.add("message", msg); saveMessages(request, msgs); return mapping.findForward("success"); } // add edited option to all editable dropdowns private void updateAllCharEditables(HttpSession session, CharacterizationBean achar) throws Exception { InitSessionSetup.getInstance().updateEditableDropdown(session, achar.getCharacterizationSource(), "characterizationSources"); InitSessionSetup.getInstance().updateEditableDropdown(session, achar.getInstrumentConfigBean().getInstrumentBean().getType(), "allInstrumentTypes"); InitSessionSetup.getInstance().updateEditableDropdown( session, achar.getInstrumentConfigBean().getInstrumentBean() .getManufacturer(), "allManufacturers"); for (DerivedBioAssayDataBean derivedBioAssayDataBean : achar .getDerivedBioAssayDataList()) { InitSessionSetup.getInstance().updateEditableDropdown(session, derivedBioAssayDataBean.getType(), "allDerivedDataFileTypes"); if (derivedBioAssayDataBean != null) { for (String category : derivedBioAssayDataBean.getCategories()) { InitSessionSetup.getInstance().updateEditableDropdown( session, category, "derivedDataCategories"); } for (DatumBean datum : derivedBioAssayDataBean.getDatumList()) { InitSessionSetup.getInstance().updateEditableDropdown( session, datum.getName(), "datumNames"); InitSessionSetup.getInstance().updateEditableDropdown( session, datum.getStatisticsType(), "charMeasureTypes"); InitSessionSetup.getInstance().updateEditableDropdown( session, datum.getUnit(), "charMeasureUnits"); } } } } // add edited option to all editable dropdowns private void updateShapeEditable(HttpSession session, ShapeBean shape) throws Exception { InitSessionSetup.getInstance().updateEditableDropdown(session, shape.getType(), "allShapeTypes"); } private void updateMorphologyEditable(HttpSession session, MorphologyBean morphology) throws Exception { InitSessionSetup.getInstance().updateEditableDropdown(session, morphology.getType(), "allMorphologyTypes"); } 
private void updateCytotoxicityEditable(HttpSession session, CytotoxicityBean cyto) throws Exception { InitSessionSetup.getInstance().updateEditableDropdown(session, cyto.getCellLine(), "allCellLines"); } public boolean loginRequired() { return true; } }
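For context on the download action above: it streams a repository file back through the servlet response with a Content-Disposition header and a 32 KB copy buffer. A minimal sketch of that pattern, written with try-with-resources and an illustrative class name (not part of the caNanoLab code base), might look like this:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.servlet.http.HttpServletResponse;

// Sketch: stream a stored file to the client as an attachment.
public final class FileDownloadSketch {
    public static void stream(File file, String downloadName, HttpServletResponse response)
            throws IOException {
        response.setContentType("application/octet-stream");
        response.setHeader("Content-disposition", "attachment;filename=" + downloadName);
        try (InputStream in = new FileInputStream(file);
             OutputStream out = response.getOutputStream()) {
            byte[] buffer = new byte[32768]; // same buffer size as the action above
            int numRead;
            while ((numRead = in.read(buffer)) > 0) {
                out.write(buffer, 0, numRead);
            }
        }
    }
}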
src/gov/nih/nci/calab/ui/core/BaseCharacterizationAction.java
package gov.nih.nci.calab.ui.core; import gov.nih.nci.calab.domain.nano.characterization.Characterization; import gov.nih.nci.calab.domain.nano.characterization.DerivedBioAssayData; import gov.nih.nci.calab.domain.nano.characterization.physical.Morphology; import gov.nih.nci.calab.domain.nano.characterization.physical.Shape; import gov.nih.nci.calab.domain.nano.characterization.physical.Solubility; import gov.nih.nci.calab.domain.nano.characterization.physical.Surface; import gov.nih.nci.calab.dto.characterization.CharacterizationBean; import gov.nih.nci.calab.dto.characterization.DatumBean; import gov.nih.nci.calab.dto.characterization.DerivedBioAssayDataBean; import gov.nih.nci.calab.dto.characterization.invitro.CytotoxicityBean; import gov.nih.nci.calab.dto.characterization.physical.MorphologyBean; import gov.nih.nci.calab.dto.characterization.physical.ShapeBean; import gov.nih.nci.calab.dto.characterization.physical.SolubilityBean; import gov.nih.nci.calab.dto.characterization.physical.SurfaceBean; import gov.nih.nci.calab.dto.common.LabFileBean; import gov.nih.nci.calab.dto.common.UserBean; import gov.nih.nci.calab.exception.CalabException; import gov.nih.nci.calab.service.common.FileService; import gov.nih.nci.calab.service.common.LookupService; import gov.nih.nci.calab.service.search.SearchNanoparticleService; import gov.nih.nci.calab.service.security.UserService; import gov.nih.nci.calab.service.submit.SubmitNanoparticleService; import gov.nih.nci.calab.service.util.CaNanoLabConstants; import gov.nih.nci.calab.service.util.PropertyReader; import gov.nih.nci.calab.service.util.StringUtils; import java.io.File; import java.io.FileInputStream; import java.util.ArrayList; import java.util.Date; import java.util.Enumeration; import java.util.List; import java.util.SortedSet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.apache.struts.action.ActionMessages; import org.apache.struts.validator.DynaValidatorForm; /** * This action serves as the base action for all characterization related action * classes. It includes common operations such as download, updateManufacturers. 
* * @author pansu */ /* * CVS $Id: BaseCharacterizationAction.java,v 1.27 2007/05/15 13:33:05 chenhang * Exp $ */ public abstract class BaseCharacterizationAction extends AbstractDispatchAction { protected CharacterizationBean prepareCreate(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { HttpSession session = request.getSession(); CharacterizationBean charBean = (CharacterizationBean) theForm .get("achar"); // set createdBy and createdDate for the characterization UserBean user = (UserBean) session.getAttribute("user"); Date date = new Date(); charBean.setCreatedBy(user.getLoginName()); charBean.setCreatedDate(date); return charBean; } protected void postCreate(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); request.getSession().setAttribute("newCharacterizationCreated", "true"); request.getSession().setAttribute("newCharacterizationSourceCreated", "true"); request.getSession().setAttribute("newInstrumentCreated", "true"); request.getSession().setAttribute("newCharacterizationFileTypeCreated", "true"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); } protected CharacterizationBean[] prepareCopy(HttpServletRequest request, DynaValidatorForm theForm, SubmitNanoparticleService service) throws Exception { CharacterizationBean charBean = (CharacterizationBean) theForm .get("achar"); String origParticleName = theForm.getString("particleName"); charBean.setParticleName(origParticleName); String[] otherParticles = (String[]) theForm.get("otherParticles"); Boolean copyData = (Boolean) theForm.get("copyData"); CharacterizationBean[] charBeans = new CharacterizationBean[otherParticles.length]; int i = 0; for (String particleName : otherParticles) { CharacterizationBean newCharBean = charBean.copy(copyData .booleanValue()); newCharBean.setParticleName(particleName); // reset view title String timeStamp = StringUtils.convertDateToString(new Date(), "MMddyyHHmmssSSS"); String autoTitle = CaNanoLabConstants.AUTO_COPY_CHARACTERIZATION_VIEW_TITLE_PREFIX + timeStamp; newCharBean.setViewTitle(autoTitle); List<DerivedBioAssayDataBean> dataList = newCharBean .getDerivedBioAssayDataList(); // replace particleName in path and uri with new particleName for (DerivedBioAssayDataBean derivedBioAssayData : dataList) { String origUri = derivedBioAssayData.getUri(); derivedBioAssayData.setUri(origUri.replace(origParticleName, particleName)); } charBeans[i] = newCharBean; i++; } return charBeans; } /** * clear session data from the input form * * @param session * @param theForm * @param mapping * @throws Exception */ protected void clearMap(HttpSession session, DynaValidatorForm theForm) throws Exception { // reset achar and otherParticles theForm.set("otherParticles", new String[0]); theForm.set("copyData", false); theForm.set("achar", new CharacterizationBean()); theForm.set("morphology", new MorphologyBean()); theForm.set("shape", new ShapeBean()); theForm.set("surface", new SurfaceBean()); theForm.set("solubility", new SolubilityBean()); theForm.set("cytotoxicity", new CytotoxicityBean()); cleanSessionAttributes(session); } /** * Prepopulate data for the input form * * @param request * @param theForm * @throws Exception */ protected void initSetup(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { HttpSession session = request.getSession(); clearMap(session, theForm); String submitType = 
(String) request.getParameter("submitType"); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); String particleSource = theForm.getString("particleSource"); String charName = request.getParameter("charName"); InitSessionSetup.getInstance().setApplicationOwner(session); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); InitSessionSetup.getInstance().setAllInstruments(session); InitSessionSetup.getInstance().setAllDerivedDataFileTypes(session); InitSessionSetup.getInstance().setAllMorphologyTypes(session); InitSessionSetup.getInstance().setAllShapeTypes(session); InitSessionSetup.getInstance().setAllConcentrationUnits(session); InitSessionSetup.getInstance().setAllCellLines(session); InitSessionSetup.getInstance().setAllCharacterizationMeasureUnitsTypes( session, charName); // TODO If there are more types of charactizations, add their // corresponding // protocol type here. if (submitType.equalsIgnoreCase("physical")) InitSessionSetup.getInstance().setAllProtocolNameVersionsByType( session, "Physical assay"); else InitSessionSetup.getInstance().setAllProtocolNameVersionsByType( session, "In vitro assay"); // set up other particle names from the same source LookupService service = new LookupService(); UserBean user = (UserBean) request.getSession().getAttribute("user"); SortedSet<String> allOtherParticleNames = service.getOtherParticles( particleSource, particleName, user); session.setAttribute("allOtherParticleNames", allOtherParticleNames); InitSessionSetup.getInstance().setDerivedDataCategoriesDatumNames( session, charName); InitSessionSetup.getInstance().setAllCharacterizationSources(session); } /** * Clean the session attribture * * @param sessioin * @throws Exception */ protected void cleanSessionAttributes(HttpSession session) throws Exception { for (Enumeration e = session.getAttributeNames(); e.hasMoreElements();) { String element = (String) e.nextElement(); if (element.startsWith(CaNanoLabConstants.CHARACTERIZATION_FILE)) { session.removeAttribute(element); } } } /** * Set up the input form for adding new characterization * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward setup(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; initSetup(request, theForm); return mapping.getInputForward(); } public ActionForward input(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; // update editable dropdowns CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); ShapeBean shape = (ShapeBean) theForm.get("shape"); MorphologyBean morphology = (MorphologyBean) theForm.get("morphology"); CytotoxicityBean cyto = (CytotoxicityBean) theForm.get("cytotoxicity"); HttpSession session = request.getSession(); updateAllCharEditables(session, achar); updateShapeEditable(session, shape); updateMorphologyEditable(session, morphology); updateCytotoxicityEditable(session, cyto); return mapping.findForward("setup"); } /** * Set up the form for updating existing characterization * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward setupUpdate(ActionMapping mapping, ActionForm form, HttpServletRequest request, 
HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; initSetup(request, theForm); String characterizationId = request.getParameter("characterizationId"); SearchNanoparticleService service = new SearchNanoparticleService(); Characterization aChar = service .getCharacterizationAndDerivedDataBy(characterizationId); if (aChar == null) { throw new Exception( "This characterization no longer exists in the database. Please log in again to refresh."); } CharacterizationBean charBean = new CharacterizationBean(aChar); // retrieve file content FileService fileService = new FileService(); for (DerivedBioAssayDataBean derivedDataFileBean : charBean .getDerivedBioAssayDataList()) { byte[] content = fileService.getFileContent(new Long( derivedDataFileBean.getId())); if (content != null) { derivedDataFileBean.setFileContent(content); } } theForm.set("achar", charBean); // set characterizations with additional information if (aChar instanceof Shape) { theForm.set("shape", new ShapeBean((Shape) aChar)); } else if (aChar instanceof Morphology) { theForm.set("morphology", new MorphologyBean((Morphology) aChar)); } else if (aChar instanceof Solubility) { theForm.set("solubility", new SolubilityBean((Solubility) aChar)); } else if (aChar instanceof Surface) { theForm.set("surface", new SurfaceBean((Surface) aChar)); } UserService userService = new UserService( CaNanoLabConstants.CSM_APP_NAME); UserBean user = (UserBean) request.getSession().getAttribute("user"); // set up charaterization files in the session int fileNumber = 0; for (DerivedBioAssayData obj : aChar.getDerivedBioAssayDataCollection()) { DerivedBioAssayDataBean fileBean = new DerivedBioAssayDataBean(obj); boolean status = userService.checkReadPermission(user, fileBean .getId()); if (status) { List<String> accessibleGroups = userService .getAccessibleGroups(fileBean.getId(), CaNanoLabConstants.CSM_READ_ROLE); String[] visibilityGroups = accessibleGroups .toArray(new String[0]); fileBean.setVisibilityGroups(visibilityGroups); request.getSession().setAttribute( "characterizationFile" + fileNumber, fileBean); } fileNumber++; } return mapping.findForward("setup"); } /** * Prepare the form for viewing existing characterization * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward setupView(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { return setupUpdate(mapping, form, request, response); } /** * Load file action for characterization file loading. 
* * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward loadFile(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { request.setAttribute("characterizationName", request .getParameter("charName")); DynaValidatorForm theForm = (DynaValidatorForm) form; String particleName = theForm.getString("particleName"); request.setAttribute("particleName", particleName); request.setAttribute("loadFileForward", mapping.findForward("setup") .getPath()); CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); int fileNum = Integer.parseInt(request.getParameter("fileNumber")); DerivedBioAssayDataBean derivedBioAssayDataBean = achar .getDerivedBioAssayDataList().get(fileNum); request.setAttribute("file", derivedBioAssayDataBean); return mapping.findForward("loadFile"); } /** * Download action to handle characterization file download and viewing * * @param * @return */ public ActionForward download(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { String fileId = request.getParameter("fileId"); SubmitNanoparticleService service = new SubmitNanoparticleService(); LabFileBean fileBean = service.getFile(fileId); String fileRoot = PropertyReader.getProperty( CaNanoLabConstants.FILEUPLOAD_PROPERTY, "fileRepositoryDir"); File dFile = new File(fileRoot + File.separator + fileBean.getUri()); if (dFile.exists()) { response.setContentType("application/octet-stream"); response.setHeader("Content-disposition", "attachment;filename=" + fileBean.getName()); response.setHeader("cache-control", "Private"); java.io.InputStream in = new FileInputStream(dFile); java.io.OutputStream out = response.getOutputStream(); byte[] bytes = new byte[32768]; int numRead = 0; while ((numRead = in.read(bytes)) > 0) { out.write(bytes, 0, numRead); } out.close(); } else { throw new CalabException( "File to download doesn't exist on the server"); } return null; } public ActionForward addFile(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); List<DerivedBioAssayDataBean> origTables = achar .getDerivedBioAssayDataList(); int origNum = (origTables == null) ? 0 : origTables.size(); List<DerivedBioAssayDataBean> tables = new ArrayList<DerivedBioAssayDataBean>(); for (int i = 0; i < origNum; i++) { tables.add((DerivedBioAssayDataBean) origTables.get(i)); } // add a new one tables.add(new DerivedBioAssayDataBean()); achar.setDerivedBioAssayDataList(tables); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); return input(mapping, form, request, response); } public ActionForward removeFile(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { String findIndexStr = (String) request.getParameter("fileInd"); int fileInd = Integer.parseInt(findIndexStr); DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); List<DerivedBioAssayDataBean> origTables = achar .getDerivedBioAssayDataList(); int origNum = (origTables == null) ? 
0 : origTables.size(); List<DerivedBioAssayDataBean> tables = new ArrayList<DerivedBioAssayDataBean>(); for (int i = 0; i < origNum; i++) { tables.add((DerivedBioAssayDataBean) origTables.get(i)); } // remove the one at findInd if (origNum > 0) { tables.remove(fileInd); } achar.setDerivedBioAssayDataList(tables); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); return input(mapping, form, request, response); } public ActionForward addData(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); String fileIndexStr = (String) request.getParameter("fileInd"); int fileInd = Integer.parseInt(fileIndexStr); DerivedBioAssayDataBean derivedBioAssayDataBean = (DerivedBioAssayDataBean) achar .getDerivedBioAssayDataList().get(fileInd); List<DatumBean> origDataList = derivedBioAssayDataBean.getDatumList(); int origNum = (origDataList == null) ? 0 : origDataList.size(); List<DatumBean> dataList = new ArrayList<DatumBean>(); for (int i = 0; i < origNum; i++) { DatumBean dataPoint = (DatumBean) origDataList.get(i); dataList.add(dataPoint); } dataList.add(new DatumBean()); derivedBioAssayDataBean.setDatumList(dataList); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); return input(mapping, form, request, response); } public ActionForward removeData(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); String fileIndexStr = (String) request.getParameter("fileInd"); int fileInd = Integer.parseInt(fileIndexStr); String dataIndexStr = (String) request.getParameter("dataInd"); int dataInd = Integer.parseInt(dataIndexStr); DerivedBioAssayDataBean derivedBioAssayDataBean = (DerivedBioAssayDataBean) achar .getDerivedBioAssayDataList().get(fileInd); List<DatumBean> origDataList = derivedBioAssayDataBean.getDatumList(); int origNum = (origDataList == null) ? 
0 : origDataList.size(); List<DatumBean> dataList = new ArrayList<DatumBean>(); for (int i = 0; i < origNum; i++) { DatumBean dataPoint = (DatumBean) origDataList.get(i); dataList.add(dataPoint); } if (origNum > 0) dataList.remove(dataInd); derivedBioAssayDataBean.setDatumList(dataList); String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); return input(mapping, form, request, response); // return mapping.getInputForward(); this gives an // IndexOutOfBoundException in the jsp page } /** * Pepopulate data for the form * * @param request * @param theForm * @throws Exception */ public ActionForward deleteConfirmed(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; String particleName = theForm.getString("particleName"); String particleType = theForm.getString("particleType"); String strCharId = theForm.getString("characterizationId"); SubmitNanoparticleService service = new SubmitNanoparticleService(); service.deleteCharacterizations(particleName, particleType, new String[] { strCharId }); // signal the session that characterization has been changed request.getSession().setAttribute("newCharacterizationCreated", "true"); InitSessionSetup.getInstance().setSideParticleMenu(request, particleName, particleType); ActionMessages msgs = new ActionMessages(); ActionMessage msg = new ActionMessage("message.delete.characterization"); msgs.add("message", msg); saveMessages(request, msgs); return mapping.findForward("success"); } // add edited option to all editable dropdowns private void updateAllCharEditables(HttpSession session, CharacterizationBean achar) throws Exception { InitSessionSetup.getInstance().updateEditableDropdown(session, achar.getCharacterizationSource(), "characterizationSources"); InitSessionSetup.getInstance().updateEditableDropdown(session, achar.getInstrumentConfigBean().getInstrumentBean().getType(), "allInstrumentTypes"); InitSessionSetup.getInstance().updateEditableDropdown( session, achar.getInstrumentConfigBean().getInstrumentBean() .getManufacturer(), "allManufacturers"); for (DerivedBioAssayDataBean derivedBioAssayDataBean : achar .getDerivedBioAssayDataList()) { InitSessionSetup.getInstance().updateEditableDropdown(session, derivedBioAssayDataBean.getType(), "allDerivedDataFileTypes"); if (derivedBioAssayDataBean != null) { for (String category : derivedBioAssayDataBean.getCategories()) { InitSessionSetup.getInstance().updateEditableDropdown( session, category, "derivedDataCategories"); } for (DatumBean datum : derivedBioAssayDataBean.getDatumList()) { InitSessionSetup.getInstance().updateEditableDropdown( session, datum.getName(), "datumNames"); InitSessionSetup.getInstance().updateEditableDropdown( session, datum.getStatisticsType(), "charMeasureTypes"); InitSessionSetup.getInstance().updateEditableDropdown( session, datum.getUnit(), "charMeasureUnits"); } } } } // add edited option to all editable dropdowns private void updateShapeEditable(HttpSession session, ShapeBean shape) throws Exception { InitSessionSetup.getInstance().updateEditableDropdown(session, shape.getType(), "allShapeTypes"); } private void updateMorphologyEditable(HttpSession session, MorphologyBean morphology) throws Exception { InitSessionSetup.getInstance().updateEditableDropdown(session, morphology.getType(), "allMorphologyTypes"); } 
private void updateCytotoxicityEditable(HttpSession session, CytotoxicityBean cyto) throws Exception { InitSessionSetup.getInstance().updateEditableDropdown(session, cyto.getCellLine(), "allCellLines"); } public boolean loginRequired() { return true; } }
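One behavioral difference between the two versions of prepareCopy shown above is that the newer one guards against derived data entries whose URI is null before rewriting the particle name. A tiny, self-contained sketch of that null-safe rewrite (class and method names are illustrative only):

// Sketch of the null-safe URI rewrite; the older version called
// origUri.replace(...) unconditionally and could fail with a
// NullPointerException when a derived data file had no URI.
public final class UriRewriteSketch {
    static String rewrite(String origUri, String origParticleName, String particleName) {
        return (origUri == null) ? null : origUri.replace(origParticleName, particleName);
    }

    public static void main(String[] args) {
        System.out.println(rewrite("particles/oldName/file1.txt", "oldName", "newName"));
        System.out.println(rewrite(null, "oldName", "newName")); // prints "null" instead of throwing
    }
}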
moved retrieval of file content from setupUpdate to preCreate SVN-Revision: 10316
src/gov/nih/nci/calab/ui/core/BaseCharacterizationAction.java
moved retrieval of file content from setupUpdate to preCreate
Java
bsd-3-clause
4b1a692ff3c1619f50b0d1c26e01ae746e6ea0d7
0
egret-labs/egret-android-support
package org.egret.java.HelloEgret; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; import java.net.URL; import org.egret.egretframeworknative.EgretRuntimeActivity; import android.os.Bundle; import android.util.Log; public class HelloEgret extends EgretRuntimeActivity { private String loaderUrl; @Override protected void onCreate(Bundle savedInstanceState) { // 1 start // Launch directly from the local source files, for debugging // super.onCreate(savedInstanceState); // loaderUrl = ""; // runInUIThread(); // 1 end // 2 start // Launch from the local zipped source files, the recommended way // super.onCreate(savedInstanceState); // loaderUrl = "game_code.zip"; // runInUIThread(); // 2 end // 3 start // Launch from a zip on the server // super.onCreate(savedInstanceState); // loaderUrl = "http://10.0.2.33/launcher.zip"; // runInUIThread(); // 3 end // 4-1/2 start super.onCreate(savedInstanceState); loaderUrl = "http://10.0.2.33/request"; // requestContent = "http://10.0.2.33/launcher.zip" new Thread(new Runnable() { public void run() { loaderUrl = getRequestContent(loaderUrl); if (loaderUrl == null) { return; } Log.d(TAG, loaderUrl); runInUIThread(); } }).start(); // 4-1/2 end } /** * Override your egret game loader url (override this method to specify how the packaged game is launched) */ @Override public String getLoaderUrl() { return loaderUrl; } // 4-2/2 start private String getRequestContent(String url) { URL realUrl = null; HttpURLConnection conn = null; InputStream in = null; ByteArrayOutputStream out = null; try { realUrl = new URL(loaderUrl); conn = (HttpURLConnection) realUrl.openConnection(); if (conn == null) { return null; } conn.setConnectTimeout(30 * 1000); if (conn.getResponseCode() == 0 || conn.getResponseCode() >= 400) { return null; } in = conn.getInputStream(); out = new ByteArrayOutputStream(); byte[] buffer = new byte[2048]; int read; while ((read = in.read(buffer)) > 0) { out.write(buffer, 0, read); } return new String(out.toByteArray()); } catch (IOException e) { e.printStackTrace(); return null; } finally { try { if (conn != null) { conn.disconnect(); } if (out != null) { out.close(); } if (in != null) { in.close(); } } catch (IOException e) { e.printStackTrace(); } } } // 4-2/2 end }
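The onCreate above first asks a server for the real loader URL on a background thread before starting the runtime. A compact, standalone version of that HTTP GET, returning the response body as a String or null on failure, could look like the sketch below; the class name is illustrative, the URL is the placeholder address from the sample, and the 30 second timeout matches it.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

// Sketch: fetch a small text resource and return its body, or null on error.
public final class LoaderUrlFetchSketch {
    public static String fetch(String url) {
        HttpURLConnection conn = null;
        try {
            conn = (HttpURLConnection) new URL(url).openConnection();
            conn.setConnectTimeout(30 * 1000);
            if (conn.getResponseCode() >= 400) {
                return null;
            }
            try (InputStream in = conn.getInputStream();
                 ByteArrayOutputStream out = new ByteArrayOutputStream()) {
                byte[] buffer = new byte[2048];
                int read;
                while ((read = in.read(buffer)) > 0) {
                    out.write(buffer, 0, read);
                }
                return out.toString("UTF-8");
            }
        } catch (IOException e) {
            return null;
        } finally {
            if (conn != null) {
                conn.disconnect();
            }
        }
    }

    public static void main(String[] args) {
        System.out.println(fetch("http://10.0.2.33/request")); // placeholder URL from the sample
    }
}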
proj.android/src/org/egret/java/HelloEgret/HelloEgret.java
package org.egret.java.HelloEgret; import org.egret.egretframeworknative.EgretRuntimeActivity; import android.os.Bundle; public class HelloEgret extends EgretRuntimeActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); } /** * Override your egret game loader url (override this method to specify how the packaged game is launched) * - return "game_code.zip" (recommended: local package) * - return "http://www.example.com/game_code.zip" (server mode) * - return "" (debug mode) */ @Override public String getLoaderUrl() { return ""; } }
[UPDATE] HelloEgret Template
proj.android/src/org/egret/java/HelloEgret/HelloEgret.java
[UPDATE] HelloEgret Template
Java
bsd-3-clause
ea722961a9e59f5b110e6ba204cc990529fa2e97
0
element-doo/postgresql-jdbc,element-doo/postgresql-jdbc
package org.postgresql.jdbc2; // IMPORTANT NOTE: This file implements the JDBC 2 version of the driver. // If you make any modifications to this file, you must make sure that the // changes are also made (if relevent) to the related JDBC 1 class in the // org.postgresql.jdbc1 package. import java.sql.*; import java.util.Vector; import org.postgresql.util.*; /* * A Statement object is used for executing a static SQL statement and * obtaining the results produced by it. * * <p>Only one ResultSet per Statement can be open at any point in time. * Therefore, if the reading of one ResultSet is interleaved with the * reading of another, each must have been generated by different * Statements. All statement execute methods implicitly close a * statement's current ResultSet if an open one exists. * * @see java.sql.Statement * @see ResultSet */ public class Statement extends org.postgresql.Statement implements java.sql.Statement { private Connection connection; // The connection who created us private Vector batch = null; private int resultsettype; // the resultset type to return private int concurrency; // is it updateable or not? /* * Constructor for a Statement. It simply sets the connection * that created us. * * @param c the Connection instantation that creates us */ public Statement (Connection c) { connection = c; resultsettype = java.sql.ResultSet.TYPE_SCROLL_INSENSITIVE; concurrency = java.sql.ResultSet.CONCUR_READ_ONLY; } /* * Execute a SQL statement that retruns a single ResultSet * * @param sql typically a static SQL SELECT statement * @return a ResulSet that contains the data produced by the query * @exception SQLException if a database access error occurs */ public java.sql.ResultSet executeQuery(String sql) throws SQLException { this.execute(sql); while (result != null && !((org.postgresql.ResultSet)result).reallyResultSet()) result = ((org.postgresql.ResultSet)result).getNext(); if (result == null) throw new PSQLException("postgresql.stat.noresult"); return result; } /* * Execute a SQL INSERT, UPDATE or DELETE statement. In addition * SQL statements that return nothing such as SQL DDL statements * can be executed * * @param sql a SQL statement * @return either a row count, or 0 for SQL commands * @exception SQLException if a database access error occurs */ public int executeUpdate(String sql) throws SQLException { this.execute(sql); if (((org.postgresql.ResultSet)result).reallyResultSet()) throw new PSQLException("postgresql.stat.result"); return this.getUpdateCount(); } /* * setCursorName defines the SQL cursor name that will be used by * subsequent execute methods. This name can then be used in SQL * positioned update/delete statements to identify the current row * in the ResultSet generated by this statement. If a database * doesn't support positioned update/delete, this method is a * no-op. * * <p><B>Note:</B> By definition, positioned update/delete execution * must be done by a different Statement than the one which * generated the ResultSet being used for positioning. Also, cursor * names must be unique within a Connection. * * <p>We throw an additional constriction. There can only be one * cursor active at any one time. * * @param name the new cursor name * @exception SQLException if a database access error occurs */ public void setCursorName(String name) throws SQLException { connection.setCursorName(name); } /* * Execute a SQL statement that may return multiple results. We * don't have to worry about this since we do not support multiple * ResultSets. 
You can use getResultSet or getUpdateCount to * retrieve the result. * * @param sql any SQL statement * @return true if the next result is a ResulSet, false if it is * an update count or there are no more results * @exception SQLException if a database access error occurs */ public boolean execute(String sql) throws SQLException { if (escapeProcessing) sql = escapeSQL(sql); // New in 7.1, if we have a previous resultset then force it to close // This brings us nearer to compliance, and helps memory management. // Internal stuff will call ExecSQL directly, bypassing this. if (result != null) { java.sql.ResultSet rs = getResultSet(); if (rs != null) rs.close(); } // New in 7.1, pass Statement so that ExecSQL can customise to it result = connection.ExecSQL(sql, this); // New in 7.1, required for ResultSet.getStatement() to work ((org.postgresql.jdbc2.ResultSet)result).setStatement(this); return (result != null && ((org.postgresql.ResultSet)result).reallyResultSet()); } /* * getUpdateCount returns the current result as an update count, * if the result is a ResultSet or there are no more results, -1 * is returned. It should only be called once per result. * * @return the current result as an update count. * @exception SQLException if a database access error occurs */ public int getUpdateCount() throws SQLException { if (result == null) return -1; if (((org.postgresql.ResultSet)result).reallyResultSet()) return -1; return ((org.postgresql.ResultSet)result).getResultCount(); } /* * getMoreResults moves to a Statement's next result. If it returns * true, this result is a ResulSet. * * @return true if the next ResultSet is valid * @exception SQLException if a database access error occurs */ public boolean getMoreResults() throws SQLException { result = ((org.postgresql.ResultSet)result).getNext(); return (result != null && ((org.postgresql.ResultSet)result).reallyResultSet()); } // ** JDBC 2 Extensions ** public void addBatch(String sql) throws SQLException { if (batch == null) batch = new Vector(); batch.addElement(sql); } public void clearBatch() throws SQLException { if (batch != null) batch.removeAllElements(); } public int[] executeBatch() throws SQLException { if (batch == null) batch = new Vector(); int size = batch.size(); int[] result = new int[size]; int i = 0; try { for (i = 0;i < size;i++) result[i] = this.executeUpdate((String)batch.elementAt(i)); } catch (SQLException e) { int[] resultSucceeded = new int[i]; System.arraycopy(result, 0, resultSucceeded, 0, i); PBatchUpdateException updex = new PBatchUpdateException("postgresql.stat.batch.error", new Integer(i), batch.elementAt(i), resultSucceeded); updex.setNextException(e); throw updex; } finally { batch.removeAllElements(); } return result; } public void cancel() throws SQLException { connection.cancelQuery(); } public java.sql.Connection getConnection() throws SQLException { return (java.sql.Connection)connection; } public int getFetchDirection() throws SQLException { throw new PSQLException("postgresql.psqlnotimp"); } public int getFetchSize() throws SQLException { // This one can only return a valid value when were a cursor? 
throw org.postgresql.Driver.notImplemented(); } public int getResultSetConcurrency() throws SQLException { // new in 7.1 return concurrency; } public int getResultSetType() throws SQLException { // new in 7.1 return resultsettype; } public void setFetchDirection(int direction) throws SQLException { throw org.postgresql.Driver.notImplemented(); } public void setFetchSize(int rows) throws SQLException { throw org.postgresql.Driver.notImplemented(); } /* * New in 7.1 */ public void setResultSetConcurrency(int value) throws SQLException { concurrency = value; } /* * New in 7.1 */ public void setResultSetType(int value) throws SQLException { resultsettype = value; } }
org/postgresql/jdbc2/Statement.java
package org.postgresql.jdbc2; // IMPORTANT NOTE: This file implements the JDBC 2 version of the driver. // If you make any modifications to this file, you must make sure that the // changes are also made (if relevent) to the related JDBC 1 class in the // org.postgresql.jdbc1 package. import java.sql.*; import java.util.Vector; import org.postgresql.util.*; /* * A Statement object is used for executing a static SQL statement and * obtaining the results produced by it. * * <p>Only one ResultSet per Statement can be open at any point in time. * Therefore, if the reading of one ResultSet is interleaved with the * reading of another, each must have been generated by different * Statements. All statement execute methods implicitly close a * statement's current ResultSet if an open one exists. * * @see java.sql.Statement * @see ResultSet */ public class Statement extends org.postgresql.Statement implements java.sql.Statement { private Connection connection; // The connection who created us private Vector batch = null; private int resultsettype; // the resultset type to return private int concurrency; // is it updateable or not? /* * Constructor for a Statement. It simply sets the connection * that created us. * * @param c the Connection instantation that creates us */ public Statement (Connection c) { connection = c; resultsettype = java.sql.ResultSet.TYPE_SCROLL_INSENSITIVE; concurrency = java.sql.ResultSet.CONCUR_READ_ONLY; } /* * Execute a SQL statement that retruns a single ResultSet * * @param sql typically a static SQL SELECT statement * @return a ResulSet that contains the data produced by the query * @exception SQLException if a database access error occurs */ public java.sql.ResultSet executeQuery(String sql) throws SQLException { this.execute(sql); while (result != null && !((org.postgresql.ResultSet)result).reallyResultSet()) result = ((org.postgresql.ResultSet)result).getNext(); if (result == null) throw new PSQLException("postgresql.stat.noresult"); return result; } /* * Execute a SQL INSERT, UPDATE or DELETE statement. In addition * SQL statements that return nothing such as SQL DDL statements * can be executed * * @param sql a SQL statement * @return either a row count, or 0 for SQL commands * @exception SQLException if a database access error occurs */ public int executeUpdate(String sql) throws SQLException { this.execute(sql); if (((org.postgresql.ResultSet)result).reallyResultSet()) throw new PSQLException("postgresql.stat.result"); return this.getUpdateCount(); } /* * setCursorName defines the SQL cursor name that will be used by * subsequent execute methods. This name can then be used in SQL * positioned update/delete statements to identify the current row * in the ResultSet generated by this statement. If a database * doesn't support positioned update/delete, this method is a * no-op. * * <p><B>Note:</B> By definition, positioned update/delete execution * must be done by a different Statement than the one which * generated the ResultSet being used for positioning. Also, cursor * names must be unique within a Connection. * * <p>We throw an additional constriction. There can only be one * cursor active at any one time. * * @param name the new cursor name * @exception SQLException if a database access error occurs */ public void setCursorName(String name) throws SQLException { connection.setCursorName(name); } /* * Execute a SQL statement that may return multiple results. We * don't have to worry about this since we do not support multiple * ResultSets. 
You can use getResultSet or getUpdateCount to * retrieve the result. * * @param sql any SQL statement * @return true if the next result is a ResulSet, false if it is * an update count or there are no more results * @exception SQLException if a database access error occurs */ public boolean execute(String sql) throws SQLException { if (escapeProcessing) sql = escapeSQL(sql); // New in 7.1, if we have a previous resultset then force it to close // This brings us nearer to compliance, and helps memory management. // Internal stuff will call ExecSQL directly, bypassing this. if (result != null) { java.sql.ResultSet rs = getResultSet(); if (rs != null) rs.close(); } // New in 7.1, pass Statement so that ExecSQL can customise to it result = connection.ExecSQL(sql, this); // New in 7.1, required for ResultSet.getStatement() to work ((org.postgresql.jdbc2.ResultSet)result).setStatement(this); return (result != null && ((org.postgresql.ResultSet)result).reallyResultSet()); } /* * getUpdateCount returns the current result as an update count, * if the result is a ResultSet or there are no more results, -1 * is returned. It should only be called once per result. * * @return the current result as an update count. * @exception SQLException if a database access error occurs */ public int getUpdateCount() throws SQLException { if (result == null) return -1; if (((org.postgresql.ResultSet)result).reallyResultSet()) return -1; return ((org.postgresql.ResultSet)result).getResultCount(); } /* * getMoreResults moves to a Statement's next result. If it returns * true, this result is a ResulSet. * * @return true if the next ResultSet is valid * @exception SQLException if a database access error occurs */ public boolean getMoreResults() throws SQLException { result = ((org.postgresql.ResultSet)result).getNext(); return (result != null && ((org.postgresql.ResultSet)result).reallyResultSet()); } // ** JDBC 2 Extensions ** public void addBatch(String sql) throws SQLException { if (batch == null) batch = new Vector(); batch.addElement(sql); } public void clearBatch() throws SQLException { if (batch != null) batch.removeAllElements(); } public int[] executeBatch() throws SQLException { if (batch == null) batch = new Vector(); int size = batch.size(); int[] result = new int[size]; int i = 0; try { for (i = 0;i < size;i++) result[i] = this.executeUpdate((String)batch.elementAt(i)); } catch (SQLException e) { int[] resultSucceeded = new int[i]; System.arraycopy(result, 0, resultSucceeded, 0, i); PBatchUpdateException updex = new PBatchUpdateException("postgresql.stat.batch.error", new Integer(i), batch.elementAt(i), resultSucceeded); updex.setNextException(e); throw updex; } finally { batch.removeAllElements(); } return result; } public void Cancel() throws SQLException { connection.cancelQuery(); } public java.sql.Connection getConnection() throws SQLException { return (java.sql.Connection)connection; } public int getFetchDirection() throws SQLException { throw new PSQLException("postgresql.psqlnotimp"); } public int getFetchSize() throws SQLException { // This one can only return a valid value when were a cursor? 
throw org.postgresql.Driver.notImplemented(); } public int getResultSetConcurrency() throws SQLException { // new in 7.1 return concurrency; } public int getResultSetType() throws SQLException { // new in 7.1 return resultsettype; } public void setFetchDirection(int direction) throws SQLException { throw org.postgresql.Driver.notImplemented(); } public void setFetchSize(int rows) throws SQLException { throw org.postgresql.Driver.notImplemented(); } /* * New in 7.1 */ public void setResultSetConcurrency(int value) throws SQLException { concurrency = value; } /* * New in 7.1 */ public void setResultSetType(int value) throws SQLException { resultsettype = value; } }
changed Cancel to cancel
org/postgresql/jdbc2/Statement.java
changed Cancel to cancel
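Note on the change recorded in this entry: java.sql.Statement names the interface method cancel() in lower case, so the lower-case spelling is what callers coded against the JDBC interface actually invoke; the upper-case Cancel() in the old file would not be reached through java.sql.Statement. A minimal sketch of the corrected method, reusing the connection.cancelQuery() helper already present in the driver source above:

    public void cancel() throws SQLException {
        // Delegate to the owning connection's cancelQuery() helper, as in the driver source above.
        connection.cancelQuery();
    }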
Java
mit
1c136d29cee731dd6f837ceb54c47fa2d6a67b86
0
rossdrew/emuRox,rossdrew/emuRox
src/test/java/com/rox/emu/p6502/com/rox/emu/LDAProperty.java
package com.rox.emu.p6502.com.rox.emu; import com.rox.emu.Memory; import com.rox.emu.p6502.CPU; //@RunWith(JUnitQuickcheck.class) public class LDAProperty { private Memory memory; private CPU processor; // @Before // public void setUp() { // memory = new SimpleMemory(65534); // memory.setByteAt(0x0, 0xFFFC); // memory.setByteAt(0x0, 0xFFFD); // // processor = new CPU(memory); // processor.reset(); // } // // //No Generators are being loaded for int in the GeneratorRepository // @Property // public void loadAccumulator(int byteValue){ // int[] program = {OP_LDA_I, byteValue}; // memory.setMemory(0, program); // // processor.step(); // // Registers registers = processor.getRegisters(); // assertEquals(byteValue, registers.getRegister(Registers.REG_ACCUMULATOR)); // assertEquals(program.length, registers.getPC()); // } }
Removing property testing file for now
src/test/java/com/rox/emu/p6502/com/rox/emu/LDAProperty.java
Removing property testing file for now
Java
mit
12d9210789e1f2fe9f89d8242ccb98901a93b41f
0
alex-sobol/jmade,alex-sobol/jmade
package org.jmade; import org.jmade.core.AgentRunner; import org.jmade.core.event.persistence.EventLogRepository; import org.jmade.core.event.persistence.EventLogger; import org.jmade.example.Buyer; import org.jmade.example.Seller; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; @SpringBootApplication @Configuration @EnableAutoConfiguration @ComponentScan public class PlatformApplication { public static void main(String[] args) throws InterruptedException { TestModule2.doTest(); TestModule.doTest(); ConfigurableApplicationContext context = SpringApplication.run(PlatformApplication.class, args); EventLogRepository eventLogRepository = context.getBean(EventLogRepository.class); eventLogRepository.deleteAll(); AgentRunner agentRunner = new AgentRunner(); new EventLogger(eventLogRepository); agentRunner.run(new Buyer("buyer1", 1000.0, 2.0, 0.1)); agentRunner.run(new Buyer("buyer2", 1000.0, 1.0, 1.0)); agentRunner.run(new Seller("seller")); } }
platform/src/main/java/org/jmade/PlatformApplication.java
package org.jmade; import org.jmade.core.AgentRunner; import org.jmade.core.event.persistence.EventLogRepository; import org.jmade.core.event.persistence.EventLogger; import org.jmade.example.Buyer; import org.jmade.example.Seller; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; @SpringBootApplication @Configuration @EnableAutoConfiguration @ComponentScan public class PlatformApplication { public static void main(String[] args) throws InterruptedException { TestModule2.doTest(); TestModule.doTest(); ConfigurableApplicationContext context = SpringApplication.run(PlatformApplication.class, args); EventLogRepository eventLogRepository = context.getBean(EventLogRepository.class); eventLogRepository.deleteAll(); AgentRunner agentRunner = new AgentRunner(); //agentRunner.run(new MessagesLogger(messageLogRepository)); new EventLogger(eventLogRepository); agentRunner.run(new Buyer("buyer1", 1000.0, 2.0, 0.1)); agentRunner.run(new Buyer("buyer2", 1000.0, 1.0, 1.0)); agentRunner.run(new Seller("seller")); } }
Deleted unused commented code
platform/src/main/java/org/jmade/PlatformApplication.java
Deleted unused commented code
Java
mit
e176d022b333a48006d828640e3af0bc93d7fc5c
0
Dexor157/collidethings
package drawcube; import javax.swing.*; import java.awt.*; import java.awt.geom.*; import java.math.*; import java.util.*; public class Drawcube extends JFrame { public Drawcube(){ super ("Drawcube"); setSize(1300,800); setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); setLayout(null); setResizable(false); setVisible(true); } public double howlong(int x1, int x2, int y1, int y2){ double length; length = Math.sqrt(Math.abs(((x2 - x1)^2) + ((y2 - y1)^2))); return length; } public double howlong(int width, int height){ double length; length = Math.sqrt(Math.pow(width,2) + (Math.pow(height,2))); return length; } public void paint(Graphics g) { super.paint(g); // fixes the immediate problem. int x = 400; int y = 400; int width = 150; int height = 150; double angle = Math.PI/4; //double slantlength = howlong(width, height); double slantlength = width; System.out.println(slantlength); int slantwidth = (int)(slantlength * Math.cos(angle)); int slantheight = (int)(slantlength * Math.sin(angle)); System.out.println(slantwidth); System.out.println(slantheight); Graphics2D g2 = (Graphics2D) g; //This one is the sean version actually //SEAN VERSION SERIOUSLY GUYS Line2D lin0 = new Line2D.Float(x, y, x-width,y); Line2D lin1 = new Line2D.Float(x, y, x, y-height); Line2D lin2 = new Line2D.Float(x, y, x+slantwidth, y-slantheight); Line2D lin3 = new Line2D.Float(x, y-height, x-width, y-height); g2.draw(lin0); g2.draw(lin1); g2.draw(lin2); g2.draw(lin3); } public static void main(String[] args) { Drawcube frame = new Drawcube(); } }
drawcube/src/drawcube/Drawcube.java
package drawcube; import javax.swing.*; import java.awt.*; import java.awt.geom.*; import java.math.*; import java.util.*; public class Drawcube extends JFrame { public Drawcube(){ super ("Drawcube"); setSize(1300,800); setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); setLayout(null); setResizable(false); setVisible(true); } public double howlong(int x1, int x2, int y1, int y2){ double length; length = Math.sqrt(Math.abs(((x2 - x1)^2) + ((y2 - y1)^2))); return length; } public double howlong(int width, int height){ double length; length = Math.sqrt(Math.pow(width,2) + (Math.pow(height,2))); return length; } public void paint(Graphics g) { super.paint(g); // fixes the immediate problem. int x = 400; int y = 400; int width = 150; int height = 150; double angle = Math.PI/4; //double slantlength = howlong(width, height); double slantlength = width; System.out.println(slantlength); int slantwidth = (int)(slantlength * Math.cos(angle)); int slantheight = (int)(slantlength * Math.sin(angle)); System.out.println(slantwidth); System.out.println(slantheight); Graphics2D g2 = (Graphics2D) g; //This one is the sean version actually //SEAN VERSION SERIOUSLY GUYS Line2D lin0 = new Line2D.Float(x, y, x-width,y); Line2D lin1 = new Line2D.Float(x, y, x, y-height); Line2D lin2 = new Line2D.Float(x, y, x+slantwidth, y-slantheight); Line2D lin3 = new Line2D.Float(x, y-height, x-width, y-height); g2.draw(lin0); g2.draw(lin1); g2.draw(lin2); g2.draw(lin3); } public static void main(String[] args) { Drawcube frame = new Drawcube(); } }
commit caps message to sean hopefully
drawcube/src/drawcube/Drawcube.java
commit caps message to sean hopefully
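Aside on Drawcube.java as stored above: the four-argument howlong helper contains a latent bug, because in Java the ^ operator is bitwise XOR rather than exponentiation, so (x2 - x1)^2 does not square the difference (the two-argument overload already uses Math.pow correctly, and the buggy overload happens to be unused in paint). A corrected sketch of that helper, assuming the intent is the usual Euclidean distance between two points:

    public double howlong(int x1, int x2, int y1, int y2) {
        // Square the coordinate differences explicitly; '^' would XOR the ints instead of squaring them.
        double dx = x2 - x1;
        double dy = y2 - y1;
        return Math.sqrt(dx * dx + dy * dy);
    }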
Java
mit
93d70158d164ed2e8c60a85395a92286c75fc445
0
InnoSystems/RestOne,InnoSystems/RestOne,InnoSystems/RestOne
package com.restOne.greeting; import java.util.concurrent.atomic.AtomicLong; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @RestController public class GreetingController { private static final String template = "Hello, %s!"; private final AtomicLong counter = new AtomicLong(); @RequestMapping("/greeting") public Greeting greeting(@RequestParam(value = "name", defaultValue = "Kakki") String name) { return new Greeting(counter.incrementAndGet(), String.format(template, name)); } }
src/main/java/com/restOne/greeting/GreetingController.java
package com.restOne.greeting; import java.util.concurrent.atomic.AtomicLong; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @RestController public class GreetingController { private static final String template = "Hello, %s!"; private final AtomicLong counter = new AtomicLong(); @RequestMapping("/greeting") public Greeting greeting(@RequestParam(value = "name", defaultValue = "Kakki") String name) { return new Greeting(counter.incrementAndGet(), String.format(template, name)); } }
Remove unneeded imports
src/main/java/com/restOne/greeting/GreetingController.java
Remove unneeded imports
Java
mit
ac025831b199f46a15e938b44248b0da76223296
0
phenoscape/PhenoscapeOBD-WS
package org.obd.ws.application.test; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.sql.SQLException; import org.junit.Test; import org.obd.query.Shard; import org.obd.query.impl.AbstractSQLShard; import org.obd.query.impl.OBDSQLShard; import org.obd.ws.resources.AutoCompleteResource; import org.obd.ws.resources.TermResource; import org.restlet.resource.Representation; import org.restlet.resource.ResourceException; public class OBDApplicationTest { @Test public void testTermSearch() throws ResourceException, IOException, SQLException, ClassNotFoundException { File connParamFile = new File("testfiles/connectionParameters"); BufferedReader br = new BufferedReader(new FileReader(connParamFile)); String[] connParams = new String[3]; String param; int j = 0; while ((param = br.readLine()) != null) { connParams[j++] = param; } Shard obdsql = new OBDSQLShard(); ((AbstractSQLShard) obdsql).connect(connParams[0], connParams[1], connParams[2]); TermResource tr = new TermResource(obdsql, "ZFA:0000107"); Representation rep2 = tr.represent(tr.getVariants().get(0)); AutoCompleteResource acr = new AutoCompleteResource(obdsql, "basihyal", new String[]{"true", "true", "true", "TTO:TAO:COLLECTION"}); Representation rep = acr.represent(acr.getVariants().get(0)); } }
test/org/obd/ws/application/test/OBDApplicationTest.java
package org.obd.ws.application.test; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.sql.SQLException; import org.junit.Test; import org.obd.query.Shard; import org.obd.query.impl.AbstractSQLShard; import org.obd.query.impl.OBDSQLShard; import org.obd.ws.resources.AutoCompleteResource; import org.obd.ws.resources.TermResource; import org.restlet.resource.Representation; public class OBDApplicationTest { @Test public void testTermSearch() throws IOException, SQLException, ClassNotFoundException{ File connParamFile = new File("testfiles/connectionParameters"); BufferedReader br = new BufferedReader(new FileReader(connParamFile)); String[] connParams = new String[3]; String param; int j = 0; while ((param = br.readLine()) != null) { connParams[j++] = param; } Shard obdsql = new OBDSQLShard(); ((AbstractSQLShard) obdsql).connect(connParams[0], connParams[1], connParams[2]); TermResource tr = new TermResource(obdsql, "ZFA:0000107"); Representation rep2 = tr.getRepresentation(tr.getVariants().get(0)); AutoCompleteResource acr = new AutoCompleteResource(obdsql, "basihyal", new String[]{"true", "true", "true", "TTO:TAO:COLLECTION"}); Representation rep = acr.getRepresentation(acr.getVariants().get(0)); } }
Committing test file before we move it.
test/org/obd/ws/application/test/OBDApplicationTest.java
Committing test file before we move it.
Java
mit
e8898495a29b0b215fdc93580f1ecaaeb2698a25
0
cslinmiso/LineAPI4J
package io.cslinmiso.line.api.impl; /** * * @Package: io.cslinmiso.line.api.impl * @FileName: LineApiImpl.java * @author: treylin * @date: 2016/03/25, 上午 10:46:25 * * <pre> * The MIT License (MIT) * * Copyright (c) 2015 Trey Lin * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * </pre> */ import io.cslinmiso.line.api.LineApi; import io.cslinmiso.line.model.LoginCallback; import io.cslinmiso.line.utils.Utility; import java.io.IOException; import java.io.InputStream; import java.math.BigInteger; import java.nio.charset.StandardCharsets; import java.security.KeyFactory; import java.security.interfaces.RSAPublicKey; import java.security.spec.RSAPublicKeySpec; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.crypto.Cipher; import line.thrift.AuthQrcode; import line.thrift.Contact; import line.thrift.Group; import line.thrift.IdentityProvider; import line.thrift.LoginResult; import line.thrift.LoginResultType; import line.thrift.Message; import line.thrift.Operation; import line.thrift.Profile; import line.thrift.Room; import line.thrift.TMessageBoxWrapUp; import line.thrift.TMessageBoxWrapUpResponse; import line.thrift.TalkException; import line.thrift.TalkService; import line.thrift.TalkService.Client; import org.apache.commons.codec.binary.Hex; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.thrift.TException; import org.apache.thrift.protocol.TCompactProtocol; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.THttpClient; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; import com.mashape.unirest.http.HttpResponse; import com.mashape.unirest.http.JsonNode; import com.mashape.unirest.http.Unirest; public class LineApiImpl implements LineApi { public enum OSType { WINDOWS, MAC } private static final String EMAIL_REGEX = "[^@]+@[^@]+\\.[^@]+"; private static final String X_LINE_ACCESS = "X-Line-Access"; /** The ip. */ private String ip = "127.0.0.1"; /** The line application version. */ private String version = "4.7.0"; /** The com_name. */ private final String systemName; private final OSType osType; private String id; private String password; private String authToken; private String verifier; private String certificate; /** The revision. */ private long revision; /** The _headers. 
*/ private Map<String, String> headers = new HashMap<String, String>(); /** The _client. */ public TalkService.Client client; public LineApiImpl(OSType osType, String systemName) { this.osType = osType; this.systemName = systemName; initHeaders(); } public LineApiImpl() { this(OSType.MAC, "Line4J"); } public static void main(String[] args) { // LineApi api = new LineApiImpl(); // try { // api.login("xxxx", "xxxx"); // } catch (java.net.SocketTimeoutException e) { // // setAwaitforVerify false // } catch (Exception e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } } private void initHeaders() { String osVersion; String userAgent; String app; if (osType.equals(OSType.WINDOWS)) { osVersion = "6.1.7600-7-x64"; userAgent = "DESKTOP:WIN:" + osVersion + "(" + version + ")"; app = "DESKTOPWIN\t" + osVersion + "\tWINDOWS\t" + version; } else { osVersion = "10.10.4-YOSEMITE-x64"; userAgent = "DESKTOP:MAC:" + osVersion + "(" + version + ")"; app = "DESKTOPMAC\t" + osVersion + "\tMAC\t" + version; } headers.put("User-Agent", userAgent); headers.put("X-Line-Application", app); } /** * Ready. * * @throws TTransportException */ private Client ready() throws TTransportException { THttpClient transport = new THttpClient(LINE_HTTP_IN_URL); transport.setCustomHeaders(headers); transport.open(); return new TalkService.Client(new TCompactProtocol(transport)); } @Override public LoginResult login(@Nonnull String id, @Nonnull String password) throws Exception { return login(id, password, null, null); } @Override public LoginResult login(@Nonnull String id, @Nonnull String password, @Nullable String certificate, @Nullable LoginCallback loginCallback) throws Exception { this.id = id; this.password = password; this.certificate = certificate; IdentityProvider provider; Map<String, String> json; String sessionKey; boolean keepLoggedIn = true; String accessLocation = this.ip; // Login to LINE server. 
if (id.matches(EMAIL_REGEX)) { provider = IdentityProvider.LINE; // LINE json = getCertResult(LINE_SESSION_LINE_URL); } else { provider = IdentityProvider.NAVER_KR; // NAVER json = getCertResult(LINE_SESSION_NAVER_URL); } sessionKey = json.get("session_key"); String message = (char) (sessionKey.length()) + sessionKey + (char) (id.length()) + id + (char) (password.length()) + password; String[] keyArr = json.get("rsa_key").split(","); String keyName = keyArr[0]; String n = keyArr[1]; String e = keyArr[2]; BigInteger modulus = new BigInteger(n, 16); BigInteger pubExp = new BigInteger(e, 16); KeyFactory keyFactory = KeyFactory.getInstance("RSA"); RSAPublicKeySpec pubKeySpec = new RSAPublicKeySpec(modulus, pubExp); RSAPublicKey publicKey = (RSAPublicKey) keyFactory.generatePublic(pubKeySpec); Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding"); cipher.init(Cipher.ENCRYPT_MODE, publicKey); byte[] enBytes = cipher.doFinal(message.getBytes(StandardCharsets.UTF_8)); String encryptString = Hex.encodeHexString(enBytes); THttpClient transport = new THttpClient(LINE_HTTP_URL); transport.setCustomHeaders(headers); transport.open(); LoginResult result; try { TProtocol protocol = new TCompactProtocol(transport); this.client = new TalkService.Client(protocol); result = this.client.loginWithIdentityCredentialForCertificate(provider, keyName, encryptString, keepLoggedIn, accessLocation, this.systemName, this.certificate); if (result.getType() == LoginResultType.REQUIRE_DEVICE_CONFIRM) { headers.put(X_LINE_ACCESS, result.getVerifier()); if (loginCallback != null) { loginCallback.onDeviceConfirmRequired(result.getPinCode()); } else { throw new Exception("Device confirmation is required. Please set " + LoginCallback.class.getSimpleName() + " to get the pin code"); } // await for pinCode to be certified, it will return a verifier afterward. loginWithVerifierForCertificate(); } else if (result.getType() == LoginResultType.SUCCESS) { // if param certificate has passed certification setAuthToken(result.getAuthToken()); } } finally { close(); } // Once the client passed the verification, switch connection to HTTP_IN_URL client = ready(); return result; } /* * (non-Javadoc) * * @see api.line.LineApi#loginWithAuthToken(java.lang.String) */ public void loginWithAuthToken(String authToken) throws Exception { if (StringUtils.isNotEmpty(authToken)) { setAuthToken(authToken); } THttpClient transport = new THttpClient(LINE_HTTP_URL); transport.setCustomHeaders(headers); transport.open(); TProtocol protocol = new TCompactProtocol(transport); setClient(new TalkService.Client(protocol)); } /* * (non-Javadoc) * * @see api.line.LineApi#loginWithQrCode() */ public AuthQrcode loginWithQrCode() throws Exception { // Request QrCode from LINE server. // Map<String, String> json = null; boolean keepLoggedIn = false; THttpClient transport = new THttpClient(LINE_HTTP_URL); transport.setCustomHeaders(headers); transport.open(); TProtocol protocol = new TCompactProtocol(transport); this.client = new TalkService.Client(protocol); AuthQrcode result = this.client.getAuthQrcode(keepLoggedIn, systemName); headers.put(X_LINE_ACCESS, result.getVerifier()); System.out.println("Retrieved QR Code."); return result; // await for QR code to be certified, it will return a verifier afterward. 
// loginWithVerifier(); } /* * (non-Javadoc) * * @see api.line.LineApi#loginWithVerifier() */ public String loginWithVerifierForCertificate() throws Exception { Map json; json = getCertResult(LINE_CERTIFICATE_URL); if (json == null) { throw new Exception("fail to pass certificate check."); } // login with verifier json = (Map) json.get("result"); String verifierLocal = (String) json.get("verifier"); this.verifier = verifierLocal; LoginResult result = this.client.loginWithVerifierForCertificate(verifierLocal); if (result.getType() == LoginResultType.SUCCESS) { setAuthToken(result.getAuthToken()); setCertificate(result.getCertificate()); return result.getCertificate(); } else if (result.getType() == LoginResultType.REQUIRE_QRCODE) { throw new Exception("require QR code"); } else { throw new Exception("require device confirm"); } } public Map getCertResult(String url) throws Exception { Unirest unirest = new Unirest(); // set timed out in 2 mins. Unirest.setTimeouts(120000, 120000); HttpResponse<JsonNode> jsonResponse = unirest.get(url).headers(this.headers).asJson(); return Utility.toMap(jsonResponse.getBody().getObject()); } public boolean postContent(String url, Map<String, Object> data, InputStream is) throws Exception { Unirest unirest = new Unirest(); byte[] byteArray = IOUtils.toByteArray(is); HttpResponse<JsonNode> jsonResponse = unirest.post(url).headers(this.headers).fields(data).field("file", byteArray, "").asJson(); return jsonResponse.getStatus() == 201; } /** * * After login, update authToken to avoid expiration of authToken. This method skip the PinCode * validation step. * **/ public boolean updateAuthToken() throws Exception { if (this.certificate != null) { this.login(this.id, this.password, this.certificate, null); this.loginWithAuthToken(this.authToken); return true; } else { throw new Exception("You need to login first. 
There is no valid certificate"); } } /** * find and add Contact by user id * * @return * @throws TException * @throws TalkException **/ public Contact findContactByUserid(String userid) throws TalkException, TException { return this.client.findContactByUserid(userid); } /** * find and add Contact by user id * * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findAndAddContactsByUserid(int reqSeq, String userid) throws TalkException, TException { return this.client.findAndAddContactsByUserid(0, userid); } /** * find contacts by email (not tested) * * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findContactsByEmail(Set<String> emails) throws TalkException, TException { return this.client.findContactsByEmail(emails); } /** * find and add contact by email (not tested) * * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findAndAddContactsByEmail(int reqSeq, Set<String> emails) throws TalkException, TException { return this.client.findAndAddContactsByEmail(0, emails); } /** * find and add contact by phone number (not tested) * * @return * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findContactsByPhone( Set<String> phones) throws TalkException, TException { return this.client.findContactsByPhone(phones); } /** * find and add contact by phone number (not tested) * * @return * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findAndAddContactsByPhone(int reqSeq, Set<String> phones) throws TalkException, TException { return this.client.findAndAddContactsByPhone(0, phones); } /** * Get profile information * * returns Profile object; - picturePath - displayName - phone (base64 encoded?) 
- * allowSearchByUserid - pictureStatus - userid - mid # used for unique id for account - * phoneticName - regionCode - allowSearchByEmail - email - statusMessage **/ public Profile getProfile() throws TalkException, TException { return this.client.getProfile(); } public List<String> getAllContactIds() throws TalkException, TException { /** Get all contacts of your LINE account **/ return this.client.getAllContactIds(); } public List<String> getBlockedContactIds() throws TalkException, TException { /** Get all blocked contacts of your LINE account **/ return this.client.getBlockedContactIds(); } public List<String> getHiddenContactIds() throws TalkException, TException { /** Get all hidden contacts of your LINE account **/ return this.client.getHiddenContactMids(); } public List<Contact> getContacts(List<String> ids) throws TalkException, TException { /** * Get contact information list from ids * * {returns{ List of Contact list;} - status - capableVideoCall - dispalyName - settings - * pictureStatus - capableVoiceCall - capableBuddy - mid - displayNameOverridden - relation - * thumbnailUrl_ - createdTime - facoriteTime - capableMyhome - attributes - type - phoneticName * - statusMessage **/ // if type(ids) != list{ // msg = "argument should be list of contact ids" return this.client.getContacts(ids); } public Room createRoom(int reqSeq, List<String> ids) throws TalkException, TException { /** Create a chat room **/ // reqSeq = 0; return this.client.createRoom(reqSeq, ids); } public Room getRoom(String roomId) throws TalkException, TException { /** Get a chat room **/ return this.client.getRoom(roomId); } public void inviteIntoRoom(String roomId, List<String> contactIds) throws TalkException, TException { /** Invite contacts into room **/ this.client.inviteIntoRoom(0, roomId, contactIds); } public void leaveRoom(String id) throws TalkException, TException { /** Leave a chat room **/ this.client.leaveRoom(0, id); } public Group createGroup(int seq, String name, List<String> ids) throws TalkException, TException { /** Create a group **/ // seq = 0; return this.client.createGroup(seq, name, ids); } public List<Group> getGroups(List<String> groupIds) throws TalkException, TException { /** Get a list of group with ids **/ // if type(ids) != list{ // msg = "argument should be list of group ids" // this.raise_error(msg) return this.client.getGroups(groupIds); } public List<String> getGroupIdsJoined() throws TalkException, TException { /** Get group id that you joined **/ return this.client.getGroupIdsJoined(); } public List<String> getGroupIdsInvited() throws TalkException, TException { /** Get group id that you invited **/ return this.client.getGroupIdsInvited(); } public void acceptGroupInvitation(int seq, String groupId) throws TalkException, TException { /** Accept a group invitation **/ // seq = 0; this.client.acceptGroupInvitation(seq, groupId); } public void cancelGroupInvitation(int seq, String groupId, List<String> contactIds) throws TalkException, TException { /** Cancel a group invitation **/ // seq = 0; this.client.cancelGroupInvitation(seq, groupId, contactIds); } public void inviteIntoGroup(int seq, String groupId, List<String> contactIds) throws TalkException, TException { /** Invite contacts into group **/ // seq = 0; this.client.inviteIntoGroup(seq, groupId, contactIds); } public void kickoutFromGroup(int seq, String groupId, List<String> contactIds) throws TalkException, TException { /** Kick a group members **/ // seq = 0; this.client.kickoutFromGroup(seq, groupId, contactIds); 
} public void leaveGroup(String id) throws TalkException, TException { /** Leave a group **/ this.client.leaveGroup(0, id); } public List<Message> getRecentMessages(String id, int count) throws TalkException, TException { /** Get recent messages from `id` **/ return this.client.getRecentMessages(id, count); } public Message sendMessage(int seq, Message message) throws TalkException, TException { /** * Send a message to `id`. `id` could be contact id or group id * * param message: `message` instance **/ return this.client.sendMessage(seq, message); } public long getLastOpRevision() throws TalkException, TException { return this.client.getLastOpRevision(); } public List<Operation> fetchOperations(long revision, int count) throws TalkException, TException { return this.client.fetchOperations(revision, count); } public TMessageBoxWrapUp getMessageBoxCompactWrapUp(String id) { try { return this.client.getMessageBoxCompactWrapUp(id); } catch (Exception e) { return null; } } public TMessageBoxWrapUpResponse getMessageBoxCompactWrapUpList(int start, int count) throws Exception { try { return this.client.getMessageBoxCompactWrapUpList(start, count); } catch (Exception e) { throw new Exception(e.getMessage()); } } private void setAuthToken(String token) { headers.put(X_LINE_ACCESS, token); this.authToken = token; } public TalkService.Client getClient() { return client; } public void setClient(TalkService.Client client) { this.client = client; } public String getLineAccessToken() { return headers.get(X_LINE_ACCESS); } public String getCertificate() { return certificate; } public void setCertificate(String certificate) { this.certificate = certificate; } @Override public void close() throws IOException { if (client == null) { return; } TTransport inputTransport = client.getInputProtocol().getTransport(); inputTransport.close(); TTransport outputTransport = client.getOutputProtocol().getTransport(); outputTransport.close(); } }
src/main/java/io/cslinmiso/line/api/impl/LineApiImpl.java
package io.cslinmiso.line.api.impl; /** * * @Package: io.cslinmiso.line.api.impl * @FileName: LineApiImpl.java * @author: treylin * @date: 2016/03/25, 上午 10:46:25 * * <pre> * The MIT License (MIT) * * Copyright (c) 2015 Trey Lin * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * </pre> */ import io.cslinmiso.line.api.LineApi; import io.cslinmiso.line.model.LoginCallback; import io.cslinmiso.line.utils.Utility; import java.io.IOException; import java.io.InputStream; import java.math.BigInteger; import java.nio.charset.StandardCharsets; import java.security.KeyFactory; import java.security.interfaces.RSAPublicKey; import java.security.spec.RSAPublicKeySpec; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.crypto.Cipher; import line.thrift.AuthQrcode; import line.thrift.Contact; import line.thrift.Group; import line.thrift.IdentityProvider; import line.thrift.LoginResult; import line.thrift.LoginResultType; import line.thrift.Message; import line.thrift.Operation; import line.thrift.Profile; import line.thrift.Room; import line.thrift.TMessageBoxWrapUp; import line.thrift.TMessageBoxWrapUpResponse; import line.thrift.TalkException; import line.thrift.TalkService; import line.thrift.TalkService.Client; import org.apache.commons.codec.binary.Hex; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.thrift.TException; import org.apache.thrift.protocol.TCompactProtocol; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.THttpClient; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; import com.mashape.unirest.http.HttpResponse; import com.mashape.unirest.http.JsonNode; import com.mashape.unirest.http.Unirest; public class LineApiImpl implements LineApi { public enum OSType { WINDOWS, MAC } private static final String EMAIL_REGEX = "[^@]+@[^@]+\\.[^@]+"; private static final String X_LINE_ACCESS = "X-Line-Access"; /** The ip. */ private String ip = "127.0.0.1"; /** The line application version. */ private String version = "4.7.0"; /** The com_name. */ private final String systemName; private final OSType osType; private String id; private String password; private String authToken; private String verifier; private String certificate; /** The revision. */ private long revision; /** The _headers. 
*/ private Map<String, String> headers = new HashMap<String, String>(); /** The _client. */ public TalkService.Client client; public LineApiImpl(OSType osType, String systemName) { this.osType = osType; this.systemName = systemName; initHeaders(); } public LineApiImpl() { this(OSType.MAC, "Line4J"); } public static void main(String[] args) { // LineApi api = new LineApiImpl(); // try { // api.login("xxxx", "xxxx"); // } catch (java.net.SocketTimeoutException e) { // // setAwaitforVerify false // } catch (Exception e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } } private void initHeaders() { String osVersion; String userAgent; String app; if (osType.equals(OSType.WINDOWS)) { osVersion = "6.1.7600-7-x64"; userAgent = "DESKTOP:WIN:" + osVersion + "(" + version + ")"; app = "DESKTOPWIN\t" + osVersion + "\tWINDOWS\t" + version; } else { osVersion = "10.10.4-YOSEMITE-x64"; userAgent = "DESKTOP:MAC:" + osVersion + "(" + version + ")"; app = "DESKTOPMAC\t" + osVersion + "\tMAC\t" + version; } headers.put("User-Agent", userAgent); headers.put("X-Line-Application", app); } /** * Ready. * * @throws TTransportException */ private Client ready() throws TTransportException { THttpClient transport = new THttpClient(LINE_HTTP_IN_URL); transport.setCustomHeaders(headers); transport.open(); return new TalkService.Client(new TCompactProtocol(transport)); } @Override public LoginResult login(@Nonnull String id, @Nonnull String password) throws Exception { return login(id, password, null, null); } @Override public LoginResult login(@Nonnull String id, @Nonnull String password, @Nullable String certificate, @Nullable LoginCallback loginCallback) throws Exception { this.id = id; this.password = password; this.certificate = certificate; IdentityProvider provider; Map<String, String> json; String sessionKey; boolean keepLoggedIn = true; String accessLocation = this.ip; // Login to LINE server. 
if (id.matches(EMAIL_REGEX)) { provider = IdentityProvider.LINE; // LINE json = getCertResult(LINE_SESSION_LINE_URL); } else { provider = IdentityProvider.NAVER_KR; // NAVER json = getCertResult(LINE_SESSION_NAVER_URL); } sessionKey = json.get("session_key"); String message = (char) (sessionKey.length()) + sessionKey + (char) (id.length()) + id + (char) (password.length()) + password; String[] keyArr = json.get("rsa_key").split(","); String keyName = keyArr[0]; String n = keyArr[1]; String e = keyArr[2]; BigInteger modulus = new BigInteger(n, 16); BigInteger pubExp = new BigInteger(e, 16); KeyFactory keyFactory = KeyFactory.getInstance("RSA"); RSAPublicKeySpec pubKeySpec = new RSAPublicKeySpec(modulus, pubExp); RSAPublicKey publicKey = (RSAPublicKey) keyFactory.generatePublic(pubKeySpec); Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding"); cipher.init(Cipher.ENCRYPT_MODE, publicKey); byte[] enBytes = cipher.doFinal(message.getBytes(StandardCharsets.UTF_8)); String encryptString = Hex.encodeHexString(enBytes); THttpClient transport = new THttpClient(LINE_HTTP_URL); transport.setCustomHeaders(headers); transport.open(); LoginResult result; try { TProtocol protocol = new TCompactProtocol(transport); this.client = new TalkService.Client(protocol); result = this.client.loginWithIdentityCredentialForCertificate(provider, keyName, encryptString, keepLoggedIn, accessLocation, this.systemName, this.certificate); if (result.getType() == LoginResultType.REQUIRE_DEVICE_CONFIRM) { headers.put(X_LINE_ACCESS, result.getVerifier()); if (loginCallback != null) { loginCallback.onDeviceConfirmRequired(result.getPinCode()); } else { throw new Exception("Device confirmation is required. Please set " + LoginCallback.class.getSimpleName() + " to get the pin code"); } // await for pinCode to be certified, it will return a verifier afterward. loginWithVerifierForCertificate(); } else if (result.getType() == LoginResultType.SUCCESS) { // if param certificate has passed certification setAuthToken(result.getAuthToken()); } } finally { close(); } // Once the client passed the verification, switch connection to HTTP_IN_URL client = ready(); return result; } /* * (non-Javadoc) * * @see api.line.LineApi#loginWithAuthToken(java.lang.String) */ public void loginWithAuthToken(String authToken) throws Exception { if (StringUtils.isNotEmpty(authToken)) { setAuthToken(authToken); } THttpClient transport = new THttpClient(LINE_HTTP_URL); transport.setCustomHeaders(headers); transport.open(); TProtocol protocol = new TCompactProtocol(transport); setClient(new TalkService.Client(protocol)); } /* * (non-Javadoc) * * @see api.line.LineApi#loginWithQrCode() */ public AuthQrcode loginWithQrCode() throws Exception { // Request QrCode from LINE server. // Map<String, String> json = null; boolean keepLoggedIn = false; THttpClient transport = new THttpClient(LINE_HTTP_URL); transport.setCustomHeaders(headers); transport.open(); TProtocol protocol = new TCompactProtocol(transport); this.client = new TalkService.Client(protocol); AuthQrcode result = this.client.getAuthQrcode(keepLoggedIn, systemName); headers.put(X_LINE_ACCESS, result.getVerifier()); System.out.println("Retrieved QR Code."); return result; // await for QR code to be certified, it will return a verifier afterward. 
// loginWithVerifier(); } /* * (non-Javadoc) * * @see api.line.LineApi#loginWithVerifier() */ public String loginWithVerifierForCertificate() throws Exception { Map json; json = getCertResult(LINE_CERTIFICATE_URL); if (json == null) { throw new Exception("fail to pass certificate check."); } // login with verifier json = (Map) json.get("result"); String verifierLocal = (String) json.get("verifier"); this.verifier = verifierLocal; LoginResult result = this.client.loginWithVerifierForCertificate(verifierLocal); if (result.getType() == LoginResultType.SUCCESS) { setAuthToken(result.getAuthToken()); setCertificate(result.getCertificate()); return result.getCertificate(); } else if (result.getType() == LoginResultType.REQUIRE_QRCODE) { throw new Exception("require QR code"); } else { throw new Exception("require device confirm"); } } public Map getCertResult(String url) throws Exception { Unirest unirest = new Unirest(); // set timed out in 2 mins. Unirest.setTimeouts(120000, 120000); HttpResponse<JsonNode> jsonResponse = unirest.get(url).headers(this.headers).asJson(); return Utility.toMap(jsonResponse.getBody().getObject()); } public boolean postContent(String url, Map<String, Object> data, InputStream is) throws Exception { Unirest unirest = new Unirest(); byte[] byteArray = IOUtils.toByteArray(is); HttpResponse<JsonNode> jsonResponse = unirest.post(url).headers(this.headers).fields(data).field("file", byteArray, "").asJson(); return jsonResponse.getStatus() == 201; } /** * * After login, update authToken to avoid expiration of authToken. This method skip the PinCode * validation step. * **/ public boolean updateAuthToken() throws Exception { if (this.certificate != null) { this.login(this.id, this.password); this.loginWithAuthToken(this.authToken); return true; } else { throw new Exception("You need to login first. 
There is no valid certificate"); } } /** * find and add Contact by user id * * @return * @throws TException * @throws TalkException **/ public Contact findContactByUserid(String userid) throws TalkException, TException { return this.client.findContactByUserid(userid); } /** * find and add Contact by user id * * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findAndAddContactsByUserid(int reqSeq, String userid) throws TalkException, TException { return this.client.findAndAddContactsByUserid(0, userid); } /** * find contacts by email (not tested) * * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findContactsByEmail(Set<String> emails) throws TalkException, TException { return this.client.findContactsByEmail(emails); } /** * find and add contact by email (not tested) * * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findAndAddContactsByEmail(int reqSeq, Set<String> emails) throws TalkException, TException { return this.client.findAndAddContactsByEmail(0, emails); } /** * find and add contact by phone number (not tested) * * @return * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findContactsByPhone( Set<String> phones) throws TalkException, TException { return this.client.findContactsByPhone(phones); } /** * find and add contact by phone number (not tested) * * @return * @return * @throws TException * @throws TalkException **/ public Map<String, Contact> findAndAddContactsByPhone(int reqSeq, Set<String> phones) throws TalkException, TException { return this.client.findAndAddContactsByPhone(0, phones); } /** * Get profile information * * returns Profile object; - picturePath - displayName - phone (base64 encoded?) 
- * allowSearchByUserid - pictureStatus - userid - mid # used for unique id for account - * phoneticName - regionCode - allowSearchByEmail - email - statusMessage **/ public Profile getProfile() throws TalkException, TException { return this.client.getProfile(); } public List<String> getAllContactIds() throws TalkException, TException { /** Get all contacts of your LINE account **/ return this.client.getAllContactIds(); } public List<String> getBlockedContactIds() throws TalkException, TException { /** Get all blocked contacts of your LINE account **/ return this.client.getBlockedContactIds(); } public List<String> getHiddenContactIds() throws TalkException, TException { /** Get all hidden contacts of your LINE account **/ return this.client.getHiddenContactMids(); } public List<Contact> getContacts(List<String> ids) throws TalkException, TException { /** * Get contact information list from ids * * {returns{ List of Contact list;} - status - capableVideoCall - dispalyName - settings - * pictureStatus - capableVoiceCall - capableBuddy - mid - displayNameOverridden - relation - * thumbnailUrl_ - createdTime - facoriteTime - capableMyhome - attributes - type - phoneticName * - statusMessage **/ // if type(ids) != list{ // msg = "argument should be list of contact ids" return this.client.getContacts(ids); } public Room createRoom(int reqSeq, List<String> ids) throws TalkException, TException { /** Create a chat room **/ // reqSeq = 0; return this.client.createRoom(reqSeq, ids); } public Room getRoom(String roomId) throws TalkException, TException { /** Get a chat room **/ return this.client.getRoom(roomId); } public void inviteIntoRoom(String roomId, List<String> contactIds) throws TalkException, TException { /** Invite contacts into room **/ this.client.inviteIntoRoom(0, roomId, contactIds); } public void leaveRoom(String id) throws TalkException, TException { /** Leave a chat room **/ this.client.leaveRoom(0, id); } public Group createGroup(int seq, String name, List<String> ids) throws TalkException, TException { /** Create a group **/ // seq = 0; return this.client.createGroup(seq, name, ids); } public List<Group> getGroups(List<String> groupIds) throws TalkException, TException { /** Get a list of group with ids **/ // if type(ids) != list{ // msg = "argument should be list of group ids" // this.raise_error(msg) return this.client.getGroups(groupIds); } public List<String> getGroupIdsJoined() throws TalkException, TException { /** Get group id that you joined **/ return this.client.getGroupIdsJoined(); } public List<String> getGroupIdsInvited() throws TalkException, TException { /** Get group id that you invited **/ return this.client.getGroupIdsInvited(); } public void acceptGroupInvitation(int seq, String groupId) throws TalkException, TException { /** Accept a group invitation **/ // seq = 0; this.client.acceptGroupInvitation(seq, groupId); } public void cancelGroupInvitation(int seq, String groupId, List<String> contactIds) throws TalkException, TException { /** Cancel a group invitation **/ // seq = 0; this.client.cancelGroupInvitation(seq, groupId, contactIds); } public void inviteIntoGroup(int seq, String groupId, List<String> contactIds) throws TalkException, TException { /** Invite contacts into group **/ // seq = 0; this.client.inviteIntoGroup(seq, groupId, contactIds); } public void kickoutFromGroup(int seq, String groupId, List<String> contactIds) throws TalkException, TException { /** Kick a group members **/ // seq = 0; this.client.kickoutFromGroup(seq, groupId, contactIds); 
} public void leaveGroup(String id) throws TalkException, TException { /** Leave a group **/ this.client.leaveGroup(0, id); } public List<Message> getRecentMessages(String id, int count) throws TalkException, TException { /** Get recent messages from `id` **/ return this.client.getRecentMessages(id, count); } public Message sendMessage(int seq, Message message) throws TalkException, TException { /** * Send a message to `id`. `id` could be contact id or group id * * param message: `message` instance **/ return this.client.sendMessage(seq, message); } public long getLastOpRevision() throws TalkException, TException { return this.client.getLastOpRevision(); } public List<Operation> fetchOperations(long revision, int count) throws TalkException, TException { return this.client.fetchOperations(revision, count); } public TMessageBoxWrapUp getMessageBoxCompactWrapUp(String id) { try { return this.client.getMessageBoxCompactWrapUp(id); } catch (Exception e) { return null; } } public TMessageBoxWrapUpResponse getMessageBoxCompactWrapUpList(int start, int count) throws Exception { try { return this.client.getMessageBoxCompactWrapUpList(start, count); } catch (Exception e) { throw new Exception(e.getMessage()); } } private void setAuthToken(String token) { headers.put(X_LINE_ACCESS, token); this.authToken = token; } public TalkService.Client getClient() { return client; } public void setClient(TalkService.Client client) { this.client = client; } public String getLineAccessToken() { return headers.get(X_LINE_ACCESS); } public String getCertificate() { return certificate; } public void setCertificate(String certificate) { this.certificate = certificate; } @Override public void close() throws IOException { if (client == null) { return; } TTransport inputTransport = client.getInputProtocol().getTransport(); inputTransport.close(); TTransport outputTransport = client.getOutputProtocol().getTransport(); outputTransport.close(); } }
When updating the auth token, the old certificate should be used.
src/main/java/io/cslinmiso/line/api/impl/LineApiImpl.java
When updating the auth token, the old certificate should be used.
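The intent of the message above can be sketched against the accessors visible at the end of LineApiImpl: when a fresh token is applied, only the X-Line-Access header and the authToken field change, while the certificate obtained at the first login is kept and sent on later requests. This is an illustrative sketch only; the updateAuthToken name is hypothetical, while headers, X_LINE_ACCESS, authToken and certificate are the fields shown in the excerpt.

// Hypothetical sketch, not part of the repository: shows the behaviour the
// commit message asks for when a new auth token is applied.
private void updateAuthToken(String newToken) {
    headers.put(X_LINE_ACCESS, newToken);  // replace only the access-token header
    this.authToken = newToken;
    // this.certificate is deliberately left untouched, so the certificate
    // issued with the original login keeps being reused on later requests.
}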
Java
agpl-3.0
a2ea9c5409d034a75567be3c5539ec817ad0338c
0
VoltDB/voltdb,wolffcm/voltdb,zuowang/voltdb,zuowang/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,migue/voltdb,kumarrus/voltdb,wolffcm/voltdb,simonzhangsm/voltdb,flybird119/voltdb,creative-quant/voltdb,VoltDB/voltdb,ingted/voltdb,flybird119/voltdb,ingted/voltdb,ingted/voltdb,migue/voltdb,flybird119/voltdb,ingted/voltdb,kumarrus/voltdb,deerwalk/voltdb,ingted/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,paulmartel/voltdb,flybird119/voltdb,VoltDB/voltdb,ingted/voltdb,wolffcm/voltdb,simonzhangsm/voltdb,wolffcm/voltdb,paulmartel/voltdb,migue/voltdb,zuowang/voltdb,kumarrus/voltdb,zuowang/voltdb,simonzhangsm/voltdb,kumarrus/voltdb,zuowang/voltdb,migue/voltdb,kumarrus/voltdb,deerwalk/voltdb,paulmartel/voltdb,VoltDB/voltdb,creative-quant/voltdb,zuowang/voltdb,creative-quant/voltdb,deerwalk/voltdb,paulmartel/voltdb,deerwalk/voltdb,kumarrus/voltdb,simonzhangsm/voltdb,migue/voltdb,paulmartel/voltdb,ingted/voltdb,wolffcm/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,migue/voltdb,wolffcm/voltdb,migue/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,paulmartel/voltdb,deerwalk/voltdb,deerwalk/voltdb,creative-quant/voltdb,zuowang/voltdb,migue/voltdb,deerwalk/voltdb,paulmartel/voltdb,flybird119/voltdb,flybird119/voltdb,deerwalk/voltdb,creative-quant/voltdb,kumarrus/voltdb,creative-quant/voltdb,creative-quant/voltdb,wolffcm/voltdb,flybird119/voltdb,ingted/voltdb,wolffcm/voltdb,kumarrus/voltdb,zuowang/voltdb,flybird119/voltdb,creative-quant/voltdb
/* This file is part of VoltDB. * Copyright (C) 2008-2014 VoltDB Inc. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. */ package org.voltdb.compiler; import java.io.File; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLDecoder; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import junit.framework.TestCase; import org.apache.commons.lang3.StringUtils; import org.voltdb.ProcInfoData; import org.voltdb.VoltDB.Configuration; import org.voltdb.VoltType; import org.voltdb.benchmark.tpcc.TPCCProjectBuilder; import org.voltdb.catalog.Catalog; import org.voltdb.catalog.CatalogMap; import org.voltdb.catalog.Column; import org.voltdb.catalog.Connector; import org.voltdb.catalog.ConnectorTableInfo; import org.voltdb.catalog.Database; import org.voltdb.catalog.Group; import org.voltdb.catalog.GroupRef; import org.voltdb.catalog.Procedure; import org.voltdb.catalog.SnapshotSchedule; import org.voltdb.catalog.Table; import org.voltdb.compiler.VoltCompiler.Feedback; import org.voltdb.compiler.VoltCompiler.VoltCompilerException; import org.voltdb.types.IndexType; import org.voltdb.utils.BuildDirectoryUtils; import org.voltdb.utils.CatalogUtil; public class TestVoltCompiler extends TestCase { String nothing_jar; String testout_jar; @Override public void setUp() { nothing_jar = BuildDirectoryUtils.getBuildDirectoryPath() + File.pathSeparator + "nothing.jar"; testout_jar = BuildDirectoryUtils.getBuildDirectoryPath() + File.pathSeparator + "testout.jar"; } @Override public void tearDown() { File njar = new File(nothing_jar); njar.delete(); File tjar = new File(testout_jar); tjar.delete(); } public void testBrokenLineParsing() throws IOException { final String simpleSchema1 = "create table table1r_el (pkey integer, column2_integer integer, PRIMARY KEY(pkey));\n" + "create view v_table1r_el (column2_integer, num_rows) as\n" + "select column2_integer as column2_integer,\n" + "count(*) as num_rows\n" + "from table1r_el\n" + "group by column2_integer;\n" + "create view v_table1r_el2 (column2_integer, num_rows) as\n" + "select column2_integer as column2_integer,\n" + "count(*) as num_rows\n" + "from table1r_el\n" + "group by column2_integer\n;\n"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database 
name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures>" + "<procedure class='Foo'>" + "<sql>select * from table1r_el;</sql>" + "</procedure>" + "</procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testUTF8XMLFromHSQL() throws IOException { final String simpleSchema = "create table blah (pkey integer not null, strval varchar(200), PRIMARY KEY(pkey));\n"; VoltProjectBuilder pb = new VoltProjectBuilder(); pb.addLiteralSchema(simpleSchema); pb.addStmtProcedure("utf8insert", "insert into blah values(1, 'něco za nic')"); pb.addPartitionInfo("blah", "pkey"); boolean success = pb.compile(Configuration.getPathToCatalogForTest("utf8xml.jar")); assertTrue(success); } private boolean isFeedbackPresent(String expectedError, ArrayList<Feedback> fbs) { for (Feedback fb : fbs) { if (fb.getStandardFeedbackLine().contains(expectedError)) { return true; } } return false; } public void testMismatchedPartitionParams() throws IOException { String expectedError; ArrayList<Feedback> fbs; fbs = checkPartitionParam("CREATE TABLE PKEY_BIGINT ( PKEY BIGINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_BIGINT ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamBigint", "PKEY_BIGINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamBigint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.BIGINT and partition parameter is type VoltType.STRING"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_BIGINT ( PKEY BIGINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_BIGINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamBigint;", "PKEY_BIGINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamBigint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.BIGINT and partition parameter is type VoltType.STRING"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_BIGINT ( PKEY BIGINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_BIGINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamBigint;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamBigint ON TABLE PKEY_BIGINT COLUMN PKEY;", "PKEY_BIGINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamBigint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.BIGINT and partition parameter is type VoltType.STRING"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamInteger", "PKEY_INTEGER"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamInteger may 
cause overflow or loss of precision.\n" + "Partition column is type VoltType.INTEGER and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamInteger;", "PKEY_INTEGER"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamInteger may cause overflow or loss of precision.\n" + "Partition column is type VoltType.INTEGER and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;", "PKEY_INTEGER"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger may cause overflow or loss of precision.\n" + "Partition column is type VoltType.INTEGER and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_SMALLINT ( PKEY SMALLINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_SMALLINT ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamSmallint", "PKEY_SMALLINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamSmallint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.SMALLINT and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_SMALLINT ( PKEY SMALLINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_SMALLINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamSmallint;", "PKEY_SMALLINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamSmallint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.SMALLINT and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_SMALLINT ( PKEY SMALLINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_SMALLINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamSmallint;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamSmallint ON TABLE PKEY_SMALLINT COLUMN PKEY;", "PKEY_SMALLINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamSmallint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.SMALLINT and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_TINYINT ( PKEY TINYINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE 
PKEY_TINYINT ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamTinyint", "PKEY_TINYINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamTinyint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.TINYINT and partition parameter " + "is type VoltType.SMALLINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_TINYINT ( PKEY TINYINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_TINYINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamTinyint;", "PKEY_TINYINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamTinyint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.TINYINT and partition parameter " + "is type VoltType.SMALLINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_TINYINT ( PKEY TINYINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_TINYINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamTinyint;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamTinyint ON TABLE PKEY_TINYINT COLUMN PKEY;", "PKEY_TINYINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamTinyint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.TINYINT and partition parameter " + "is type VoltType.SMALLINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_STRING ( PKEY VARCHAR(32) NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_STRING ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamString", "PKEY_STRING"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamString may cause overflow or loss of precision.\n" + "Partition column is type VoltType.STRING and partition parameter " + "is type VoltType.INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_STRING ( PKEY VARCHAR(32) NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_STRING ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamString;", "PKEY_STRING"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamString may cause overflow or loss of precision.\n" + "Partition column is type VoltType.STRING and partition parameter " + "is type VoltType.INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_STRING ( PKEY VARCHAR(32) NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_STRING ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamString;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamString ON TABLE PKEY_STRING COLUMN PKEY;", "PKEY_STRING"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamString may cause overflow or loss of precision.\n" + "Partition column is type VoltType.STRING 
and partition parameter " + "is type VoltType.INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); } private ArrayList<Feedback> checkPartitionParam(String ddl, String procedureClass, String table) { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures>" + "<procedure class='" + procedureClass + "' />" + "</procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); return compiler.m_errors; } private ArrayList<Feedback> checkPartitionParam(String ddl, String table) { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); return compiler.m_errors; } public void testSnapshotSettings() throws IOException { String schemaPath = ""; try { final URL url = TPCCProjectBuilder.class.getResource("tpcc-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } VoltProjectBuilder builder = new VoltProjectBuilder(); builder.addProcedures(org.voltdb.compiler.procedures.TPCCTestProc.class); builder.setSnapshotSettings("32m", 5, "/tmp", "woobar"); builder.addSchema(schemaPath); try { assertTrue(builder.compile("/tmp/snapshot_settings_test.jar")); final String catalogContents = VoltCompilerUtils.readFileFromJarfile("/tmp/snapshot_settings_test.jar", "catalog.txt"); final Catalog cat = new Catalog(); cat.execute(catalogContents); CatalogUtil.compileDeployment(cat, builder.getPathToDeployment(), true, false); SnapshotSchedule schedule = cat.getClusters().get("cluster").getDatabases(). get("database").getSnapshotschedule().get("default"); assertEquals(32, schedule.getFrequencyvalue()); assertEquals("m", schedule.getFrequencyunit()); //Will be empty because the deployment file initialization is what sets this value assertEquals("/tmp", schedule.getPath()); assertEquals("woobar", schedule.getPrefix()); } finally { final File jar = new File("/tmp/snapshot_settings_test.jar"); jar.delete(); } } // TestExportSuite tests most of these options are tested end-to-end; however need to test // that a disabled connector is really disabled and that auth data is correct. 
public void testExportSetting() throws IOException { final VoltProjectBuilder project = new VoltProjectBuilder(); project.addSchema(getClass().getResource("ExportTester-ddl.sql")); project.addExport(false /* disabled */); project.setTableAsExportOnly("A"); project.setTableAsExportOnly("B"); try { boolean success = project.compile("/tmp/exportsettingstest.jar"); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile("/tmp/exportsettingstest.jar", "catalog.txt"); final Catalog cat = new Catalog(); cat.execute(catalogContents); Connector connector = cat.getClusters().get("cluster").getDatabases(). get("database").getConnectors().get("0"); assertFalse(connector.getEnabled()); } finally { final File jar = new File("/tmp/exportsettingstest.jar"); jar.delete(); } } // test that Export configuration is insensitive to the case of the table name public void testExportTableCase() throws IOException { final VoltProjectBuilder project = new VoltProjectBuilder(); project.addSchema(TestVoltCompiler.class.getResource("ExportTester-ddl.sql")); project.addStmtProcedure("Dummy", "insert into a values (?, ?, ?);", "a.a_id: 0"); project.addPartitionInfo("A", "A_ID"); project.addPartitionInfo("B", "B_ID"); project.addPartitionInfo("e", "e_id"); project.addPartitionInfo("f", "f_id"); project.addExport(true /* enabled */); project.setTableAsExportOnly("A"); // uppercase DDL, uppercase export project.setTableAsExportOnly("b"); // uppercase DDL, lowercase export project.setTableAsExportOnly("E"); // lowercase DDL, uppercase export project.setTableAsExportOnly("f"); // lowercase DDL, lowercase export try { assertTrue(project.compile("/tmp/exportsettingstest.jar")); final String catalogContents = VoltCompilerUtils.readFileFromJarfile("/tmp/exportsettingstest.jar", "catalog.txt"); final Catalog cat = new Catalog(); cat.execute(catalogContents); CatalogUtil.compileDeployment(cat, project.getPathToDeployment(), true, false); Connector connector = cat.getClusters().get("cluster").getDatabases(). 
get("database").getConnectors().get("0"); assertTrue(connector.getEnabled()); // Assert that all tables exist in the connector section of catalog assertNotNull(connector.getTableinfo().getIgnoreCase("a")); assertNotNull(connector.getTableinfo().getIgnoreCase("b")); assertNotNull(connector.getTableinfo().getIgnoreCase("e")); assertNotNull(connector.getTableinfo().getIgnoreCase("f")); } finally { final File jar = new File("/tmp/exportsettingstest.jar"); jar.delete(); } } // test that the source table for a view is not export only public void testViewSourceNotExportOnly() throws IOException { final VoltProjectBuilder project = new VoltProjectBuilder(); project.addSchema(TestVoltCompiler.class.getResource("ExportTesterWithView-ddl.sql")); project.addStmtProcedure("Dummy", "select * from v_table1r_el_only"); project.addExport(true /* enabled */); project.setTableAsExportOnly("table1r_el_only"); try { assertFalse(project.compile("/tmp/exporttestview.jar")); } finally { final File jar = new File("/tmp/exporttestview.jar"); jar.delete(); } } // test that a view is not export only public void testViewNotExportOnly() throws IOException { final VoltProjectBuilder project = new VoltProjectBuilder(); project.addSchema(TestVoltCompiler.class.getResource("ExportTesterWithView-ddl.sql")); project.addStmtProcedure("Dummy", "select * from table1r_el_only"); project.addExport(true /* enabled */); project.setTableAsExportOnly("v_table1r_el_only"); try { assertFalse(project.compile("/tmp/exporttestview.jar")); } finally { final File jar = new File("/tmp/exporttestview.jar"); jar.delete(); } } public void testBadPath() { final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML("invalidnonsense", nothing_jar); assertFalse(success); } public void testXSDSchemaOrdering() throws IOException { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String schemaPath = schemaFile.getPath(); final String project = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas>" + "<schema path='" + schemaPath + "'/>" + "</schemas>" + "<procedures>" + "<procedure class='proc'><sql>select * from T</sql></procedure>" + "</procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(project); final String projectPath = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); boolean success = compiler.compileWithProjectXML(projectPath, nothing_jar); assertTrue(success); } public void testXMLFileWithDeprecatedElements() { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String schemaPath = schemaFile.getPath(); final String project = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas>" + "<schema path='" + schemaPath + "'/>" + "</schemas>" + "<procedures>" + "<procedure class='proc'><sql>select * from T</sql></procedure>" + "</procedures>" + "</database>" + "<security enabled='true'/>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(project); final String path = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); boolean success = compiler.compileWithProjectXML(path, nothing_jar); assertFalse(success); assertTrue( isFeedbackPresent("Found deprecated XML element \"security\"", compiler.m_errors) ); } public void testXMLFileWithInvalidSchemaReference() { final String simpleXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + 
"<schemas><schema path='my schema file.sql' /></schemas>" + "<procedures><procedure class='procedures/procs.jar' /></procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(simpleXML); final String projectPath = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, nothing_jar); assertFalse(success); } public void testXMLFileWithSchemaError() { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String simpleXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='baddbname'>" + "<schemas>" + "<schema path='" + schemaFile.getAbsolutePath() + "'/>" + "</schemas>" + // invalid project file: no procedures // "<procedures>" + // "<procedure class='proc'><sql>select * from T</sql></procedure>" + //"</procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(simpleXML); final String projectPath = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, nothing_jar); assertFalse(success); } public void testXMLFileWithWrongDBName() { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String simpleXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='baddbname'>" + "<schemas>" + "<schema path='" + schemaFile.getAbsolutePath() + "'/>" + "</schemas>" + "<procedures>" + "<procedure class='proc'><sql>select * from T</sql></procedure>" + "</procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(simpleXML); final String projectPath = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, nothing_jar); assertFalse(success); } public void testXMLFileWithDefaultDBName() { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String simpleXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas>" + "<schema path='" + schemaFile.getAbsolutePath() + "'/>" + "</schemas>" + "<procedures>" + "<procedure class='proc'><sql>select * from T</sql></procedure>" + "</procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(simpleXML); final String path = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(path, nothing_jar); assertTrue(success); assertTrue(compiler.m_catalog.getClusters().get("cluster").getDatabases().get("database") != null); } public void testBadClusterConfig() throws IOException { // check no hosts ClusterConfig cluster_config = new ClusterConfig(0, 1, 0); assertFalse(cluster_config.validate()); // check no sites-per-hosts cluster_config = new ClusterConfig(1, 0, 0); assertFalse(cluster_config.validate()); } public void testXMLFileWithDDL() throws IOException { final String simpleSchema1 = "create table books (cash integer default 23 NOT NULL, title varchar(3) default 'foo', PRIMARY KEY(cash)); " + "PARTITION TABLE books ON COLUMN cash;"; // newline inserted to test catalog friendliness final String simpleSchema2 = "create table books2\n (cash integer default 23 NOT NULL, title varchar(3) default 'foo', PRIMARY KEY(cash));"; final File schemaFile1 = 
VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath1 = schemaFile1.getPath(); final File schemaFile2 = VoltProjectBuilder.writeStringToTempFile(simpleSchema2); final String schemaPath2 = schemaFile2.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<!-- xml comment check -->" + "<database name='database'>" + "<!-- xml comment check -->" + "<schemas>" + "<!-- xml comment check -->" + "<schema path='" + schemaPath1 + "' />" + "<schema path='" + schemaPath2 + "' />" + "<!-- xml comment check -->" + "</schemas>" + "<!-- xml comment check -->" + "<procedures>" + "<!-- xml comment check -->" + "<procedure class='org.voltdb.compiler.procedures.AddBook' />" + "<procedure class='Foo'>" + "<sql>select * from books;</sql>" + "</procedure>" + "</procedures>" + "<!-- xml comment check -->" + "</database>" + "<!-- xml comment check -->" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } public void testProcWithBoxedParam() throws IOException { final String simpleSchema = "create table books (cash integer default 23, title varchar(3) default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures>" + "<procedure class='org.voltdb.compiler.procedures.AddBookBoxed' />" + "</procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testDDLWithNoLengthString() throws IOException { // DO NOT COPY PASTE THIS INVALID EXAMPLE! 
final String simpleSchema1 = "create table books (cash integer default 23, title varchar default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures>" + "<procedure class='org.voltdb.compiler.procedures.AddBook' />" + "<procedure class='Foo'>" + "<sql>select * from books;</sql>" + "</procedure>" + "</procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testDDLWithLongStringInCharacters() throws IOException { int length = VoltType.MAX_VALUE_LENGTH_IN_CHARACTERS + 10; final String simpleSchema1 = "create table books (cash integer default 23, " + "title varchar("+length+") default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); // Check warnings assertEquals(1, compiler.m_warnings.size()); String warningMsg = compiler.m_warnings.get(0).getMessage(); String expectedMsg = "The size of VARCHAR column TITLE in table BOOKS greater than " + "262144 will be enforced as byte counts rather than UTF8 character counts. 
" + "To eliminate this warning, specify \"VARCHAR(262154 BYTES)\""; assertEquals(expectedMsg, warningMsg); Database db = compiler.getCatalog().getClusters().get("cluster").getDatabases().get("database"); Column var = db.getTables().get("BOOKS").getColumns().get("TITLE"); assertTrue(var.getInbytes()); } public void testDDLWithTooLongVarbinaryVarchar() throws IOException { int length = VoltType.MAX_VALUE_LENGTH + 10; String simpleSchema1 = "create table books (cash integer default 23, " + "title varbinary("+length+") , PRIMARY KEY(cash));"; String error1 = "VARBINARY column size for column BOOKS.TITLE is > " + VoltType.MAX_VALUE_LENGTH+" char maximum."; checkDDLErrorMessage(simpleSchema1, error1); String simpleSchema2 = "create table books (cash integer default 23, " + "title varchar("+length+") , PRIMARY KEY(cash));"; String error2 = "VARCHAR column size for column BOOKS.TITLE is > " + VoltType.MAX_VALUE_LENGTH+" char maximum."; checkDDLErrorMessage(simpleSchema2, error2); } public void testNullablePartitionColumn() throws IOException { final String simpleSchema = "create table books (cash integer default 23, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "partition table books on column cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook'/></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); boolean found = false; for (final VoltCompiler.Feedback fb : compiler.m_errors) { if (fb.message.indexOf("Partition column") > 0) found = true; } assertTrue(found); } public void testXMLFileWithBadDDL() throws IOException { final String simpleSchema = "create table books (id integer default 0, strval varchar(33000) default '', PRIMARY KEY(id));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } // NOTE: TPCCTest proc also tests whitespaces regressions in SQL literals public void testWithTPCCDDL() { String schemaPath = ""; try { final URL url = TPCCProjectBuilder.class.getResource("tpcc-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure 
class='org.voltdb.compiler.procedures.TPCCTestProc' /></procedures>" + "</database>" + "</project>"; //System.out.println(simpleProject); final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testSeparateCatalogCompilation() throws IOException { String schemaPath = ""; try { final URL url = TPCCProjectBuilder.class.getResource("tpcc-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.TPCCTestProc' /></procedures>" + "</database>" + "</project>"; //System.out.println(simpleProject); final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler1 = new VoltCompiler(); final VoltCompiler compiler2 = new VoltCompiler(); final Catalog catalog = compileCatalogFromProject(compiler1, projectPath); final String cat1 = catalog.serialize(); final boolean success = compiler2.compileWithProjectXML(projectPath, testout_jar); final String cat2 = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); assertTrue(success); assertTrue(cat1.compareTo(cat2) == 0); } private Catalog compileCatalogFromProject( final VoltCompiler compiler, final String projectPath) { try { return compiler.compileCatalogFromProject(projectPath); } catch (VoltCompilerException e) { e.printStackTrace(); fail(); return null; } } private boolean compileFromDDL( final VoltCompiler compiler, final String jarPath, final String... 
schemaPaths) { try { return compiler.compileFromDDL(jarPath, schemaPaths); } catch (VoltCompilerException e) { e.printStackTrace(); fail(); return false; } } public void testDDLTableTooManyColumns() throws IOException { String schemaPath = ""; try { final URL url = TestVoltCompiler.class.getResource("toowidetable-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.TPCCTestProc' /></procedures>" + "</database>" + "</project>"; //System.out.println(simpleProject); final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); boolean found = false; for (final VoltCompiler.Feedback fb : compiler.m_errors) { if (fb.message.startsWith("Table MANY_COLUMNS has")) found = true; } assertTrue(found); } public void testExtraFilesExist() throws IOException { String schemaPath = ""; try { final URL url = TPCCProjectBuilder.class.getResource("tpcc-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.TPCCTestProc' /></procedures>" + "</database>" + "</project>"; //System.out.println(simpleProject); final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String sql = VoltCompilerUtils.readFileFromJarfile(testout_jar, VoltCompiler.AUTOGEN_DDL_FILE_NAME); assertNotNull(sql); } public void testXMLFileWithELEnabled() throws IOException { final String simpleSchema = "create table books (cash integer default 23 NOT NULL, title varchar(3) default 'foo');"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + " <database name='database'>" + " <partitions><partition table='books' column='cash'/></partitions> " + " <schemas><schema path='" + schemaPath + "' /></schemas>" + " <procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + " <export>" + " <tables><table name='books'/></tables>" + " </export>" + " </database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); //System.out.println("PRINTING Catalog 1"); //System.out.println(c1.serialize()); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog 
c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } public void testOverrideProcInfo() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final ProcInfoData info = new ProcInfoData(); info.singlePartition = true; info.partitionInfo = "BOOKS.CASH: 0"; final Map<String, ProcInfoData> overrideMap = new HashMap<String, ProcInfoData>(); overrideMap.put("AddBook", info); final VoltCompiler compiler = new VoltCompiler(); compiler.setProcInfoOverrides(overrideMap); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); final Database db = c2.getClusters().get("cluster").getDatabases().get("database"); final Procedure addBook = db.getProcedures().get("AddBook"); assertEquals(true, addBook.getSinglepartition()); } public void testOverrideNonAnnotatedProcInfo() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;" + "create procedure from class org.voltdb.compiler.procedures.AddBook;" + "partition procedure AddBook ON TABLE books COLUMN cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final ProcInfoData info = new ProcInfoData(); info.singlePartition = true; info.partitionInfo = "BOOKS.CASH: 0"; final Map<String, ProcInfoData> overrideMap = new HashMap<String, ProcInfoData>(); overrideMap.put("AddBook", info); final VoltCompiler compiler = new VoltCompiler(); compiler.setProcInfoOverrides(overrideMap); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); final Database db = c2.getClusters().get("cluster").getDatabases().get("database"); final Procedure addBook = db.getProcedures().get("AddBook"); assertEquals(true, addBook.getSinglepartition()); } public void testBadStmtProcName() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(10) default 'foo', PRIMARY KEY(cash));"; final File schemaFile = 
VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='@Foo'><sql>select * from books;</sql></procedure></procedures>" + "<partitions><partition table='BOOKS' column='CASH' /></partitions>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testBadDdlStmtProcName() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(10) default 'foo', PRIMARY KEY(cash));" + "create procedure @Foo as select * from books;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "<partitions><partition table='BOOKS' column='CASH' /></partitions>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testGoodStmtProcName() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='Foo'><sql>select * from books;</sql></procedure></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testGoodDdlStmtProcName() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;" + "CREATE PROCEDURE Foo AS select * from books where cash = ?;" + "PARTITION PROCEDURE Foo ON TABLE BOOKS COLUMN CASH PARAMETER 0;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = 
compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testMaterializedView() throws IOException { final String simpleSchema = "create table books (cash integer default 23 NOT NULL, title varchar(10) default 'foo', PRIMARY KEY(cash));\n" + "partition table books on column cash;\n" + "create view matt (title, cash, num, foo) as select title, cash, count(*), sum(cash) from books group by title, cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); // final ClusterConfig cluster_config = new ClusterConfig(1, 1, 0, "localhost"); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } public void testVarbinary() throws IOException { final String simpleSchema = "create table books (cash integer default 23 NOT NULL, title varbinary(10) default NULL, PRIMARY KEY(cash));" + "partition table books on column cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures>" + "<procedure class='get'><sql>select * from books;</sql></procedure>" + "<procedure class='i1'><sql>insert into books values(5, 'AA');</sql></procedure>" + "<procedure class='i2'><sql>insert into books values(5, ?);</sql></procedure>" + "<procedure class='s1'><sql>update books set title = 'bb';</sql></procedure>" + "</procedures>" + //"<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); // final ClusterConfig cluster_config = new ClusterConfig(1, 1, 0, "localhost"); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } public void testDdlProcVarbinary() throws IOException { final String simpleSchema = "create table books (cash integer default 23 NOT NULL, title varbinary(10) default NULL, PRIMARY KEY(cash));" + "partition table books on column cash;" + "create procedure get as select * from books;" + "create procedure i1 as insert into books values(5, 'AA');" + "create procedure i2 as insert into books values(5, ?);" + "create procedure s1 as update books set title = 
'bb';" + "create procedure i3 as insert into books values( ?, ?);" + "partition procedure i3 on table books column cash;" + "create procedure d1 as delete from books where title = ? and cash = ?;" + "partition procedure d1 on table books column cash parameter 1;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + //"<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); // final ClusterConfig cluster_config = new ClusterConfig(1, 1, 0, "localhost"); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } // // There are DDL tests a number of places. TestDDLCompiler seems more about // verifying HSQL behaviour. Additionally, there are users of PlannerAideDeCamp // that verify plans for various DDL/SQL combinations. // // I'm going to add some DDL parsing validation tests here, as they seem to have // more to do with compiling a catalog.. and there are some related tests already // in this file. // private VoltCompiler compileForDDLTest(String schemaPath, boolean expectSuccess) { final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='sample'><sql>select * from t</sql></procedure></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); projectFile.deleteOnExit(); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertEquals(expectSuccess, success); return compiler; } private String getPathForSchema(String s) { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(s); schemaFile.deleteOnExit(); return schemaFile.getPath(); } public void testDDLCompilerLeadingGarbage() throws IOException { final String s = "-- a valid comment\n" + "- an invalid comment\n" + "create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), false); assertTrue(c.hasErrors()); } public void testDDLCompilerLeadingWhitespace() throws IOException { final String s = " \n" + "\n" + "create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerLeadingComment() throws IOException { final String s = "-- this is a leading comment\n" + " -- with some leading whitespace\n" + " create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); 
assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerLeadingCommentAndHashMarks() throws IOException { final String s = "-- ### this is a leading comment\n" + " -- with some ### leading whitespace\n" + " create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerNoNewlines() throws IOException { final String s = "create table t(id integer); create table r(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 2); } public void testDDLCompilerSplitLines() throws IOException { final String s = "create\n" + "table\n" + "t(id\n" + "integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment1() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + "-- and a line full of comments\n" + ";\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment2() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + ";\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingCommentAndHashMarks() throws IOException { final String s = "create table t(id varchar(128) default '###') -- ### this ###### is a trailing comment\n" + ";\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment3() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + "-- and a line full of comments\n" + ";"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment4() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + ";"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment5() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + "-- and a line full of comments\n" + " ;\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment6() throws IOException { final String 
s = "create table t(id integer) -- this is a trailing comment\n" + "-- and a line full of comments\n" + " ;\n" + "-- ends with a comment\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerInvalidStatement() throws IOException { final String s = "create table t for justice -- with a comment\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), false); assertTrue(c.hasErrors()); } public void testDDLCompilerCommentThatLooksLikeStatement() throws IOException { final String s = "create table t(id integer); -- create table r(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerLeadingSemicolon() throws IOException { final String s = "; create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), false); assertTrue(c.hasErrors()); } public void testDDLCompilerMultipleStatementsOnMultipleLines() throws IOException { final String s = "create table t(id integer); create\n" + "table r(id integer); -- second table"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 2); } public void testDDLCompilerStringLiteral() throws IOException { final String s = "create table t(id varchar(3) default 'abc');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); assertTrue(defaultvalue.equalsIgnoreCase("abc")); } public void testDDLCompilerSemiColonInStringLiteral() throws IOException { final String s = "create table t(id varchar(5) default 'a;bc');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); assertTrue(defaultvalue.equalsIgnoreCase("a;bc")); } public void testDDLCompilerDashDashInStringLiteral() throws IOException { final String s = "create table t(id varchar(5) default 'a--bc');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); assertTrue(defaultvalue.equalsIgnoreCase("a--bc")); } public void testDDLCompilerNewlineInStringLiteral() throws IOException { final String s = "create table t(id varchar(5) default 'a\n" + "bc');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); 
assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); // In the debugger, this looks valid at parse time but is mangled somewhere // later, perhaps in HSQL or in the catalog assembly? // ENG-681 System.out.println(defaultvalue); // assertTrue(defaultvalue.equalsIgnoreCase("a\nbc")); } public void testDDLCompilerEscapedStringLiterals() throws IOException { final String s = "create table t(id varchar(10) default 'a''b''''c');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); assertTrue(defaultvalue.equalsIgnoreCase("a'b''c")); } // Test that DDLCompiler's index creation adheres to the rules implicit in // the EE's tableindexfactory. Currently (10/3/2010) these are: // All column types can be used in a tree array. Only int types can // be used in hash tables or array indexes String[] column_types = {"tinyint", "smallint", "integer", "bigint", "float", "varchar(10)", "timestamp", "decimal"}; IndexType[] default_index_types = {IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE}; boolean[] can_be_hash = {true, true, true, true, false, false, true, false}; boolean[] can_be_tree = {true, true, true, true, true, true, true, true}; public void testDDLCompilerIndexDefaultTypes() { for (int i = 0; i < column_types.length; i++) { String s = "create table t(id " + column_types[i] + " not null, num integer not null);\n" + "create index idx_t_id on t(id);\n" + "create index idx_t_idnum on t(id,num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertEquals(default_index_types[i].getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_id").getType()); assertEquals(default_index_types[i].getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_idnum").getType()); } } public void testDDLCompilerHashIndexAllowed() { for (int i = 0; i < column_types.length; i++) { final String s = "create table t(id " + column_types[i] + " not null, num integer not null);\n" + "create index idx_t_id_hash on t(id);\n" + "create index idx_t_idnum_hash on t(id,num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), can_be_hash[i]); if (can_be_hash[i]) { // do appropriate index exists checks assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertEquals(IndexType.HASH_TABLE.getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_id_hash").getType()); assertEquals(IndexType.HASH_TABLE.getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_idnum_hash").getType()); } else { assertTrue(c.hasErrors()); } } } public void testUniqueIndexAllowed() { final String s = "create table t(id integer not null, num integer not 
null);\n" + "create unique index idx_t_unique on t(id,num);\n" + "create index idx_t on t(num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertTrue(d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_unique").getUnique()); assertFalse(d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t").getUnique()); // also validate that simple column indexes don't trigger the generalized expression index handling String noExpressionFound = ""; assertEquals(noExpressionFound, d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_unique").getExpressionsjson()); assertEquals(noExpressionFound, d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t").getExpressionsjson()); } public void testFunctionIndexAllowed() { final String s = "create table t(id integer not null, num integer not null);\n" + "create unique index idx_ft_unique on t(abs(id+num));\n" + "create index idx_ft on t(abs(num));"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertTrue(d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_ft_unique").getUnique()); assertFalse(d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_ft").getUnique()); // Validate that general expression indexes get properly annotated with an expressionjson attribute String noExpressionFound = ""; assertNotSame(noExpressionFound, d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_ft_unique").getExpressionsjson()); assertNotSame(noExpressionFound, d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_ft").getExpressionsjson()); } public void testDDLCompilerVarcharTreeIndexAllowed() { for (int i = 0; i < column_types.length; i++) { final String s = "create table t(id " + column_types[i] + " not null, num integer not null);\n" + "create index idx_t_id_tree on t(id);\n" + "create index idx_t_idnum_tree on t(id,num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), can_be_tree[i]); assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertEquals(IndexType.BALANCED_TREE.getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_id_tree").getType()); assertEquals(IndexType.BALANCED_TREE.getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_idnum_tree").getType()); } } public void testDDLCompilerTwoIdenticalIndexes() { final String s = "create table t(id integer not null, num integer not null);\n" + "create index idx_t_idnum1 on t(id,num);\n" + "create index idx_t_idnum2 on t(id,num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.hasErrorsOrWarnings()); } public void testDDLCompilerSameNameIndexesOnTwoTables() { final String s = "create table t1(id integer not null, num integer not null);\n" + "create table t2(id integer not null, num integer not null);\n" + "create index idx_t_idnum on t1(id,num);\n" + "create index idx_t_idnum on t2(id,num);"; // if this test ever fails, it's worth figuring out why // When written, HSQL wouldn't allow two indexes with the same name, // even across tables. 
        compileForDDLTest(getPathForSchema(s), false);
    }

    public void testDDLCompilerTwoCoveringIndexes() {
        final String s =
                "create table t(id integer not null, num integer not null);\n" +
                "create index idx_t_idnum_hash on t(id,num);\n" +
                "create index idx_t_idnum_tree on t(id,num);";
        compileForDDLTest(getPathForSchema(s), true);
    }

    public void testDDLCompilerTwoSwappedOrderIndexes() {
        final String s =
                "create table t(id integer not null, num integer not null);\n" +
                "create index idx_t_idnum_a on t(num,id);\n" +
                "create index idx_t_idnum_b on t(id,num);";
        final VoltCompiler c = compileForDDLTest(getPathForSchema(s), true);
        assertEquals(false, c.hasErrorsOrWarnings());
    }

    public void testDDLCompilerDropOneOfThreeIndexes() {
        final String s =
                "create table t(id integer not null, num integer not null);\n" +
                "create index idx_t_idnum_a on t(num,id);\n" +
                "create index idx_t_idnum_b on t(id,num);\n" +
                "create index idx_t_idnum_c on t(id,num);\n";
        final VoltCompiler c = compileForDDLTest(getPathForSchema(s), true);
        assertEquals(true, c.hasErrorsOrWarnings());
        int foundCount = 0;
        for (VoltCompiler.Feedback f : c.m_warnings) {
            if (f.message.contains("Dropping index")) {
                foundCount++;
            }
        }
        assertEquals(1, foundCount);
    }

    public void testDDLCompilerUniqueAndNonUniqueIndexOnSameColumns() {
        final String s =
                "create table t(id integer not null, num integer not null);\n" +
                "create unique index idx_t_idnum_unique on t(id,num);\n" +
                "create index idx_t_idnum on t(id,num);";
        compileForDDLTest(getPathForSchema(s), true);
    }

    public void testDDLCompilerTwoIndexesWithSameName() {
        final String s =
                "create table t(id integer not null, num integer not null);\n" +
                "create index idx_t_idnum on t(id);\n" +
                "create index idx_t_idnum on t(id,num);";
        compileForDDLTest(getPathForSchema(s), false);
    }

    public void testDDLCompilerIndexesOrMatViewContainSQLFunctionNOW() {
        // Test indexes.
        String ddl = "";
        String errorIndexMsg = "Index IDX_T_TM cannot include the function NOW or CURRENT_TIMESTAMP.";
        ddl = "create table t(id integer not null, tm timestamp);\n" +
              "create index idx_t_tm on t(since_epoch(second, CURRENT_TIMESTAMP) - since_epoch(second, tm));";
        checkDDLErrorMessage(ddl, errorIndexMsg);

        ddl = "create table t(id integer not null, tm timestamp);\n" +
              "create index idx_t_tm on t(since_epoch(second, NOW) - since_epoch(second, tm));";
        checkDDLErrorMessage(ddl, errorIndexMsg);

        ddl = "create table t(id integer not null, tm timestamp);\n" +
              "create index idx_t_tm on t(CURRENT_TIMESTAMP);";
        checkDDLErrorMessage(ddl, errorIndexMsg);

        // Test MatView.
        String errorMatviewMsg = "Materialized view \"MY_VIEW\" cannot include the function NOW or CURRENT_TIMESTAMP.";
        ddl = "create table t(id integer not null, tm timestamp);\n" +
              "create view my_view as select since_epoch(second, CURRENT_TIMESTAMP) - since_epoch(second, tm), " +
              "count(*) from t group by since_epoch(second, CURRENT_TIMESTAMP) - since_epoch(second, tm);";
        checkDDLErrorMessage(ddl, errorMatviewMsg);

        ddl = "create table t(id integer not null, tm timestamp);\n" +
              "create view my_view as select since_epoch(second, NOW) - since_epoch(second, tm), " +
              "count(*) from t group by since_epoch(second, NOW) - since_epoch(second, tm);";
        checkDDLErrorMessage(ddl, errorMatviewMsg);

        ddl = "create table t(id integer not null, tm timestamp);\n" +
              "create view my_view as select tm, count(*), count(CURRENT_TIMESTAMP) from t group by tm;";
        checkDDLErrorMessage(ddl, errorMatviewMsg);

        ddl = "create table t(id integer not null, tm timestamp);\n" +
              "create view my_view as select tm, count(*), count(NOW) from t group by tm;";
        checkDDLErrorMessage(ddl, errorMatviewMsg);

        ddl = "create table t(id integer not null, tm timestamp);\n" +
              "create view my_view as select tm, count(*) from t " +
              "where since_epoch(second, CURRENT_TIMESTAMP) - since_epoch(second, tm) > 60 " +
              "group by tm;";
        checkDDLErrorMessage(ddl, errorMatviewMsg);
    }

    private static final String msgP = "does not include the partitioning column";
    private static final String msgPR =
            "ASSUMEUNIQUE is not valid for an index that includes the partitioning column. " +
            "Please use UNIQUE instead";
    private static final String msgR =
            "ASSUMEUNIQUE is not valid for replicated tables. " +
            "Please use UNIQUE instead";

    public void testColumnUniqueGiveException() {
        String schema;

        // (1) ****** Replicate tables
        // A unique index on the non-primary key for replicated table gets no error.
        schema = "create table t0 (id bigint not null, name varchar(32) not null UNIQUE, age integer, primary key (id));\n";
        checkValidUniqueAndAssumeUnique(schema, null, msgR);

        // Similar to above, but use a different way to define unique column.
        schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " +
                "primary key (id), UNIQUE (name) );\n";
        checkValidUniqueAndAssumeUnique(schema, null, msgR);

        // (2) ****** Partition Table: UNIQUE valid, ASSUMEUNIQUE not valid
        // A unique index on the partitioning key ( no primary key) gets no error.
        schema = "create table t0 (id bigint not null UNIQUE, name varchar(32) not null, age integer);\n" +
                "PARTITION TABLE t0 ON COLUMN id;\n";
        checkValidUniqueAndAssumeUnique(schema, null, msgPR);

        // Similar to above, but use a different way to define unique column.
        schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " +
                "primary key (id), UNIQUE(id) );\n" +
                "PARTITION TABLE t0 ON COLUMN id;\n";
        checkValidUniqueAndAssumeUnique(schema, null, msgPR);

        // A unique index on the partitioning key ( also primary key) gets no error.
        schema = "create table t0 (id bigint not null UNIQUE, name varchar(32) not null, age integer, primary key (id));\n" +
                "PARTITION TABLE t0 ON COLUMN id;\n";
        checkValidUniqueAndAssumeUnique(schema, null, msgPR);

        // A unique compound index on the partitioning key and another column gets no error.
schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " + "UNIQUE (id, age), primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique index on the partitioning key and an expression like abs(age) gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " + "primary key (id), UNIQUE (id, abs(age)) );\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // (3) ****** Partition Table: UNIQUE not valid // A unique index on the partitioning key ( non-primary key) gets one error. schema = "create table t0 (id bigint not null, name varchar(32) not null UNIQUE, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;\n"; checkValidUniqueAndAssumeUnique(schema, msgP, msgPR); // A unique index on the partitioning key ( no primary key) gets one error. schema = "create table t0 (id bigint not null, name varchar(32) not null UNIQUE, age integer);\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on the non-partitioning key gets one error. schema = "create table t0 (id bigint not null, name varchar(32) UNIQUE, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on an unrelated expression like abs(age) gets a error. schema = "create table t0 (id bigint not null, name varchar(32), age integer, UNIQUE (abs(age)), primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on an expression of the partitioning key like substr(1, 2, name) gets two errors. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " + "primary key (id), UNIQUE (substr(name, 1, 2 )) );\n" + "PARTITION TABLE t0 ON COLUMN name;\n"; // 1) unique index, 2) primary key checkValidUniqueAndAssumeUnique(schema, msgP, msgP); // A unique index on the non-partitioning key, non-partitioned column gets two errors. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer UNIQUE, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;\n"; // 1) unique index, 2) primary key checkValidUniqueAndAssumeUnique(schema, msgP, msgP); } private void checkDDLErrorMessage(String ddl, String errorMsg) { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); boolean expectSuccess = errorMsg == null ? 
true : false; assertEquals(expectSuccess, success); if (!expectSuccess) { assertTrue(isFeedbackPresent(errorMsg, compiler.m_errors)); } } private void checkValidUniqueAndAssumeUnique(String ddl, String errorUnique, String errorAssumeUnique) { checkDDLErrorMessage(ddl, errorUnique); checkDDLErrorMessage(ddl.replace("UNIQUE", "ASSUMEUNIQUE"), errorAssumeUnique); } public void testUniqueIndexGiveException() { String schema; // (1) ****** Replicate tables // A unique index on the non-primary key for replicated table gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "CREATE UNIQUE INDEX user_index0 ON t0 (name) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgR); // (2) ****** Partition Table: UNIQUE valid, ASSUMEUNIQUE not valid // A unique index on the partitioning key ( no primary key) gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer);\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index1 ON t0 (id) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique index on the partitioning key ( also primary key) gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index2 ON t0 (id) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique compound index on the partitioning key and another column gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index3 ON t0 (id, age) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique index on the partitioning key and an expression like abs(age) gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index4 ON t0 (id, abs(age)) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // (3) ****** Partition Table: UNIQUE not valid // A unique index on the partitioning key ( no primary key) gets one error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer);\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index7 ON t0 (name) ;"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on the non-partitioning key gets one error. schema = "create table t0 (id bigint not null, name varchar(32), age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index8 ON t0 (name) ;"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on an unrelated expression like abs(age) gets a error. schema = "create table t0 (id bigint not null, name varchar(32), age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index9 ON t0 (abs(age)) ;"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on the partitioning key ( non-primary key) gets one error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;"; checkValidUniqueAndAssumeUnique(schema, msgP, msgP); // A unique index on an expression of the partitioning key like substr(1, 2, name) gets two errors. 
schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;\n" + "CREATE UNIQUE INDEX user_index10 ON t0 (substr(name, 1, 2 )) ;"; // 1) unique index, 2) primary key checkValidUniqueAndAssumeUnique(schema, msgP, msgP); // A unique index on the non-partitioning key, non-partitioned column gets two errors. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;\n" + "CREATE UNIQUE INDEX user_index12 ON t0 (age) ;"; // 1) unique index, 2) primary key checkValidUniqueAndAssumeUnique(schema, msgP, msgP); } public void testDDLCompilerMatView() { // Test MatView. String ddl; ddl = "create table t(id integer not null, num integer);\n" + "create view my_view as select num, count(*) from t group by num order by num;"; checkDDLErrorMessage(ddl, "Materialized view \"MY_VIEW\" with ORDER BY clause is not supported."); ddl = "create table t(id integer not null, num integer, wage integer);\n" + "create view my_view1 (num, total, sumwage) " + "as select num, count(*), sum(wage) from t group by num; \n" + "create view my_view2 (num, total, sumwage) " + "as select num, count(*), sum(sumwage) from my_view1 group by num; "; checkDDLErrorMessage(ddl, "A materialized view (MY_VIEW2) can not be defined on another view (MY_VIEW1)"); } public void testDDLCompilerTableLimit() { String ddl; // test failed cases ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS 6xx);"; checkDDLErrorMessage(ddl, "unexpected token: XX"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS 66666666666666666666666666666666);"; checkDDLErrorMessage(ddl, "incompatible data type in operation"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS -10);"; checkDDLErrorMessage(ddl, "Invalid constraint limit number '-10'"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS 5, CONSTRAINT tblimit2 LIMIT PARTITION ROWS 7);"; checkDDLErrorMessage(ddl, "Too many table limit constraints for table T"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION Row 6);"; checkDDLErrorMessage(ddl, "unexpected token: ROW required: ROWS"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT Rows 6);"; checkDDLErrorMessage(ddl, "unexpected token: ROWS required: PARTITION"); // Test success cases ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS 6);"; checkDDLErrorMessage(ddl, null); ddl = "create table t(id integer not null, num integer," + "LIMIT PARTITION ROWS 6);"; checkDDLErrorMessage(ddl, null); } public void testPartitionOnBadType() { final String simpleSchema = "create table books (cash float default 0.0 NOT NULL, title varchar(10) default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<partitions><partition table='books' column='cash'/></partitions> " + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = 
VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testOmittedProcedureList() { final String simpleSchema = "create table books (cash float default 0.0 NOT NULL, title varchar(10) default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void test3324MPPlan() throws IOException { final String simpleSchema = "create table blah (pkey integer not null, strval varchar(200), PRIMARY KEY(pkey));\n"; VoltProjectBuilder pb = new VoltProjectBuilder(); pb.enableDiagnostics(); pb.addLiteralSchema(simpleSchema); pb.addPartitionInfo("blah", "pkey"); pb.addStmtProcedure("undeclaredspquery1", "select strval UNDECLARED1 from blah where pkey = ?"); pb.addStmtProcedure("undeclaredspquery2", "select strval UNDECLARED2 from blah where pkey = 12"); pb.addStmtProcedure("declaredspquery1", "select strval SODECLARED1 from blah where pkey = ?", "blah.pkey:0"); // Currently no way to do this? // pb.addStmtProcedure("declaredspquery2", "select strval SODECLARED2 from blah where pkey = 12", "blah.pkey=12"); boolean success = pb.compile(Configuration.getPathToCatalogForTest("test3324.jar")); assertTrue(success); List<String> diagnostics = pb.harvestDiagnostics(); // This asserts that the undeclared SP plans don't mistakenly get SP treatment // -- they must each include a RECEIVE plan node. assertEquals(2, countStringsMatching(diagnostics, ".*\"UNDECLARED.\".*\"PLAN_NODE_TYPE\":\"RECEIVE\".*")); // This asserts that the methods used to prevent undeclared SP plans from getting SP treatment // don't over-reach to declared SP plans. 
assertEquals(0, countStringsMatching(diagnostics, ".*\"SODECLARED.\".*\"PLAN_NODE_TYPE\":\"RECEIVE\".*")); // System.out.println("test3324MPPlan"); // System.out.println(diagnostics); } public void testBadDDLErrorLineNumber() throws IOException { final String schema = "-- a comment\n" + // 1 "create table books (\n" + // 2 " id integer default 0,\n" + // 3 " strval varchar(33000) default '',\n" + // 4 " PRIMARY KEY(id)\n" + // 5 ");\n" + // 6 "\n" + // 7 "-- another comment\n" + // 8 "create view badview (\n" + // 9 * error reported here * " id,\n" + " COUNT(*),\n" + " total\n" + " as\n" + "select id, COUNT(*), SUM(cnt)\n" + " from books\n" + " group by id;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(schema); final String schemaPath = schemaFile.getPath(); final String project = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(project); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); for (Feedback error: compiler.m_errors) { assertEquals(9, error.lineNo); } } public void testInvalidCreateProcedureDDL() throws Exception { ArrayList<Feedback> fbs; String expectedError; fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NonExistentPartitionParamInteger;" + "PARTITION PROCEDURE NonExistentPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Cannot load class for procedure: org.voltdb.compiler.procedures.NonExistentPartitionParamInteger"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "PARTITION PROCEDURE NotDefinedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Partition in referencing an undefined procedure \"NotDefinedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamInteger;" + "PARTITION PROCEDURE PartitionParamInteger ON TABLE PKEY_WHAAAT COLUMN PKEY;" ); expectedError = "PartitionParamInteger has partition properties defined both in class " + "\"org.voltdb.compiler.procedures.PartitionParamInteger\" and in the schema defintion file(s)"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_WHAAAT COLUMN PKEY;" ); expectedError = "PartitionInfo for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger refers to a column " + "in schema which can't be found."; 
assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PSURROGATE;" ); expectedError = "PartitionInfo for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger refers to a column " + "in schema which can't be found."; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 8;" ); expectedError = "PartitionInfo specifies invalid parameter index for procedure: " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM GLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Invalid CREATE PROCEDURE statement: " + "\"CREATE PROCEDURE FROM GLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger\"" + ", expected syntax: \"CREATE PROCEDURE"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger FOR TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Invalid PARTITION statement: \"PARTITION PROCEDURE " + "NotAnnotatedPartitionParamInteger FOR TABLE PKEY_INTEGER COLUMN PKEY\", " + "expected syntax: PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER CLUMN PKEY PARMTR 0;" ); expectedError = "Invalid PARTITION statement: \"PARTITION PROCEDURE " + "NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER CLUMN PKEY PARMTR 0\", " + "expected syntax: PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER hello;" ); expectedError = "Invalid 
PARTITION statement: \"PARTITION PROCEDURE " + "NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER hello\", " + "expected syntax: PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROGEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER hello;" ); expectedError = "Invalid PARTITION statement: " + "\"PARTITION PROGEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER " + "COLUMN PKEY PARAMETER hello\", expected syntax: \"PARTITION TABLE <table> " + "ON COLUMN <column>\" or \"PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE OUTOF CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 2;" ); expectedError = "Invalid CREATE PROCEDURE statement: " + "\"CREATE PROCEDURE OUTOF CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger\"" + ", expected syntax: \"CREATE PROCEDURE"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "MAKE PROCEDURE OUTOF CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 2;" ); expectedError = "DDL Error: \"unexpected token: MAKE\" in statement starting on lineno: 1"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE 1PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN;" ); expectedError = "Unknown indentifier in DDL: \"PARTITION TABLE 1PKEY_INTEGER ON COLUMN PKEY\" " + "contains invalid identifier \"1PKEY_INTEGER\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN 2PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \"PARTITION TABLE PKEY_INTEGER ON COLUMN 2PKEY\" " + "contains invalid identifier \"2PKEY\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS 0rg.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION 
PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "CREATE PROCEDURE FROM CLASS 0rg.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger" + "\" contains invalid identifier \"0rg.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.3compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "CREATE PROCEDURE FROM CLASS org.voltdb.3compiler.procedures.NotAnnotatedPartitionParamInteger" + "\" contains invalid identifier \"org.voltdb.3compiler.procedures.NotAnnotatedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.4NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.4NotAnnotatedPartitionParamInteger" + "\" contains invalid identifier \"org.voltdb.compiler.procedures.4NotAnnotatedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE 5NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "PARTITION PROCEDURE 5NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY" + "\" contains invalid identifier \"5NotAnnotatedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE 6PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE 6PKEY_INTEGER COLUMN PKEY" + "\" contains invalid identifier \"6PKEY_INTEGER\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN 7PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN 7PKEY" + "\" contains invalid identifier \"7PKEY\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER 
( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger TABLE PKEY_INTEGER ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Invalid PARTITION statement: \"PARTITION PROCEDURE " + "NotAnnotatedPartitionParamInteger TABLE PKEY_INTEGER ON TABLE PKEY_INTEGER COLUMN PKEY\", " + "expected syntax: PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]"; assertTrue(isFeedbackPresent(expectedError, fbs)); } public void testInvalidSingleStatementCreateProcedureDDL() throws Exception { ArrayList<Feedback> fbs; String expectedError; fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS BANBALOO pkey FROM PKEY_INTEGER;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Failed to plan for statement (sql) BANBALOO pkey FROM PKEY_INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS SELEC pkey FROM PKEY_INTEGER;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 0;" ); expectedError = "Failed to plan for statement (sql) SELEC pkey FROM PKEY_INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS DELETE FROM PKEY_INTEGER WHERE PKEY = ?;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 2;" ); expectedError = "PartitionInfo specifies invalid parameter index for procedure: Foo"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS DELETE FROM PKEY_INTEGER;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "PartitionInfo specifies invalid parameter index for procedure: Foo"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE org.kanamuri.Foo AS DELETE FROM PKEY_INTEGER;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "PartitionInfo specifies invalid parameter index for procedure: org.kanamuri.Foo"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE 7Foo AS DELETE FROM PKEY_INTEGER WHERE PKEY = ?;" + "PARTITION PROCEDURE 7Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "CREATE PROCEDURE 7Foo AS DELETE FROM PKEY_INTEGER WHERE PKEY = ?" 
+ "\" contains invalid identifier \"7Foo\""; assertTrue(isFeedbackPresent(expectedError, fbs)); } public void testInvalidGtroovyProcedureDDL() throws Exception { ArrayList<Feedback> fbs; String expectedError; if (Float.parseFloat(System.getProperty("java.specification.version")) < 1.7) return; fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "user lacks privilege or object not found: PKEY"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " \n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Procedure \"Foo\" code block has syntax errors"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " runMeInstead = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Procedure \"Foo\" code block does not contain the required \"transactOn\" closure"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + "package voltkv.procedures;\n" + "\n" + "import org.voltdb.*;\n" + "\n" + "@ProcInfo(partitionInfo=\"store.key:0\", singlePartition=true)\n" + "public class Put extends VoltProcedure {\n" + " // Checks if key exists\n" + " public final SQLStmt checkStmt = new SQLStmt(\"SELECT key FROM store WHERE key = ?;\");\n" + " // Updates a key/value pair\n" + " public final SQLStmt updateStmt = new SQLStmt(\"UPDATE store SET value = ? 
WHERE key = ?;\");\n" + " // Inserts a key/value pair\n" + " public final SQLStmt insertStmt = new SQLStmt(\"INSERT INTO store (key, value) VALUES (?, ?);\");\n" + "\n" + " public VoltTable[] run(String key, byte[] value) {\n" + " // Check whether the pair exists\n" + " voltQueueSQL(checkStmt, key);\n" + " // Insert new or update existing key depending on result\n" + " if (voltExecuteSQL()[0].getRowCount() == 0)\n" + " voltQueueSQL(insertStmt, key, value);\n" + " else\n" + " voltQueueSQL(updateStmt, value, key);\n" + " return voltExecuteSQL(true);\n" + " }\n" + "}\n" + "### LANGUAGE GROOVY;\n" ); expectedError = "Procedure \"voltkv.procedures.Put\" is not a groovy script"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = 'Is it me that you wanted instead?'\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Procedure \"Foo\" code block does not contain the required \"transactOn\" closure"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " // ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Schema file ended mid-statement (no semicolon found)"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ##\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Schema file ended mid-statement (no semicolon found)"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE KROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "### LANGUAGE KROOVY\", expected syntax: \"CREATE PROCEDURE [ALLOW"; assertTrue(isFeedbackPresent(expectedError, fbs)); } public void testValidGroovyProcedureDDL() throws Exception { if (Float.parseFloat(System.getProperty("java.specification.version")) < 1.7) return; Database db = goodDDLAgainstSimpleSchema( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = 
new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); Procedure proc = db.getProcedures().get("Foo"); assertNotNull(proc); db = goodDDLAgainstSimpleSchema( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " // #\n" + " // ##\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " def str = '# ## # ##'\n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); proc = db.getProcedures().get("Foo"); assertNotNull(proc); db = goodDDLAgainstSimpleSchema( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE \n" + "PROCEDURE Foo \n" + " AS \n" + "###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "###\n" + " LANGUAGE \n" + "GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); proc = db.getProcedures().get("Foo"); assertNotNull(proc); } private ArrayList<Feedback> checkInvalidProcedureDDL(String ddl) { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); return compiler.m_errors; } public void testValidAnnotatedProcedureDLL() throws Exception { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;" + "creAte PrOcEdUrE FrOm CLasS org.voltdb.compiler.procedures.AddBook;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); final Database db = c2.getClusters().get("cluster").getDatabases().get("database"); final Procedure addBook = db.getProcedures().get("AddBook"); assertEquals(true, addBook.getSinglepartition()); } public void 
testValidNonAnnotatedProcedureDDL() throws Exception { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;" + "create procedure from class org.voltdb.compiler.procedures.NotAnnotatedAddBook;" + "paRtItiOn prOcEdure NotAnnotatedAddBook On taBLe books coLUmN cash ParaMETer 0;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); final Database db = c2.getClusters().get("cluster").getDatabases().get("database"); final Procedure addBook = db.getProcedures().get("NotAnnotatedAddBook"); assertEquals(true, addBook.getSinglepartition()); } class TestRole { final String name; boolean adhoc = false; boolean sysproc = false; boolean defaultproc = false; public TestRole(String name) { this.name = name; } public TestRole(String name, boolean adhoc, boolean sysproc, boolean defaultproc) { this.name = name; this.adhoc = adhoc; this.sysproc = sysproc; this.defaultproc = defaultproc; } } private void checkRoleXMLAndDDL(String rolesElem, String ddl, String errorRegex, TestRole... roles) throws Exception { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl != null ? ddl : ""); final String schemaPath = schemaFile.getPath(); String rolesBlock = (rolesElem != null ? String.format("<roles>%s</roles>", rolesElem) : ""); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + rolesBlock + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); String error = (success || compiler.m_errors.size() == 0 ? 
"" : compiler.m_errors.get(compiler.m_errors.size()-1).message); if (errorRegex == null) { assertTrue(String.format("Expected success\nXML: %s\nDDL: %s\nERR: %s", rolesElem, ddl, error), success); Database db = compiler.getCatalog().getClusters().get("cluster").getDatabases().get("database"); CatalogMap<Group> groups = db.getGroups(); CatalogMap<Connector> connectors = db.getConnectors(); if (connectors.get("0") == null ) { connectors.add("0"); } assertNotNull(groups); assertEquals(roles.length, groups.size()); for (TestRole role : roles) { Group group = groups.get(role.name); assertNotNull(String.format("Missing role \"%s\"", role.name), group); assertEquals(String.format("Role \"%s\" adhoc flag mismatch:", role.name), role.adhoc, group.getAdhoc()); assertEquals(String.format("Role \"%s\" sysproc flag mismatch:", role.name), role.sysproc, group.getSysproc()); assertEquals(String.format("Role \"%s\" defaultproc flag mismatch:", role.name), role.defaultproc, group.getDefaultproc()); } } else { assertFalse(String.format("Expected error (\"%s\")\nXML: %s\nDDL: %s", errorRegex, rolesElem, ddl), success); assertFalse("Expected at least one error message.", error.isEmpty()); Matcher m = Pattern.compile(errorRegex).matcher(error); assertTrue(String.format("%s\nEXPECTED: %s", error, errorRegex), m.matches()); } } private void goodRoleDDL(String ddl, TestRole... roles) throws Exception { checkRoleXMLAndDDL(null, ddl, null, roles); } private void badRoleDDL(String ddl, String errorRegex) throws Exception { checkRoleXMLAndDDL(null, ddl, errorRegex); } public void testRoleXML() throws Exception { checkRoleXMLAndDDL("<role name='r1'/>", null, null, new TestRole("r1")); } public void testBadRoleXML() throws Exception { checkRoleXMLAndDDL("<rolex name='r1'/>", null, ".*rolex.*[{]role[}].*expected.*"); checkRoleXMLAndDDL("<role name='r1'/>", "create role r1;", ".*already exists.*"); } public void testRoleDDL() throws Exception { goodRoleDDL("create role r1;", new TestRole("r1")); goodRoleDDL("create role r1;create role r2;", new TestRole("r1"), new TestRole("r2")); goodRoleDDL("create role r1 with adhoc;", new TestRole("r1", true, false, false)); goodRoleDDL("create role r1 with sysproc;", new TestRole("r1", false, true, false)); goodRoleDDL("create role r1 with defaultproc;", new TestRole("r1", false, false, true)); goodRoleDDL("create role r1 with adhoc,sysproc,defaultproc;", new TestRole("r1", true, true, true)); goodRoleDDL("create role r1 with adhoc,sysproc,sysproc;", new TestRole("r1", true, true, false)); goodRoleDDL("create role r1 with AdHoc,SysProc,DefaultProc;", new TestRole("r1", true, true, true)); } public void testBadRoleDDL() throws Exception { badRoleDDL("create role r1", ".*no semicolon.*"); badRoleDDL("create role r1;create role r1;", ".*already exists.*"); badRoleDDL("create role r1 with ;", ".*Invalid CREATE ROLE statement.*"); badRoleDDL("create role r1 with blah;", ".*Invalid permission \"blah\".*"); badRoleDDL("create role r1 with adhoc sysproc;", ".*Invalid CREATE ROLE statement.*"); badRoleDDL("create role r1 with adhoc, blah;", ".*Invalid permission \"blah\".*"); } private Database checkDDLAgainstSimpleSchema(String errorRegex, String... 
ddl) throws Exception { String schemaDDL = "create table books (cash integer default 23 NOT NULL, title varbinary(10) default NULL, PRIMARY KEY(cash)); " + "partition table books on column cash;" + StringUtils.join(ddl, " "); File schemaFile = VoltProjectBuilder.writeStringToTempFile(schemaDDL.toString()); String schemaPath = schemaFile.getPath(); String projectXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "</database>" + "</project>"; File projectFile = VoltProjectBuilder.writeStringToTempFile(projectXML); String projectPath = projectFile.getPath(); VoltCompiler compiler = new VoltCompiler(); boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); String error = (success || compiler.m_errors.size() == 0 ? "" : compiler.m_errors.get(compiler.m_errors.size()-1).message); if (errorRegex == null) { assertTrue(String.format("Expected success\nDDL: %s\n%s", ddl, error), success); Catalog cat = compiler.getCatalog(); return cat.getClusters().get("cluster").getDatabases().get("database"); } else { assertFalse(String.format("Expected error (\"%s\")\nDDL: %s", errorRegex, ddl), success); assertFalse("Expected at least one error message.", error.isEmpty()); Matcher m = Pattern.compile(errorRegex).matcher(error); assertTrue(String.format("%s\nEXPECTED: %s", error, errorRegex), m.matches()); return null; } } private Database goodDDLAgainstSimpleSchema(String... ddl) throws Exception { return checkDDLAgainstSimpleSchema(null, ddl); } private void badDDLAgainstSimpleSchema(String errorRegex, String... ddl) throws Exception { checkDDLAgainstSimpleSchema(errorRegex, ddl); } public void testGoodCreateProcedureWithAllow() throws Exception { Database db = goodDDLAgainstSimpleSchema( "create role r1;", "create procedure p1 allow r1 as select * from books;"); Procedure proc = db.getProcedures().get("p1"); assertNotNull(proc); CatalogMap<GroupRef> groups = proc.getAuthgroups(); assertEquals(1, groups.size()); assertNotNull(groups.get("r1")); db = goodDDLAgainstSimpleSchema( "create role r1;", "create role r2;", "create procedure p1 allow r1, r2 as select * from books;"); proc = db.getProcedures().get("p1"); assertNotNull(proc); groups = proc.getAuthgroups(); assertEquals(2, groups.size()); assertNotNull(groups.get("r1")); assertNotNull(groups.get("r2")); db = goodDDLAgainstSimpleSchema( "create role r1;", "create procedure allow r1 from class org.voltdb.compiler.procedures.AddBook;"); proc = db.getProcedures().get("AddBook"); assertNotNull(proc); groups = proc.getAuthgroups(); assertEquals(1, groups.size()); assertNotNull(groups.get("r1")); db = goodDDLAgainstSimpleSchema( "create role r1;", "create role r2;", "create procedure allow r1,r2 from class org.voltdb.compiler.procedures.AddBook;"); proc = db.getProcedures().get("AddBook"); assertNotNull(proc); groups = proc.getAuthgroups(); assertEquals(2, groups.size()); assertNotNull(groups.get("r1")); assertNotNull(groups.get("r2")); db = goodDDLAgainstSimpleSchema( "create role r1;", "create procedure allow r1,r1 from class org.voltdb.compiler.procedures.AddBook;"); proc = db.getProcedures().get("AddBook"); assertNotNull(proc); groups = proc.getAuthgroups(); assertEquals(1, groups.size()); assertNotNull(groups.get("r1")); } public void testBadCreateProcedureWithAllow() throws Exception { badDDLAgainstSimpleSchema(".*expected syntax.*", "create procedure p1 allow as select * from books;"); badDDLAgainstSimpleSchema(".*expected syntax.*", "create 
procedure p1 allow a b as select * from books;"); badDDLAgainstSimpleSchema(".*group rx that does not exist.*", "create procedure p1 allow rx as select * from books;"); badDDLAgainstSimpleSchema(".*group rx that does not exist.*", "create role r1;", "create procedure p1 allow r1, rx as select * from books;"); } private ConnectorTableInfo getConnectorTableInfoFor( Database db, String tableName) { Connector connector = db.getConnectors().get("0"); if( connector == null) return null; return connector.getTableinfo().getIgnoreCase(tableName); } public void testGoodExportTable() throws Exception { Database db; db = goodDDLAgainstSimpleSchema( "create table e1 (id integer, f1 varchar(16));", "export table e1;" ); assertNotNull(getConnectorTableInfoFor(db, "e1")); db = goodDDLAgainstSimpleSchema( "create table e1 (id integer, f1 varchar(16));", "create table e2 (id integer, f1 varchar(16));", "export table e1;", "eXpOrt TABle E2;" ); assertNotNull(getConnectorTableInfoFor(db, "e1")); assertNotNull(getConnectorTableInfoFor(db, "e2")); } public void testBadExportTable() throws Exception { badDDLAgainstSimpleSchema(".+\\sexport, table non_existant was not present in the catalog.*", "export table non_existant;" ); badDDLAgainstSimpleSchema(".+contains invalid identifier \"1table_name_not_valid\".*", "export table 1table_name_not_valid;" ); badDDLAgainstSimpleSchema(".+Invalid EXPORT TABLE statement.*", "export table one, two, three;" ); badDDLAgainstSimpleSchema(".+Invalid EXPORT TABLE statement.*", "export export table one;" ); badDDLAgainstSimpleSchema(".+Invalid EXPORT TABLE statement.*", "export table table one;" ); badDDLAgainstSimpleSchema("Table with indexes configured as an export table.*", "export table books;" ); badDDLAgainstSimpleSchema("Export table configured with materialized view.*", "create table view_source( id integer, f1 varchar(16), f2 varchar(12));", "create view my_view as select f2, count(*) as f2cnt from view_source group by f2;", "export table view_source;" ); badDDLAgainstSimpleSchema("View configured as an export table.*", "create table view_source( id integer, f1 varchar(16), f2 varchar(12));", "create view my_view as select f2, count(*) as f2cnt from view_source group by f2;", "export table my_view;" ); badDDLAgainstSimpleSchema("Table \"E1\" is already exported.*", "create table e1( id integer, f1 varchar(16), f2 varchar(12));", "export table e1;", "export table E1;" ); } public void testCompileFromDDL() throws IOException { final String simpleSchema1 = "create table table1r_el (pkey integer, column2_integer integer, PRIMARY KEY(pkey));\n" + "create view v_table1r_el (column2_integer, num_rows) as\n" + "select column2_integer as column2_integer,\n" + "count(*) as num_rows\n" + "from table1r_el\n" + "group by column2_integer;\n" + "create view v_table1r_el2 (column2_integer, num_rows) as\n" + "select column2_integer as column2_integer,\n" + "count(*) as num_rows\n" + "from table1r_el\n" + "group by column2_integer\n;\n"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); boolean success = compileFromDDL(compiler, testout_jar, schemaPath); assertTrue(success); success = compileFromDDL(compiler, testout_jar, schemaPath + "???"); assertFalse(success); success = compileFromDDL(compiler, testout_jar); assertFalse(success); } private int countStringsMatching(List<String> diagnostics, String pattern) { int count = 0; for (String string : diagnostics) { 
if (string.matches(pattern)) { ++count; } } return count; } public void testAlterTable() throws IOException { final String simpleSchema1 = "create table mytable (pkey integer, column2_integer integer);\n" + "alter table mytable add column newcol varchar(50);\n"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testDropTable() throws IOException { final String simpleSchema1 = "create table mytable (pkey integer, column2_integer integer);\n" + "drop table mytable;\n"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testDropTableIfExists() throws IOException { final String simpleSchema1 = "create table mytable (pkey integer, column2_integer integer);\n" + "drop table mytablenonexistant if exists;\n"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } }
tests/frontend/org/voltdb/compiler/TestVoltCompiler.java
/* This file is part of VoltDB. * Copyright (C) 2008-2014 VoltDB Inc. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. */ package org.voltdb.compiler; import java.io.File; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLDecoder; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import junit.framework.TestCase; import org.apache.commons.lang3.StringUtils; import org.voltdb.ProcInfoData; import org.voltdb.VoltDB.Configuration; import org.voltdb.VoltType; import org.voltdb.benchmark.tpcc.TPCCProjectBuilder; import org.voltdb.catalog.Catalog; import org.voltdb.catalog.CatalogMap; import org.voltdb.catalog.Column; import org.voltdb.catalog.Connector; import org.voltdb.catalog.ConnectorTableInfo; import org.voltdb.catalog.Database; import org.voltdb.catalog.Group; import org.voltdb.catalog.GroupRef; import org.voltdb.catalog.Procedure; import org.voltdb.catalog.SnapshotSchedule; import org.voltdb.catalog.Table; import org.voltdb.compiler.VoltCompiler.Feedback; import org.voltdb.compiler.VoltCompiler.VoltCompilerException; import org.voltdb.types.IndexType; import org.voltdb.utils.BuildDirectoryUtils; import org.voltdb.utils.CatalogUtil; public class TestVoltCompiler extends TestCase { String nothing_jar; String testout_jar; @Override public void setUp() { nothing_jar = BuildDirectoryUtils.getBuildDirectoryPath() + File.pathSeparator + "nothing.jar"; testout_jar = BuildDirectoryUtils.getBuildDirectoryPath() + File.pathSeparator + "testout.jar"; } @Override public void tearDown() { File njar = new File(nothing_jar); njar.delete(); File tjar = new File(testout_jar); tjar.delete(); } public void testBrokenLineParsing() throws IOException { final String simpleSchema1 = "create table table1r_el (pkey integer, column2_integer integer, PRIMARY KEY(pkey));\n" + "create view v_table1r_el (column2_integer, num_rows) as\n" + "select column2_integer as column2_integer,\n" + "count(*) as num_rows\n" + "from table1r_el\n" + "group by column2_integer;\n" + "create view v_table1r_el2 (column2_integer, num_rows) as\n" + "select column2_integer as column2_integer,\n" + "count(*) as num_rows\n" + "from table1r_el\n" + "group by column2_integer\n;\n"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database 
name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures>" + "<procedure class='Foo'>" + "<sql>select * from table1r_el;</sql>" + "</procedure>" + "</procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testUTF8XMLFromHSQL() throws IOException { final String simpleSchema = "create table blah (pkey integer not null, strval varchar(200), PRIMARY KEY(pkey));\n"; VoltProjectBuilder pb = new VoltProjectBuilder(); pb.addLiteralSchema(simpleSchema); pb.addStmtProcedure("utf8insert", "insert into blah values(1, 'něco za nic')"); pb.addPartitionInfo("blah", "pkey"); boolean success = pb.compile(Configuration.getPathToCatalogForTest("utf8xml.jar")); assertTrue(success); } private boolean isFeedbackPresent(String expectedError, ArrayList<Feedback> fbs) { for (Feedback fb : fbs) { if (fb.getStandardFeedbackLine().contains(expectedError)) { return true; } } return false; } public void testMismatchedPartitionParams() throws IOException { String expectedError; ArrayList<Feedback> fbs; fbs = checkPartitionParam("CREATE TABLE PKEY_BIGINT ( PKEY BIGINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_BIGINT ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamBigint", "PKEY_BIGINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamBigint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.BIGINT and partition parameter is type VoltType.STRING"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_BIGINT ( PKEY BIGINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_BIGINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamBigint;", "PKEY_BIGINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamBigint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.BIGINT and partition parameter is type VoltType.STRING"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_BIGINT ( PKEY BIGINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_BIGINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamBigint;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamBigint ON TABLE PKEY_BIGINT COLUMN PKEY;", "PKEY_BIGINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamBigint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.BIGINT and partition parameter is type VoltType.STRING"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamInteger", "PKEY_INTEGER"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamInteger may 
cause overflow or loss of precision.\n" + "Partition column is type VoltType.INTEGER and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamInteger;", "PKEY_INTEGER"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamInteger may cause overflow or loss of precision.\n" + "Partition column is type VoltType.INTEGER and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;", "PKEY_INTEGER"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger may cause overflow or loss of precision.\n" + "Partition column is type VoltType.INTEGER and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_SMALLINT ( PKEY SMALLINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_SMALLINT ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamSmallint", "PKEY_SMALLINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamSmallint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.SMALLINT and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_SMALLINT ( PKEY SMALLINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_SMALLINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamSmallint;", "PKEY_SMALLINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamSmallint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.SMALLINT and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_SMALLINT ( PKEY SMALLINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_SMALLINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamSmallint;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamSmallint ON TABLE PKEY_SMALLINT COLUMN PKEY;", "PKEY_SMALLINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamSmallint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.SMALLINT and partition parameter " + "is type VoltType.BIGINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_TINYINT ( PKEY TINYINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE 
PKEY_TINYINT ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamTinyint", "PKEY_TINYINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamTinyint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.TINYINT and partition parameter " + "is type VoltType.SMALLINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_TINYINT ( PKEY TINYINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_TINYINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamTinyint;", "PKEY_TINYINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamTinyint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.TINYINT and partition parameter " + "is type VoltType.SMALLINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_TINYINT ( PKEY TINYINT NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_TINYINT ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamTinyint;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamTinyint ON TABLE PKEY_TINYINT COLUMN PKEY;", "PKEY_TINYINT"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamTinyint may cause overflow or loss of precision.\n" + "Partition column is type VoltType.TINYINT and partition parameter " + "is type VoltType.SMALLINT"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_STRING ( PKEY VARCHAR(32) NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_STRING ON COLUMN PKEY;", "org.voltdb.compiler.procedures.PartitionParamString", "PKEY_STRING"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamString may cause overflow or loss of precision.\n" + "Partition column is type VoltType.STRING and partition parameter " + "is type VoltType.INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_STRING ( PKEY VARCHAR(32) NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_STRING ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamString;", "PKEY_STRING"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.PartitionParamString may cause overflow or loss of precision.\n" + "Partition column is type VoltType.STRING and partition parameter " + "is type VoltType.INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkPartitionParam("CREATE TABLE PKEY_STRING ( PKEY VARCHAR(32) NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_STRING ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamString;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamString ON TABLE PKEY_STRING COLUMN PKEY;", "PKEY_STRING"); expectedError = "Type mismatch between partition column and partition parameter for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamString may cause overflow or loss of precision.\n" + "Partition column is type VoltType.STRING 
and partition parameter " + "is type VoltType.INTEGER";
        assertTrue(isFeedbackPresent(expectedError, fbs));
    }

    private ArrayList<Feedback> checkPartitionParam(String ddl, String procedureClass, String table) {
        final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl);
        final String schemaPath = schemaFile.getPath();

        final String simpleProject =
            "<?xml version=\"1.0\"?>\n" +
            "<project>" +
            "<database name='database'>" +
            "<schemas>" +
            "<schema path='" + schemaPath + "' />" +
            "</schemas>" +
            "<procedures>" +
            "<procedure class='" + procedureClass + "' />" +
            "</procedures>" +
            "</database>" +
            "</project>";

        final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject);
        final String projectPath = projectFile.getPath();

        final VoltCompiler compiler = new VoltCompiler();
        final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar);
        assertFalse(success);
        return compiler.m_errors;
    }

    private ArrayList<Feedback> checkPartitionParam(String ddl, String table) {
        final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl);
        final String schemaPath = schemaFile.getPath();

        final String simpleProject =
            "<?xml version=\"1.0\"?>\n" +
            "<project>" +
            "<database name='database'>" +
            "<schemas>" +
            "<schema path='" + schemaPath + "' />" +
            "</schemas>" +
            "<procedures/>" +
            "</database>" +
            "</project>";

        final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject);
        final String projectPath = projectFile.getPath();

        final VoltCompiler compiler = new VoltCompiler();
        final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar);
        assertFalse(success);
        return compiler.m_errors;
    }

    public void testSnapshotSettings() throws IOException {
        String schemaPath = "";
        try {
            final URL url = TPCCProjectBuilder.class.getResource("tpcc-ddl.sql");
            schemaPath = URLDecoder.decode(url.getPath(), "UTF-8");
        } catch (final UnsupportedEncodingException e) {
            e.printStackTrace();
            System.exit(-1);
        }
        VoltProjectBuilder builder = new VoltProjectBuilder();
        builder.addProcedures(org.voltdb.compiler.procedures.TPCCTestProc.class);
        builder.setSnapshotSettings("32m", 5, "/tmp", "woobar");
        builder.addSchema(schemaPath);
        try {
            assertTrue(builder.compile("/tmp/snapshot_settings_test.jar"));
            final String catalogContents =
                VoltCompilerUtils.readFileFromJarfile("/tmp/snapshot_settings_test.jar", "catalog.txt");
            final Catalog cat = new Catalog();
            cat.execute(catalogContents);
            CatalogUtil.compileDeployment(cat, builder.getPathToDeployment(), true, false);
            SnapshotSchedule schedule = cat.getClusters().get("cluster").getDatabases().
                get("database").getSnapshotschedule().get("default");
            assertEquals(32, schedule.getFrequencyvalue());
            assertEquals("m", schedule.getFrequencyunit());
            //Will be empty because the deployment file initialization is what sets this value
            assertEquals("/tmp", schedule.getPath());
            assertEquals("woobar", schedule.getPrefix());
        } finally {
            final File jar = new File("/tmp/snapshot_settings_test.jar");
            jar.delete();
        }
    }

    // TestExportSuite tests most of these options are tested end-to-end; however need to test
    // that a disabled connector is really disabled and that auth data is correct.
    public void testExportSetting() throws IOException {
        final VoltProjectBuilder project = new VoltProjectBuilder();
        project.addSchema(getClass().getResource("ExportTester-ddl.sql"));
        project.addExport(false /* disabled */);
        project.setTableAsExportOnly("A");
        project.setTableAsExportOnly("B");
        try {
            boolean success = project.compile("/tmp/exportsettingstest.jar");
            assertTrue(success);
            final String catalogContents =
                VoltCompilerUtils.readFileFromJarfile("/tmp/exportsettingstest.jar", "catalog.txt");
            final Catalog cat = new Catalog();
            cat.execute(catalogContents);
            Connector connector = cat.getClusters().get("cluster").getDatabases().
                get("database").getConnectors().get("0");
            assertFalse(connector.getEnabled());
        } finally {
            final File jar = new File("/tmp/exportsettingstest.jar");
            jar.delete();
        }
    }

    // test that Export configuration is insensitive to the case of the table name
    public void testExportTableCase() throws IOException {
        final VoltProjectBuilder project = new VoltProjectBuilder();
        project.addSchema(TestVoltCompiler.class.getResource("ExportTester-ddl.sql"));
        project.addStmtProcedure("Dummy", "insert into a values (?, ?, ?);", "a.a_id: 0");
        project.addPartitionInfo("A", "A_ID");
        project.addPartitionInfo("B", "B_ID");
        project.addPartitionInfo("e", "e_id");
        project.addPartitionInfo("f", "f_id");
        project.addExport(true /* enabled */);
        project.setTableAsExportOnly("A"); // uppercase DDL, uppercase export
        project.setTableAsExportOnly("b"); // uppercase DDL, lowercase export
        project.setTableAsExportOnly("E"); // lowercase DDL, uppercase export
        project.setTableAsExportOnly("f"); // lowercase DDL, lowercase export
        try {
            assertTrue(project.compile("/tmp/exportsettingstest.jar"));
            final String catalogContents =
                VoltCompilerUtils.readFileFromJarfile("/tmp/exportsettingstest.jar", "catalog.txt");
            final Catalog cat = new Catalog();
            cat.execute(catalogContents);
            CatalogUtil.compileDeployment(cat, project.getPathToDeployment(), true, false);
            Connector connector = cat.getClusters().get("cluster").getDatabases().
get("database").getConnectors().get("0"); assertTrue(connector.getEnabled()); // Assert that all tables exist in the connector section of catalog assertNotNull(connector.getTableinfo().getIgnoreCase("a")); assertNotNull(connector.getTableinfo().getIgnoreCase("b")); assertNotNull(connector.getTableinfo().getIgnoreCase("e")); assertNotNull(connector.getTableinfo().getIgnoreCase("f")); } finally { final File jar = new File("/tmp/exportsettingstest.jar"); jar.delete(); } } // test that the source table for a view is not export only public void testViewSourceNotExportOnly() throws IOException { final VoltProjectBuilder project = new VoltProjectBuilder(); project.addSchema(TestVoltCompiler.class.getResource("ExportTesterWithView-ddl.sql")); project.addStmtProcedure("Dummy", "select * from v_table1r_el_only"); project.addExport(true /* enabled */); project.setTableAsExportOnly("table1r_el_only"); try { assertFalse(project.compile("/tmp/exporttestview.jar")); } finally { final File jar = new File("/tmp/exporttestview.jar"); jar.delete(); } } // test that a view is not export only public void testViewNotExportOnly() throws IOException { final VoltProjectBuilder project = new VoltProjectBuilder(); project.addSchema(TestVoltCompiler.class.getResource("ExportTesterWithView-ddl.sql")); project.addStmtProcedure("Dummy", "select * from table1r_el_only"); project.addExport(true /* enabled */); project.setTableAsExportOnly("v_table1r_el_only"); try { assertFalse(project.compile("/tmp/exporttestview.jar")); } finally { final File jar = new File("/tmp/exporttestview.jar"); jar.delete(); } } public void testBadPath() { final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML("invalidnonsense", nothing_jar); assertFalse(success); } public void testXSDSchemaOrdering() throws IOException { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String schemaPath = schemaFile.getPath(); final String project = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas>" + "<schema path='" + schemaPath + "'/>" + "</schemas>" + "<procedures>" + "<procedure class='proc'><sql>select * from T</sql></procedure>" + "</procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(project); final String projectPath = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); boolean success = compiler.compileWithProjectXML(projectPath, nothing_jar); assertTrue(success); } public void testXMLFileWithDeprecatedElements() { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String schemaPath = schemaFile.getPath(); final String project = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas>" + "<schema path='" + schemaPath + "'/>" + "</schemas>" + "<procedures>" + "<procedure class='proc'><sql>select * from T</sql></procedure>" + "</procedures>" + "</database>" + "<security enabled='true'/>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(project); final String path = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); boolean success = compiler.compileWithProjectXML(path, nothing_jar); assertFalse(success); assertTrue( isFeedbackPresent("Found deprecated XML element \"security\"", compiler.m_errors) ); } public void testXMLFileWithInvalidSchemaReference() { final String simpleXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + 
"<schemas><schema path='my schema file.sql' /></schemas>" + "<procedures><procedure class='procedures/procs.jar' /></procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(simpleXML); final String projectPath = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, nothing_jar); assertFalse(success); } public void testXMLFileWithSchemaError() { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String simpleXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='baddbname'>" + "<schemas>" + "<schema path='" + schemaFile.getAbsolutePath() + "'/>" + "</schemas>" + // invalid project file: no procedures // "<procedures>" + // "<procedure class='proc'><sql>select * from T</sql></procedure>" + //"</procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(simpleXML); final String projectPath = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, nothing_jar); assertFalse(success); } public void testXMLFileWithWrongDBName() { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String simpleXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='baddbname'>" + "<schemas>" + "<schema path='" + schemaFile.getAbsolutePath() + "'/>" + "</schemas>" + "<procedures>" + "<procedure class='proc'><sql>select * from T</sql></procedure>" + "</procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(simpleXML); final String projectPath = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, nothing_jar); assertFalse(success); } public void testXMLFileWithDefaultDBName() { final File schemaFile = VoltProjectBuilder.writeStringToTempFile("create table T(ID INTEGER);"); final String simpleXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas>" + "<schema path='" + schemaFile.getAbsolutePath() + "'/>" + "</schemas>" + "<procedures>" + "<procedure class='proc'><sql>select * from T</sql></procedure>" + "</procedures>" + "</database>" + "</project>"; final File xmlFile = VoltProjectBuilder.writeStringToTempFile(simpleXML); final String path = xmlFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(path, nothing_jar); assertTrue(success); assertTrue(compiler.m_catalog.getClusters().get("cluster").getDatabases().get("database") != null); } public void testBadClusterConfig() throws IOException { // check no hosts ClusterConfig cluster_config = new ClusterConfig(0, 1, 0); assertFalse(cluster_config.validate()); // check no sites-per-hosts cluster_config = new ClusterConfig(1, 0, 0); assertFalse(cluster_config.validate()); } public void testXMLFileWithDDL() throws IOException { final String simpleSchema1 = "create table books (cash integer default 23 NOT NULL, title varchar(3) default 'foo', PRIMARY KEY(cash)); " + "PARTITION TABLE books ON COLUMN cash;"; // newline inserted to test catalog friendliness final String simpleSchema2 = "create table books2\n (cash integer default 23 NOT NULL, title varchar(3) default 'foo', PRIMARY KEY(cash));"; final File schemaFile1 = 
VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath1 = schemaFile1.getPath(); final File schemaFile2 = VoltProjectBuilder.writeStringToTempFile(simpleSchema2); final String schemaPath2 = schemaFile2.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<!-- xml comment check -->" + "<database name='database'>" + "<!-- xml comment check -->" + "<schemas>" + "<!-- xml comment check -->" + "<schema path='" + schemaPath1 + "' />" + "<schema path='" + schemaPath2 + "' />" + "<!-- xml comment check -->" + "</schemas>" + "<!-- xml comment check -->" + "<procedures>" + "<!-- xml comment check -->" + "<procedure class='org.voltdb.compiler.procedures.AddBook' />" + "<procedure class='Foo'>" + "<sql>select * from books;</sql>" + "</procedure>" + "</procedures>" + "<!-- xml comment check -->" + "</database>" + "<!-- xml comment check -->" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } public void testProcWithBoxedParam() throws IOException { final String simpleSchema = "create table books (cash integer default 23, title varchar(3) default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures>" + "<procedure class='org.voltdb.compiler.procedures.AddBookBoxed' />" + "</procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testDDLWithNoLengthString() throws IOException { // DO NOT COPY PASTE THIS INVALID EXAMPLE! 
final String simpleSchema1 = "create table books (cash integer default 23, title varchar default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures>" + "<procedure class='org.voltdb.compiler.procedures.AddBook' />" + "<procedure class='Foo'>" + "<sql>select * from books;</sql>" + "</procedure>" + "</procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testDDLWithLongStringInCharacters() throws IOException { int length = VoltType.MAX_VALUE_LENGTH_IN_CHARACTERS + 10; final String simpleSchema1 = "create table books (cash integer default 23, " + "title varchar("+length+") default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); // Check warnings assertEquals(1, compiler.m_warnings.size()); String warningMsg = compiler.m_warnings.get(0).getMessage(); String expectedMsg = "The size of VARCHAR column TITLE in table BOOKS greater than " + "262144 will be enforced as byte counts rather than UTF8 character counts. 
" + "To eliminate this warning, specify \"VARCHAR(262154 BYTES)\""; assertEquals(expectedMsg, warningMsg); Database db = compiler.getCatalog().getClusters().get("cluster").getDatabases().get("database"); Column var = db.getTables().get("BOOKS").getColumns().get("TITLE"); assertTrue(var.getInbytes()); } public void testDDLWithTooLongVarbinaryVarchar() throws IOException { int length = VoltType.MAX_VALUE_LENGTH + 10; String simpleSchema1 = "create table books (cash integer default 23, " + "title varbinary("+length+") , PRIMARY KEY(cash));"; String error1 = "VARBINARY column size for column BOOKS.TITLE is > " + VoltType.MAX_VALUE_LENGTH+" char maximum."; checkDDLErrorMessage(simpleSchema1, error1); String simpleSchema2 = "create table books (cash integer default 23, " + "title varchar("+length+") , PRIMARY KEY(cash));"; String error2 = "VARCHAR column size for column BOOKS.TITLE is > " + VoltType.MAX_VALUE_LENGTH+" char maximum."; checkDDLErrorMessage(simpleSchema2, error2); } public void testNullablePartitionColumn() throws IOException { final String simpleSchema = "create table books (cash integer default 23, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "partition table books on column cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook'/></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); boolean found = false; for (final VoltCompiler.Feedback fb : compiler.m_errors) { if (fb.message.indexOf("Partition column") > 0) found = true; } assertTrue(found); } public void testXMLFileWithBadDDL() throws IOException { final String simpleSchema = "create table books (id integer default 0, strval varchar(33000) default '', PRIMARY KEY(id));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } // NOTE: TPCCTest proc also tests whitespaces regressions in SQL literals public void testWithTPCCDDL() { String schemaPath = ""; try { final URL url = TPCCProjectBuilder.class.getResource("tpcc-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure 
class='org.voltdb.compiler.procedures.TPCCTestProc' /></procedures>" + "</database>" + "</project>"; //System.out.println(simpleProject); final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testSeparateCatalogCompilation() throws IOException { String schemaPath = ""; try { final URL url = TPCCProjectBuilder.class.getResource("tpcc-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.TPCCTestProc' /></procedures>" + "</database>" + "</project>"; //System.out.println(simpleProject); final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler1 = new VoltCompiler(); final VoltCompiler compiler2 = new VoltCompiler(); final Catalog catalog = compileCatalogFromProject(compiler1, projectPath); final String cat1 = catalog.serialize(); final boolean success = compiler2.compileWithProjectXML(projectPath, testout_jar); final String cat2 = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); assertTrue(success); assertTrue(cat1.compareTo(cat2) == 0); } private Catalog compileCatalogFromProject( final VoltCompiler compiler, final String projectPath) { try { return compiler.compileCatalogFromProject(projectPath); } catch (VoltCompilerException e) { e.printStackTrace(); fail(); return null; } } private boolean compileFromDDL( final VoltCompiler compiler, final String jarPath, final String... 
schemaPaths) { try { return compiler.compileFromDDL(jarPath, schemaPaths); } catch (VoltCompilerException e) { e.printStackTrace(); fail(); return false; } } public void testDDLTableTooManyColumns() throws IOException { String schemaPath = ""; try { final URL url = TestVoltCompiler.class.getResource("toowidetable-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.TPCCTestProc' /></procedures>" + "</database>" + "</project>"; //System.out.println(simpleProject); final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); boolean found = false; for (final VoltCompiler.Feedback fb : compiler.m_errors) { if (fb.message.startsWith("Table MANY_COLUMNS has")) found = true; } assertTrue(found); } public void testExtraFilesExist() throws IOException { String schemaPath = ""; try { final URL url = TPCCProjectBuilder.class.getResource("tpcc-ddl.sql"); schemaPath = URLDecoder.decode(url.getPath(), "UTF-8"); } catch (final UnsupportedEncodingException e) { e.printStackTrace(); System.exit(-1); } final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.TPCCTestProc' /></procedures>" + "</database>" + "</project>"; //System.out.println(simpleProject); final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String sql = VoltCompilerUtils.readFileFromJarfile(testout_jar, VoltCompiler.AUTOGEN_DDL_FILE_NAME); assertNotNull(sql); } public void testXMLFileWithELEnabled() throws IOException { final String simpleSchema = "create table books (cash integer default 23 NOT NULL, title varchar(3) default 'foo');"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + " <database name='database'>" + " <partitions><partition table='books' column='cash'/></partitions> " + " <schemas><schema path='" + schemaPath + "' /></schemas>" + " <procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + " <export>" + " <tables><table name='books'/></tables>" + " </export>" + " </database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); //System.out.println("PRINTING Catalog 1"); //System.out.println(c1.serialize()); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog 
c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } public void testOverrideProcInfo() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final ProcInfoData info = new ProcInfoData(); info.singlePartition = true; info.partitionInfo = "BOOKS.CASH: 0"; final Map<String, ProcInfoData> overrideMap = new HashMap<String, ProcInfoData>(); overrideMap.put("AddBook", info); final VoltCompiler compiler = new VoltCompiler(); compiler.setProcInfoOverrides(overrideMap); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); final Database db = c2.getClusters().get("cluster").getDatabases().get("database"); final Procedure addBook = db.getProcedures().get("AddBook"); assertEquals(true, addBook.getSinglepartition()); } public void testOverrideNonAnnotatedProcInfo() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;" + "create procedure from class org.voltdb.compiler.procedures.AddBook;" + "partition procedure AddBook ON TABLE books COLUMN cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final ProcInfoData info = new ProcInfoData(); info.singlePartition = true; info.partitionInfo = "BOOKS.CASH: 0"; final Map<String, ProcInfoData> overrideMap = new HashMap<String, ProcInfoData>(); overrideMap.put("AddBook", info); final VoltCompiler compiler = new VoltCompiler(); compiler.setProcInfoOverrides(overrideMap); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); final Database db = c2.getClusters().get("cluster").getDatabases().get("database"); final Procedure addBook = db.getProcedures().get("AddBook"); assertEquals(true, addBook.getSinglepartition()); } public void testBadStmtProcName() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(10) default 'foo', PRIMARY KEY(cash));"; final File schemaFile = 
VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='@Foo'><sql>select * from books;</sql></procedure></procedures>" + "<partitions><partition table='BOOKS' column='CASH' /></partitions>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testBadDdlStmtProcName() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(10) default 'foo', PRIMARY KEY(cash));" + "create procedure @Foo as select * from books;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "<partitions><partition table='BOOKS' column='CASH' /></partitions>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testGoodStmtProcName() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='Foo'><sql>select * from books;</sql></procedure></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testGoodDdlStmtProcName() throws IOException { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;" + "CREATE PROCEDURE Foo AS select * from books where cash = ?;" + "PARTITION PROCEDURE Foo ON TABLE BOOKS COLUMN CASH PARAMETER 0;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = 
compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void testMaterializedView() throws IOException { final String simpleSchema = "create table books (cash integer default 23 NOT NULL, title varchar(10) default 'foo', PRIMARY KEY(cash));\n" + "partition table books on column cash;\n" + "create view matt (title, cash, num, foo) as select title, cash, count(*), sum(cash) from books group by title, cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); // final ClusterConfig cluster_config = new ClusterConfig(1, 1, 0, "localhost"); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } public void testVarbinary() throws IOException { final String simpleSchema = "create table books (cash integer default 23 NOT NULL, title varbinary(10) default NULL, PRIMARY KEY(cash));" + "partition table books on column cash;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures>" + "<procedure class='get'><sql>select * from books;</sql></procedure>" + "<procedure class='i1'><sql>insert into books values(5, 'AA');</sql></procedure>" + "<procedure class='i2'><sql>insert into books values(5, ?);</sql></procedure>" + "<procedure class='s1'><sql>update books set title = 'bb';</sql></procedure>" + "</procedures>" + //"<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); // final ClusterConfig cluster_config = new ClusterConfig(1, 1, 0, "localhost"); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } public void testDdlProcVarbinary() throws IOException { final String simpleSchema = "create table books (cash integer default 23 NOT NULL, title varbinary(10) default NULL, PRIMARY KEY(cash));" + "partition table books on column cash;" + "create procedure get as select * from books;" + "create procedure i1 as insert into books values(5, 'AA');" + "create procedure i2 as insert into books values(5, ?);" + "create procedure s1 as update books set title = 
'bb';" + "create procedure i3 as insert into books values( ?, ?);" + "partition procedure i3 on table books column cash;" + "create procedure d1 as delete from books where title = ? and cash = ?;" + "partition procedure d1 on table books column cash parameter 1;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + //"<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); // final ClusterConfig cluster_config = new ClusterConfig(1, 1, 0, "localhost"); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final Catalog c1 = compiler.getCatalog(); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); assertTrue(c2.serialize().equals(c1.serialize())); } // // There are DDL tests in a number of places. TestDDLCompiler seems more about // verifying HSQL behaviour. Additionally, there are users of PlannerAideDeCamp // that verify plans for various DDL/SQL combinations. // // I'm going to add some DDL parsing validation tests here, as they seem to have // more to do with compiling a catalog, and there are some related tests already // in this file. // private VoltCompiler compileForDDLTest(String schemaPath, boolean expectSuccess) { final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='sample'><sql>select * from t</sql></procedure></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); projectFile.deleteOnExit(); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertEquals(expectSuccess, success); return compiler; } private String getPathForSchema(String s) { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(s); schemaFile.deleteOnExit(); return schemaFile.getPath(); } public void testDDLCompilerLeadingGarbage() throws IOException { final String s = "-- a valid comment\n" + "- an invalid comment\n" + "create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), false); assertTrue(c.hasErrors()); } public void testDDLCompilerLeadingWhitespace() throws IOException { final String s = " \n" + "\n" + "create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerLeadingComment() throws IOException { final String s = "-- this is a leading comment\n" + " -- with some leading whitespace\n" + " create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors());
assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerLeadingCommentAndHashMarks() throws IOException { final String s = "-- ### this is a leading comment\n" + " -- with some ### leading whitespace\n" + " create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerNoNewlines() throws IOException { final String s = "create table t(id integer); create table r(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 2); } public void testDDLCompilerSplitLines() throws IOException { final String s = "create\n" + "table\n" + "t(id\n" + "integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment1() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + "-- and a line full of comments\n" + ";\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment2() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + ";\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingCommentAndHashMarks() throws IOException { final String s = "create table t(id varchar(128) default '###') -- ### this ###### is a trailing comment\n" + ";\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment3() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + "-- and a line full of comments\n" + ";"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment4() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + ";"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment5() throws IOException { final String s = "create table t(id integer) -- this is a trailing comment\n" + "-- and a line full of comments\n" + " ;\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerTrailingComment6() throws IOException { final String 
s = "create table t(id integer) -- this is a trailing comment\n" + "-- and a line full of comments\n" + " ;\n" + "-- ends with a comment\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerInvalidStatement() throws IOException { final String s = "create table t for justice -- with a comment\n"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), false); assertTrue(c.hasErrors()); } public void testDDLCompilerCommentThatLooksLikeStatement() throws IOException { final String s = "create table t(id integer); -- create table r(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); } public void testDDLCompilerLeadingSemicolon() throws IOException { final String s = "; create table t(id integer);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), false); assertTrue(c.hasErrors()); } public void testDDLCompilerMultipleStatementsOnMultipleLines() throws IOException { final String s = "create table t(id integer); create\n" + "table r(id integer); -- second table"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 2); } public void testDDLCompilerStringLiteral() throws IOException { final String s = "create table t(id varchar(3) default 'abc');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); assertTrue(defaultvalue.equalsIgnoreCase("abc")); } public void testDDLCompilerSemiColonInStringLiteral() throws IOException { final String s = "create table t(id varchar(5) default 'a;bc');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); assertTrue(defaultvalue.equalsIgnoreCase("a;bc")); } public void testDDLCompilerDashDashInStringLiteral() throws IOException { final String s = "create table t(id varchar(5) default 'a--bc');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); assertTrue(defaultvalue.equalsIgnoreCase("a--bc")); } public void testDDLCompilerNewlineInStringLiteral() throws IOException { final String s = "create table t(id varchar(5) default 'a\n" + "bc');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); 
assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); // In the debugger, this looks valid at parse time but is mangled somewhere // later, perhaps in HSQL or in the catalog assembly? // ENG-681 System.out.println(defaultvalue); // assertTrue(defaultvalue.equalsIgnoreCase("a\nbc")); } public void testDDLCompilerEscapedStringLiterals() throws IOException { final String s = "create table t(id varchar(10) default 'a''b''''c');"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().size() == 1); Table tbl = c.m_catalog.getClusters().get("cluster").getDatabases().get("database").getTables().getIgnoreCase("t"); String defaultvalue = tbl.getColumns().getIgnoreCase("id").getDefaultvalue(); assertTrue(defaultvalue.equalsIgnoreCase("a'b''c")); } // Test that DDLCompiler's index creation adheres to the rules implicit in // the EE's tableindexfactory. Currently (10/3/2010) these are: // All column types can be used in a tree array. Only int types can // be used in hash tables or array indexes String[] column_types = {"tinyint", "smallint", "integer", "bigint", "float", "varchar(10)", "timestamp", "decimal"}; IndexType[] default_index_types = {IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE, IndexType.BALANCED_TREE}; boolean[] can_be_hash = {true, true, true, true, false, false, true, false}; boolean[] can_be_tree = {true, true, true, true, true, true, true, true}; public void testDDLCompilerIndexDefaultTypes() { for (int i = 0; i < column_types.length; i++) { String s = "create table t(id " + column_types[i] + " not null, num integer not null);\n" + "create index idx_t_id on t(id);\n" + "create index idx_t_idnum on t(id,num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertEquals(default_index_types[i].getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_id").getType()); assertEquals(default_index_types[i].getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_idnum").getType()); } } public void testDDLCompilerHashIndexAllowed() { for (int i = 0; i < column_types.length; i++) { final String s = "create table t(id " + column_types[i] + " not null, num integer not null);\n" + "create index idx_t_id_hash on t(id);\n" + "create index idx_t_idnum_hash on t(id,num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), can_be_hash[i]); if (can_be_hash[i]) { // do appropriate index exists checks assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertEquals(IndexType.HASH_TABLE.getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_id_hash").getType()); assertEquals(IndexType.HASH_TABLE.getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_idnum_hash").getType()); } else { assertTrue(c.hasErrors()); } } } public void testUniqueIndexAllowed() { final String s = "create table t(id integer not null, num integer not 
null);\n" + "create unique index idx_t_unique on t(id,num);\n" + "create index idx_t on t(num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertTrue(d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_unique").getUnique()); assertFalse(d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t").getUnique()); // also validate that simple column indexes don't trigger the generalized expression index handling String noExpressionFound = ""; assertEquals(noExpressionFound, d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_unique").getExpressionsjson()); assertEquals(noExpressionFound, d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t").getExpressionsjson()); } public void testFunctionIndexAllowed() { final String s = "create table t(id integer not null, num integer not null);\n" + "create unique index idx_ft_unique on t(abs(id+num));\n" + "create index idx_ft on t(abs(num));"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertTrue(d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_ft_unique").getUnique()); assertFalse(d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_ft").getUnique()); // Validate that general expression indexes get properly annotated with an expressionjson attribute String noExpressionFound = ""; assertNotSame(noExpressionFound, d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_ft_unique").getExpressionsjson()); assertNotSame(noExpressionFound, d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_ft").getExpressionsjson()); } public void testDDLCompilerVarcharTreeIndexAllowed() { for (int i = 0; i < column_types.length; i++) { final String s = "create table t(id " + column_types[i] + " not null, num integer not null);\n" + "create index idx_t_id_tree on t(id);\n" + "create index idx_t_idnum_tree on t(id,num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), can_be_tree[i]); assertFalse(c.hasErrors()); Database d = c.m_catalog.getClusters().get("cluster").getDatabases().get("database"); assertEquals(IndexType.BALANCED_TREE.getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_id_tree").getType()); assertEquals(IndexType.BALANCED_TREE.getValue(), d.getTables().getIgnoreCase("t").getIndexes().getIgnoreCase("idx_t_idnum_tree").getType()); } } public void testDDLCompilerTwoIdenticalIndexes() { final String s = "create table t(id integer not null, num integer not null);\n" + "create index idx_t_idnum1 on t(id,num);\n" + "create index idx_t_idnum2 on t(id,num);"; VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertFalse(c.hasErrors()); assertTrue(c.hasErrorsOrWarnings()); } public void testDDLCompilerSameNameIndexesOnTwoTables() { final String s = "create table t1(id integer not null, num integer not null);\n" + "create table t2(id integer not null, num integer not null);\n" + "create index idx_t_idnum on t1(id,num);\n" + "create index idx_t_idnum on t2(id,num);"; // if this test ever fails, it's worth figuring out why // When written, HSQL wouldn't allow two indexes with the same name, // even across tables. 
compileForDDLTest(getPathForSchema(s), false); } public void testDDLCompilerTwoCoveringIndexes() { final String s = "create table t(id integer not null, num integer not null);\n" + "create index idx_t_idnum_hash on t(id,num);\n" + "create index idx_t_idnum_tree on t(id,num);"; compileForDDLTest(getPathForSchema(s), true); } public void testDDLCompilerTwoSwappedOrderIndexes() { final String s = "create table t(id integer not null, num integer not null);\n" + "create index idx_t_idnum_a on t(num,id);\n" + "create index idx_t_idnum_b on t(id,num);"; final VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertEquals(false, c.hasErrorsOrWarnings()); } public void testDDLCompilerDropOneOfThreeIndexes() { final String s = "create table t(id integer not null, num integer not null);\n" + "create index idx_t_idnum_a on t(num,id);\n" + "create index idx_t_idnum_b on t(id,num);\n" + "create index idx_t_idnum_c on t(id,num);\n"; final VoltCompiler c = compileForDDLTest(getPathForSchema(s), true); assertEquals(true, c.hasErrorsOrWarnings()); int foundCount = 0; for (VoltCompiler.Feedback f : c.m_warnings) { if (f.message.contains("Dropping index")) { foundCount++; } } assertEquals(1, foundCount); } public void testDDLCompilerUniqueAndNonUniqueIndexOnSameColumns() { final String s = "create table t(id integer not null, num integer not null);\n" + "create unique index idx_t_idnum_unique on t(id,num);\n" + "create index idx_t_idnum on t(id,num);"; compileForDDLTest(getPathForSchema(s), true); } public void testDDLCompilerTwoIndexesWithSameName() { final String s = "create table t(id integer not null, num integer not null);\n" + "create index idx_t_idnum on t(id);\n" + "create index idx_t_idnum on t(id,num);"; compileForDDLTest(getPathForSchema(s), false); } public void testDDLCompilerIndexesOrMatViewContainSQLFunctionNOW() { // Test indexes. String ddl = ""; String errorIndexMsg = "Index IDX_T_TM cannot include the function NOW or CURRENT_TIMESTAMP."; ddl = "create table t(id integer not null, tm timestamp);\n" + "create index idx_t_tm on t(since_epoch(second, CURRENT_TIMESTAMP) - since_epoch(second, tm));"; checkDDLErrorMessage(ddl, errorIndexMsg); ddl = "create table t(id integer not null, tm timestamp);\n" + "create index idx_t_tm on t(since_epoch(second, NOW) - since_epoch(second, tm));"; checkDDLErrorMessage(ddl, errorIndexMsg); ddl = "create table t(id integer not null, tm timestamp);\n" + "create index idx_t_tm on t(CURRENT_TIMESTAMP);"; checkDDLErrorMessage(ddl, errorIndexMsg); // Test MatView. 
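        // (Added note, not part of the original test:) indexes and materialized views are
        // maintained at write time, so a non-deterministic function such as NOW or
        // CURRENT_TIMESTAMP would make their contents irreproducible across replicas and
        // recoveries; that is why the compiler rejects it in both places. A deterministic
        // alternative is to group or filter by a stored timestamp column instead, e.g. (sketch):
        //   create view my_view as select tm, count(*) from t group by tm;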
String errorMatviewMsg = "Materialized view \"MY_VIEW\" cannot include the function NOW or CURRENT_TIMESTAMP."; ddl = "create table t(id integer not null, tm timestamp);\n" + "create view my_view as select since_epoch(second, CURRENT_TIMESTAMP) - since_epoch(second, tm), " + "count(*) from t group by since_epoch(second, CURRENT_TIMESTAMP) - since_epoch(second, tm);"; checkDDLErrorMessage(ddl, errorMatviewMsg); ddl = "create table t(id integer not null, tm timestamp);\n" + "create view my_view as select since_epoch(second, NOW) - since_epoch(second, tm), " + "count(*) from t group by since_epoch(second, NOW) - since_epoch(second, tm);"; checkDDLErrorMessage(ddl, errorMatviewMsg); ddl = "create table t(id integer not null, tm timestamp);\n" + "create view my_view as select tm, count(*), count(CURRENT_TIMESTAMP) from t group by tm;"; checkDDLErrorMessage(ddl, errorMatviewMsg); ddl = "create table t(id integer not null, tm timestamp);\n" + "create view my_view as select tm, count(*), count(NOW) from t group by tm;"; checkDDLErrorMessage(ddl, errorMatviewMsg); ddl = "create table t(id integer not null, tm timestamp);\n" + "create view my_view as select tm, count(*) from t " + "where since_epoch(second, CURRENT_TIMESTAMP) - since_epoch(second, tm) > 60 " + "group by tm;"; checkDDLErrorMessage(ddl, errorMatviewMsg); } private static final String msgP = "does not include the partitioning column"; private static final String msgPR = "ASSUMEUNIQUE is not valid for an index that includes the partitioning column. " + "Please use UNIQUE instead"; private static final String msgR = "ASSUMEUNIQUE is not valid for replicated tables. " + "Please use UNIQUE instead"; public void testColumnUniqueGiveException() { String schema; // (1) ****** Replicate tables // A unique index on the non-primary key for replicated table gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null UNIQUE, age integer, primary key (id));\n"; checkValidUniqueAndAssumeUnique(schema, null, msgR); // Similar to above, but use a different way to define unique column. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " + "primary key (id), UNIQUE (name) );\n"; checkValidUniqueAndAssumeUnique(schema, null, msgR); // (2) ****** Partition Table: UNIQUE valid, ASSUMEUNIQUE not valid // A unique index on the partitioning key ( no primary key) gets no error. schema = "create table t0 (id bigint not null UNIQUE, name varchar(32) not null, age integer);\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // Similar to above, but use a different way to define unique column. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " + "primary key (id), UNIQUE(id) );\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique index on the partitioning key ( also primary key) gets no error. schema = "create table t0 (id bigint not null UNIQUE, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique compound index on the partitioning key and another column gets no error. 
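        // (Added clarification:) because the compound constraint below includes the
        // partitioning column (id), any potential duplicates necessarily land in the same
        // partition, so a plain UNIQUE constraint can be enforced locally and compiles
        // cleanly; the ASSUMEUNIQUE variant of the same constraint is rejected with msgPR,
        // since ASSUMEUNIQUE is reserved for constraints VoltDB cannot verify locally.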
schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " + "UNIQUE (id, age), primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique index on the partitioning key and an expression like abs(age) gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " + "primary key (id), UNIQUE (id, abs(age)) );\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // (3) ****** Partition Table: UNIQUE not valid // A unique index on the partitioning key ( non-primary key) gets one error. schema = "create table t0 (id bigint not null, name varchar(32) not null UNIQUE, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;\n"; checkValidUniqueAndAssumeUnique(schema, msgP, msgPR); // A unique index on the partitioning key ( no primary key) gets one error. schema = "create table t0 (id bigint not null, name varchar(32) not null UNIQUE, age integer);\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on the non-partitioning key gets one error. schema = "create table t0 (id bigint not null, name varchar(32) UNIQUE, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on an unrelated expression like abs(age) gets a error. schema = "create table t0 (id bigint not null, name varchar(32), age integer, UNIQUE (abs(age)), primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on an expression of the partitioning key like substr(1, 2, name) gets two errors. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, " + "primary key (id), UNIQUE (substr(name, 1, 2 )) );\n" + "PARTITION TABLE t0 ON COLUMN name;\n"; // 1) unique index, 2) primary key checkValidUniqueAndAssumeUnique(schema, msgP, msgP); // A unique index on the non-partitioning key, non-partitioned column gets two errors. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer UNIQUE, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;\n"; // 1) unique index, 2) primary key checkValidUniqueAndAssumeUnique(schema, msgP, msgP); } private void checkDDLErrorMessage(String ddl, String errorMsg) { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); boolean expectSuccess = errorMsg == null ? 
true : false; assertEquals(expectSuccess, success); if (!expectSuccess) { assertTrue(isFeedbackPresent(errorMsg, compiler.m_errors)); } } private void checkValidUniqueAndAssumeUnique(String ddl, String errorUnique, String errorAssumeUnique) { checkDDLErrorMessage(ddl, errorUnique); checkDDLErrorMessage(ddl.replace("UNIQUE", "ASSUMEUNIQUE"), errorAssumeUnique); } public void testUniqueIndexGiveException() { String schema; // (1) ****** Replicate tables // A unique index on the non-primary key for replicated table gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "CREATE UNIQUE INDEX user_index0 ON t0 (name) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgR); // (2) ****** Partition Table: UNIQUE valid, ASSUMEUNIQUE not valid // A unique index on the partitioning key ( no primary key) gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer);\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index1 ON t0 (id) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique index on the partitioning key ( also primary key) gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index2 ON t0 (id) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique compound index on the partitioning key and another column gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index3 ON t0 (id, age) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // A unique index on the partitioning key and an expression like abs(age) gets no error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index4 ON t0 (id, abs(age)) ;"; checkValidUniqueAndAssumeUnique(schema, null, msgPR); // (3) ****** Partition Table: UNIQUE not valid // A unique index on the partitioning key ( no primary key) gets one error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer);\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index7 ON t0 (name) ;"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on the non-partitioning key gets one error. schema = "create table t0 (id bigint not null, name varchar(32), age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index8 ON t0 (name) ;"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on an unrelated expression like abs(age) gets a error. schema = "create table t0 (id bigint not null, name varchar(32), age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN id;\n" + "CREATE UNIQUE INDEX user_index9 ON t0 (abs(age)) ;"; checkValidUniqueAndAssumeUnique(schema, msgP, null); // A unique index on the partitioning key ( non-primary key) gets one error. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;"; checkValidUniqueAndAssumeUnique(schema, msgP, msgP); // A unique index on an expression of the partitioning key like substr(1, 2, name) gets two errors. 
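        // (Added clarification:) an expression over the partitioning column, such as
        // substr(name, 1, 2), is not treated as including the column itself, and the
        // primary key on id also omits the partitioning column (name), so both constraints
        // draw the "does not include the partitioning column" error -- which is what the
        // "1) unique index, 2) primary key" note below refers to.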
schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;\n" + "CREATE UNIQUE INDEX user_index10 ON t0 (substr(name, 1, 2 )) ;"; // 1) unique index, 2) primary key checkValidUniqueAndAssumeUnique(schema, msgP, msgP); // A unique index on the non-partitioning key, non-partitioned column gets two errors. schema = "create table t0 (id bigint not null, name varchar(32) not null, age integer, primary key (id));\n" + "PARTITION TABLE t0 ON COLUMN name;\n" + "CREATE UNIQUE INDEX user_index12 ON t0 (age) ;"; // 1) unique index, 2) primary key checkValidUniqueAndAssumeUnique(schema, msgP, msgP); } public void testDDLCompilerMatView() { // Test MatView. String ddl; ddl = "create table t(id integer not null, num integer);\n" + "create view my_view as select num, count(*) from t group by num order by num;"; checkDDLErrorMessage(ddl, "Materialized view \"MY_VIEW\" with ORDER BY clause is not supported."); ddl = "create table t(id integer not null, num integer, wage integer);\n" + "create view my_view1 (num, total, sumwage) " + "as select num, count(*), sum(wage) from t group by num; \n" + "create view my_view2 (num, total, sumwage) " + "as select num, count(*), sum(sumwage) from my_view1 group by num; "; checkDDLErrorMessage(ddl, "A materialized view (MY_VIEW2) can not be defined on another view (MY_VIEW1)"); } public void testDDLCompilerTableLimit() { String ddl; // test failed cases ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS 6xx);"; checkDDLErrorMessage(ddl, "unexpected token: XX"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS 66666666666666666666666666666666);"; checkDDLErrorMessage(ddl, "incompatible data type in operation"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS -10);"; checkDDLErrorMessage(ddl, "Invalid constraint limit number '-10'"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS 5, CONSTRAINT tblimit2 LIMIT PARTITION ROWS 7);"; checkDDLErrorMessage(ddl, "Too many table limit constraints for table T"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION Row 6);"; checkDDLErrorMessage(ddl, "unexpected token: ROW required: ROWS"); ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT Rows 6);"; checkDDLErrorMessage(ddl, "unexpected token: ROWS required: PARTITION"); // Test success cases ddl = "create table t(id integer not null, num integer," + "CONSTRAINT tblimit1 LIMIT PARTITION ROWS 6);"; checkDDLErrorMessage(ddl, null); ddl = "create table t(id integer not null, num integer," + "LIMIT PARTITION ROWS 6);"; checkDDLErrorMessage(ddl, null); } public void testPartitionOnBadType() { final String simpleSchema = "create table books (cash float default 0.0 NOT NULL, title varchar(10) default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<partitions><partition table='books' column='cash'/></partitions> " + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = 
VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); } public void testOmittedProcedureList() { final String simpleSchema = "create table books (cash float default 0.0 NOT NULL, title varchar(10) default 'foo', PRIMARY KEY(cash));"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); } public void test3324MPPlan() throws IOException { final String simpleSchema = "create table blah (pkey integer not null, strval varchar(200), PRIMARY KEY(pkey));\n"; VoltProjectBuilder pb = new VoltProjectBuilder(); pb.enableDiagnostics(); pb.addLiteralSchema(simpleSchema); pb.addPartitionInfo("blah", "pkey"); pb.addStmtProcedure("undeclaredspquery1", "select strval UNDECLARED1 from blah where pkey = ?"); pb.addStmtProcedure("undeclaredspquery2", "select strval UNDECLARED2 from blah where pkey = 12"); pb.addStmtProcedure("declaredspquery1", "select strval SODECLARED1 from blah where pkey = ?", "blah.pkey:0"); // Currently no way to do this? // pb.addStmtProcedure("declaredspquery2", "select strval SODECLARED2 from blah where pkey = 12", "blah.pkey=12"); boolean success = pb.compile(Configuration.getPathToCatalogForTest("test3324.jar")); assertTrue(success); List<String> diagnostics = pb.harvestDiagnostics(); // This asserts that the undeclared SP plans don't mistakenly get SP treatment // -- they must each include a RECEIVE plan node. assertEquals(2, countStringsMatching(diagnostics, ".*\"UNDECLARED.\".*\"PLAN_NODE_TYPE\":\"RECEIVE\".*")); // This asserts that the methods used to prevent undeclared SP plans from getting SP treatment // don't over-reach to declared SP plans. 
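        // (Added clarification:) a declared single-partition plan executes entirely on one
        // partition and therefore contains no RECEIVE node, so finding zero RECEIVE nodes
        // in the "SODECLARED" plans is the expected complement of the check above.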
assertEquals(0, countStringsMatching(diagnostics, ".*\"SODECLARED.\".*\"PLAN_NODE_TYPE\":\"RECEIVE\".*")); // System.out.println("test3324MPPlan"); // System.out.println(diagnostics); } public void testBadDDLErrorLineNumber() throws IOException { final String schema = "-- a comment\n" + // 1 "create table books (\n" + // 2 " id integer default 0,\n" + // 3 " strval varchar(33000) default '',\n" + // 4 " PRIMARY KEY(id)\n" + // 5 ");\n" + // 6 "\n" + // 7 "-- another comment\n" + // 8 "create view badview (\n" + // 9 * error reported here * " id,\n" + " COUNT(*),\n" + " total\n" + " as\n" + "select id, COUNT(*), SUM(cnt)\n" + " from books\n" + " group by id;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(schema); final String schemaPath = schemaFile.getPath(); final String project = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures><procedure class='org.voltdb.compiler.procedures.AddBook' /></procedures>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(project); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); for (Feedback error: compiler.m_errors) { assertEquals(9, error.lineNo); } } public void testInvalidCreateProcedureDDL() throws Exception { ArrayList<Feedback> fbs; String expectedError; fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NonExistentPartitionParamInteger;" + "PARTITION PROCEDURE NonExistentPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Cannot load class for procedure: org.voltdb.compiler.procedures.NonExistentPartitionParamInteger"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "PARTITION PROCEDURE NotDefinedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Partition in referencing an undefined procedure \"NotDefinedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.PartitionParamInteger;" + "PARTITION PROCEDURE PartitionParamInteger ON TABLE PKEY_WHAAAT COLUMN PKEY;" ); expectedError = "PartitionParamInteger has partition properties defined both in class " + "\"org.voltdb.compiler.procedures.PartitionParamInteger\" and in the schema defintion file(s)"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_WHAAAT COLUMN PKEY;" ); expectedError = "PartitionInfo for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger refers to a column " + "in schema which can't be found."; 
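        // (Added note, assumption:) isFeedbackPresent appears to match the expected text as
        // a substring of the collected compiler feedback, which is why several expectedError
        // strings in this test are deliberately truncated prefixes of the full message.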
assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PSURROGATE;" ); expectedError = "PartitionInfo for procedure " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger refers to a column " + "in schema which can't be found."; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 8;" ); expectedError = "PartitionInfo specifies invalid parameter index for procedure: " + "org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM GLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Invalid CREATE PROCEDURE statement: " + "\"CREATE PROCEDURE FROM GLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger\"" + ", expected syntax: \"CREATE PROCEDURE"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger FOR TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Invalid PARTITION statement: \"PARTITION PROCEDURE " + "NotAnnotatedPartitionParamInteger FOR TABLE PKEY_INTEGER COLUMN PKEY\", " + "expected syntax: PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER CLUMN PKEY PARMTR 0;" ); expectedError = "Invalid PARTITION statement: \"PARTITION PROCEDURE " + "NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER CLUMN PKEY PARMTR 0\", " + "expected syntax: PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER hello;" ); expectedError = "Invalid 
PARTITION statement: \"PARTITION PROCEDURE " + "NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER hello\", " + "expected syntax: PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROGEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER hello;" ); expectedError = "Invalid PARTITION statement: " + "\"PARTITION PROGEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER " + "COLUMN PKEY PARAMETER hello\", expected syntax: \"PARTITION TABLE <table> " + "ON COLUMN <column>\" or \"PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE OUTOF CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 2;" ); expectedError = "Invalid CREATE PROCEDURE statement: " + "\"CREATE PROCEDURE OUTOF CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger\"" + ", expected syntax: \"CREATE PROCEDURE"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "MAKE PROCEDURE OUTOF CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 2;" ); expectedError = "DDL Error: \"unexpected token: MAKE\" in statement starting on lineno: 1"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE 1PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN;" ); expectedError = "Unknown indentifier in DDL: \"PARTITION TABLE 1PKEY_INTEGER ON COLUMN PKEY\" " + "contains invalid identifier \"1PKEY_INTEGER\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN 2PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \"PARTITION TABLE PKEY_INTEGER ON COLUMN 2PKEY\" " + "contains invalid identifier \"2PKEY\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS 0rg.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION 
PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "CREATE PROCEDURE FROM CLASS 0rg.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger" + "\" contains invalid identifier \"0rg.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.3compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "CREATE PROCEDURE FROM CLASS org.voltdb.3compiler.procedures.NotAnnotatedPartitionParamInteger" + "\" contains invalid identifier \"org.voltdb.3compiler.procedures.NotAnnotatedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.4NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.4NotAnnotatedPartitionParamInteger" + "\" contains invalid identifier \"org.voltdb.compiler.procedures.4NotAnnotatedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE 5NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "PARTITION PROCEDURE 5NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN PKEY" + "\" contains invalid identifier \"5NotAnnotatedPartitionParamInteger\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE 6PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE 6PKEY_INTEGER COLUMN PKEY" + "\" contains invalid identifier \"6PKEY_INTEGER\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN 7PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger ON TABLE PKEY_INTEGER COLUMN 7PKEY" + "\" contains invalid identifier \"7PKEY\""; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER 
( PKEY INTEGER NOT NULL, PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE FROM CLASS org.voltdb.compiler.procedures.NotAnnotatedPartitionParamInteger;" + "PARTITION PROCEDURE NotAnnotatedPartitionParamInteger TABLE PKEY_INTEGER ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Invalid PARTITION statement: \"PARTITION PROCEDURE " + "NotAnnotatedPartitionParamInteger TABLE PKEY_INTEGER ON TABLE PKEY_INTEGER COLUMN PKEY\", " + "expected syntax: PARTITION PROCEDURE <procedure> ON " + "TABLE <table> COLUMN <column> [PARAMETER <parameter-index-no>]"; assertTrue(isFeedbackPresent(expectedError, fbs)); } public void testInvalidSingleStatementCreateProcedureDDL() throws Exception { ArrayList<Feedback> fbs; String expectedError; fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS BANBALOO pkey FROM PKEY_INTEGER;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Failed to plan for statement (sql) BANBALOO pkey FROM PKEY_INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS SELEC pkey FROM PKEY_INTEGER;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 0;" ); expectedError = "Failed to plan for statement (sql) SELEC pkey FROM PKEY_INTEGER"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS DELETE FROM PKEY_INTEGER WHERE PKEY = ?;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY PARAMETER 2;" ); expectedError = "PartitionInfo specifies invalid parameter index for procedure: Foo"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS DELETE FROM PKEY_INTEGER;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "PartitionInfo specifies invalid parameter index for procedure: Foo"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE org.kanamuri.Foo AS DELETE FROM PKEY_INTEGER;" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "PartitionInfo specifies invalid parameter index for procedure: org.kanamuri.Foo"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE 7Foo AS DELETE FROM PKEY_INTEGER WHERE PKEY = ?;" + "PARTITION PROCEDURE 7Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Unknown indentifier in DDL: \""+ "CREATE PROCEDURE 7Foo AS DELETE FROM PKEY_INTEGER WHERE PKEY = ?" 
+ "\" contains invalid identifier \"7Foo\""; assertTrue(isFeedbackPresent(expectedError, fbs)); } public void testInvalidGtroovyProcedureDDL() throws Exception { ArrayList<Feedback> fbs; String expectedError; if (Float.parseFloat(System.getProperty("java.specification.version")) < 1.7) return; fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "user lacks privilege or object not found: PKEY"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " \n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Procedure \"Foo\" code block has syntax errors"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " runMeInstead = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Procedure \"Foo\" code block does not contain the required \"transactOn\" closure"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + "package voltkv.procedures;\n" + "\n" + "import org.voltdb.*;\n" + "\n" + "@ProcInfo(partitionInfo=\"store.key:0\", singlePartition=true)\n" + "public class Put extends VoltProcedure {\n" + " // Checks if key exists\n" + " public final SQLStmt checkStmt = new SQLStmt(\"SELECT key FROM store WHERE key = ?;\");\n" + " // Updates a key/value pair\n" + " public final SQLStmt updateStmt = new SQLStmt(\"UPDATE store SET value = ? 
WHERE key = ?;\");\n" + " // Inserts a key/value pair\n" + " public final SQLStmt insertStmt = new SQLStmt(\"INSERT INTO store (key, value) VALUES (?, ?);\");\n" + "\n" + " public VoltTable[] run(String key, byte[] value) {\n" + " // Check whether the pair exists\n" + " voltQueueSQL(checkStmt, key);\n" + " // Insert new or update existing key depending on result\n" + " if (voltExecuteSQL()[0].getRowCount() == 0)\n" + " voltQueueSQL(insertStmt, key, value);\n" + " else\n" + " voltQueueSQL(updateStmt, value, key);\n" + " return voltExecuteSQL(true);\n" + " }\n" + "}\n" + "### LANGUAGE GROOVY;\n" ); expectedError = "Procedure \"voltkv.procedures.Put\" is not a groovy script"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = 'Is it me that you wanted instead?'\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Procedure \"Foo\" code block does not contain the required \"transactOn\" closure"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " // ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Schema file ended mid-statement (no semicolon found)"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ##\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "Schema file ended mid-statement (no semicolon found)"; assertTrue(isFeedbackPresent(expectedError, fbs)); fbs = checkInvalidProcedureDDL( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE KROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); expectedError = "### LANGUAGE KROOVY\", expected syntax: \"CREATE PROCEDURE [ALLOW"; assertTrue(isFeedbackPresent(expectedError, fbs)); } public void testValidGroovyProcedureDDL() throws Exception { if (Float.parseFloat(System.getProperty("java.specification.version")) < 1.7) return; Database db = goodDDLAgainstSimpleSchema( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " stmt = 
new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); Procedure proc = db.getProcedures().get("Foo"); assertNotNull(proc); db = goodDDLAgainstSimpleSchema( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE PROCEDURE Foo AS ###\n" + " // #\n" + " // ##\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " def str = '# ## # ##'\n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "### LANGUAGE GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); proc = db.getProcedures().get("Foo"); assertNotNull(proc); db = goodDDLAgainstSimpleSchema( "CREATE TABLE PKEY_INTEGER ( PKEY INTEGER NOT NULL, DESCR VARCHAR(128), PRIMARY KEY (PKEY) );" + "PARTITION TABLE PKEY_INTEGER ON COLUMN PKEY;" + "CREATE \n" + "PROCEDURE Foo \n" + " AS \n" + "###\n" + " stmt = new SQLStmt('SELECT PKEY, DESCR FROM PKEY_INTEGER WHERE PKEY = ?')\n" + " transactOn = { int key -> \n" + " voltQueueSQL(stmt,key)\n" + " voltExecuteSQL(true)\n" + " }\n" + "###\n" + " LANGUAGE \n" + "GROOVY;\n" + "PARTITION PROCEDURE Foo ON TABLE PKEY_INTEGER COLUMN PKEY;" ); proc = db.getProcedures().get("Foo"); assertNotNull(proc); } private ArrayList<Feedback> checkInvalidProcedureDDL(String ddl) { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertFalse(success); return compiler.m_errors; } public void testValidAnnotatedProcedureDLL() throws Exception { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;" + "creAte PrOcEdUrE FrOm CLasS org.voltdb.compiler.procedures.AddBook;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); final Database db = c2.getClusters().get("cluster").getDatabases().get("database"); final Procedure addBook = db.getProcedures().get("AddBook"); assertEquals(true, addBook.getSinglepartition()); } public void 
testValidNonAnnotatedProcedureDDL() throws Exception { final String simpleSchema = "create table books (cash integer default 23 not null, title varchar(3) default 'foo', PRIMARY KEY(cash));" + "PARTITION TABLE books ON COLUMN cash;" + "create procedure from class org.voltdb.compiler.procedures.NotAnnotatedAddBook;" + "paRtItiOn prOcEdure NotAnnotatedAddBook On taBLe books coLUmN cash ParaMETer 0;"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema); final String schemaPath = schemaFile.getPath(); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); assertTrue(success); final String catalogContents = VoltCompilerUtils.readFileFromJarfile(testout_jar, "catalog.txt"); final Catalog c2 = new Catalog(); c2.execute(catalogContents); final Database db = c2.getClusters().get("cluster").getDatabases().get("database"); final Procedure addBook = db.getProcedures().get("NotAnnotatedAddBook"); assertEquals(true, addBook.getSinglepartition()); } class TestRole { final String name; boolean adhoc = false; boolean sysproc = false; boolean defaultproc = false; public TestRole(String name) { this.name = name; } public TestRole(String name, boolean adhoc, boolean sysproc, boolean defaultproc) { this.name = name; this.adhoc = adhoc; this.sysproc = sysproc; this.defaultproc = defaultproc; } } private void checkRoleXMLAndDDL(String rolesElem, String ddl, String errorRegex, TestRole... roles) throws Exception { final File schemaFile = VoltProjectBuilder.writeStringToTempFile(ddl != null ? ddl : ""); final String schemaPath = schemaFile.getPath(); String rolesBlock = (rolesElem != null ? String.format("<roles>%s</roles>", rolesElem) : ""); final String simpleProject = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas>" + "<schema path='" + schemaPath + "' />" + "</schemas>" + rolesBlock + "<procedures/>" + "</database>" + "</project>"; final File projectFile = VoltProjectBuilder.writeStringToTempFile(simpleProject); final String projectPath = projectFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); final boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); String error = (success || compiler.m_errors.size() == 0 ? 
"" : compiler.m_errors.get(compiler.m_errors.size()-1).message); if (errorRegex == null) { assertTrue(String.format("Expected success\nXML: %s\nDDL: %s\nERR: %s", rolesElem, ddl, error), success); Database db = compiler.getCatalog().getClusters().get("cluster").getDatabases().get("database"); CatalogMap<Group> groups = db.getGroups(); CatalogMap<Connector> connectors = db.getConnectors(); if (connectors.get("0") == null ) { connectors.add("0"); } assertNotNull(groups); assertEquals(roles.length, groups.size()); for (TestRole role : roles) { Group group = groups.get(role.name); assertNotNull(String.format("Missing role \"%s\"", role.name), group); assertEquals(String.format("Role \"%s\" adhoc flag mismatch:", role.name), role.adhoc, group.getAdhoc()); assertEquals(String.format("Role \"%s\" sysproc flag mismatch:", role.name), role.sysproc, group.getSysproc()); assertEquals(String.format("Role \"%s\" defaultproc flag mismatch:", role.name), role.defaultproc, group.getDefaultproc()); } } else { assertFalse(String.format("Expected error (\"%s\")\nXML: %s\nDDL: %s", errorRegex, rolesElem, ddl), success); assertFalse("Expected at least one error message.", error.isEmpty()); Matcher m = Pattern.compile(errorRegex).matcher(error); assertTrue(String.format("%s\nEXPECTED: %s", error, errorRegex), m.matches()); } } private void goodRoleDDL(String ddl, TestRole... roles) throws Exception { checkRoleXMLAndDDL(null, ddl, null, roles); } private void badRoleDDL(String ddl, String errorRegex) throws Exception { checkRoleXMLAndDDL(null, ddl, errorRegex); } public void testRoleXML() throws Exception { checkRoleXMLAndDDL("<role name='r1'/>", null, null, new TestRole("r1")); } public void testBadRoleXML() throws Exception { checkRoleXMLAndDDL("<rolex name='r1'/>", null, ".*rolex.*[{]role[}].*expected.*"); checkRoleXMLAndDDL("<role name='r1'/>", "create role r1;", ".*already exists.*"); } public void testRoleDDL() throws Exception { goodRoleDDL("create role r1;", new TestRole("r1")); goodRoleDDL("create role r1;create role r2;", new TestRole("r1"), new TestRole("r2")); goodRoleDDL("create role r1 with adhoc;", new TestRole("r1", true, false, false)); goodRoleDDL("create role r1 with sysproc;", new TestRole("r1", false, true, false)); goodRoleDDL("create role r1 with defaultproc;", new TestRole("r1", false, false, true)); goodRoleDDL("create role r1 with adhoc,sysproc,defaultproc;", new TestRole("r1", true, true, true)); goodRoleDDL("create role r1 with adhoc,sysproc,sysproc;", new TestRole("r1", true, true, false)); goodRoleDDL("create role r1 with AdHoc,SysProc,DefaultProc;", new TestRole("r1", true, true, true)); } public void testBadRoleDDL() throws Exception { badRoleDDL("create role r1", ".*no semicolon.*"); badRoleDDL("create role r1;create role r1;", ".*already exists.*"); badRoleDDL("create role r1 with ;", ".*Invalid CREATE ROLE statement.*"); badRoleDDL("create role r1 with blah;", ".*Invalid permission \"blah\".*"); badRoleDDL("create role r1 with adhoc sysproc;", ".*Invalid CREATE ROLE statement.*"); badRoleDDL("create role r1 with adhoc, blah;", ".*Invalid permission \"blah\".*"); } private Database checkDDLAgainstSimpleSchema(String errorRegex, String... 
ddl) throws Exception { String schemaDDL = "create table books (cash integer default 23 NOT NULL, title varbinary(10) default NULL, PRIMARY KEY(cash)); " + "partition table books on column cash;" + StringUtils.join(ddl, " "); File schemaFile = VoltProjectBuilder.writeStringToTempFile(schemaDDL.toString()); String schemaPath = schemaFile.getPath(); String projectXML = "<?xml version=\"1.0\"?>\n" + "<project>" + "<database name='database'>" + "<schemas><schema path='" + schemaPath + "' /></schemas>" + "</database>" + "</project>"; File projectFile = VoltProjectBuilder.writeStringToTempFile(projectXML); String projectPath = projectFile.getPath(); VoltCompiler compiler = new VoltCompiler(); boolean success = compiler.compileWithProjectXML(projectPath, testout_jar); String error = (success || compiler.m_errors.size() == 0 ? "" : compiler.m_errors.get(compiler.m_errors.size()-1).message); if (errorRegex == null) { assertTrue(String.format("Expected success\nDDL: %s\n%s", ddl, error), success); Catalog cat = compiler.getCatalog(); return cat.getClusters().get("cluster").getDatabases().get("database"); } else { assertFalse(String.format("Expected error (\"%s\")\nDDL: %s", errorRegex, ddl), success); assertFalse("Expected at least one error message.", error.isEmpty()); Matcher m = Pattern.compile(errorRegex).matcher(error); assertTrue(String.format("%s\nEXPECTED: %s", error, errorRegex), m.matches()); return null; } } private Database goodDDLAgainstSimpleSchema(String... ddl) throws Exception { return checkDDLAgainstSimpleSchema(null, ddl); } private void badDDLAgainstSimpleSchema(String errorRegex, String... ddl) throws Exception { checkDDLAgainstSimpleSchema(errorRegex, ddl); } public void testGoodCreateProcedureWithAllow() throws Exception { Database db = goodDDLAgainstSimpleSchema( "create role r1;", "create procedure p1 allow r1 as select * from books;"); Procedure proc = db.getProcedures().get("p1"); assertNotNull(proc); CatalogMap<GroupRef> groups = proc.getAuthgroups(); assertEquals(1, groups.size()); assertNotNull(groups.get("r1")); db = goodDDLAgainstSimpleSchema( "create role r1;", "create role r2;", "create procedure p1 allow r1, r2 as select * from books;"); proc = db.getProcedures().get("p1"); assertNotNull(proc); groups = proc.getAuthgroups(); assertEquals(2, groups.size()); assertNotNull(groups.get("r1")); assertNotNull(groups.get("r2")); db = goodDDLAgainstSimpleSchema( "create role r1;", "create procedure allow r1 from class org.voltdb.compiler.procedures.AddBook;"); proc = db.getProcedures().get("AddBook"); assertNotNull(proc); groups = proc.getAuthgroups(); assertEquals(1, groups.size()); assertNotNull(groups.get("r1")); db = goodDDLAgainstSimpleSchema( "create role r1;", "create role r2;", "create procedure allow r1,r2 from class org.voltdb.compiler.procedures.AddBook;"); proc = db.getProcedures().get("AddBook"); assertNotNull(proc); groups = proc.getAuthgroups(); assertEquals(2, groups.size()); assertNotNull(groups.get("r1")); assertNotNull(groups.get("r2")); db = goodDDLAgainstSimpleSchema( "create role r1;", "create procedure allow r1,r1 from class org.voltdb.compiler.procedures.AddBook;"); proc = db.getProcedures().get("AddBook"); assertNotNull(proc); groups = proc.getAuthgroups(); assertEquals(1, groups.size()); assertNotNull(groups.get("r1")); } public void testBadCreateProcedureWithAllow() throws Exception { badDDLAgainstSimpleSchema(".*expected syntax.*", "create procedure p1 allow as select * from books;"); badDDLAgainstSimpleSchema(".*expected syntax.*", "create 
procedure p1 allow a b as select * from books;"); badDDLAgainstSimpleSchema(".*group rx that does not exist.*", "create procedure p1 allow rx as select * from books;"); badDDLAgainstSimpleSchema(".*group rx that does not exist.*", "create role r1;", "create procedure p1 allow r1, rx as select * from books;"); } private ConnectorTableInfo getConnectorTableInfoFor( Database db, String tableName) { Connector connector = db.getConnectors().get("0"); if( connector == null) return null; return connector.getTableinfo().getIgnoreCase(tableName); } public void testGoodExportTable() throws Exception { Database db; db = goodDDLAgainstSimpleSchema( "create table e1 (id integer, f1 varchar(16));", "export table e1;" ); assertNotNull(getConnectorTableInfoFor(db, "e1")); db = goodDDLAgainstSimpleSchema( "create table e1 (id integer, f1 varchar(16));", "create table e2 (id integer, f1 varchar(16));", "export table e1;", "eXpOrt TABle E2;" ); assertNotNull(getConnectorTableInfoFor(db, "e1")); assertNotNull(getConnectorTableInfoFor(db, "e2")); } public void testBadExportTable() throws Exception { badDDLAgainstSimpleSchema(".+\\sexport, table non_existant was not present in the catalog.*", "export table non_existant;" ); badDDLAgainstSimpleSchema(".+contains invalid identifier \"1table_name_not_valid\".*", "export table 1table_name_not_valid;" ); badDDLAgainstSimpleSchema(".+Invalid EXPORT TABLE statement.*", "export table one, two, three;" ); badDDLAgainstSimpleSchema(".+Invalid EXPORT TABLE statement.*", "export export table one;" ); badDDLAgainstSimpleSchema(".+Invalid EXPORT TABLE statement.*", "export table table one;" ); badDDLAgainstSimpleSchema("Table with indexes configured as an export table.*", "export table books;" ); badDDLAgainstSimpleSchema("Export table configured with materialized view.*", "create table view_source( id integer, f1 varchar(16), f2 varchar(12));", "create view my_view as select f2, count(*) as f2cnt from view_source group by f2;", "export table view_source;" ); badDDLAgainstSimpleSchema("View configured as an export table.*", "create table view_source( id integer, f1 varchar(16), f2 varchar(12));", "create view my_view as select f2, count(*) as f2cnt from view_source group by f2;", "export table my_view;" ); badDDLAgainstSimpleSchema("Table \"E1\" is already exported.*", "create table e1( id integer, f1 varchar(16), f2 varchar(12));", "export table e1;", "export table E1;" ); } public void testCompileFromDDL() throws IOException { final String simpleSchema1 = "create table table1r_el (pkey integer, column2_integer integer, PRIMARY KEY(pkey));\n" + "create view v_table1r_el (column2_integer, num_rows) as\n" + "select column2_integer as column2_integer,\n" + "count(*) as num_rows\n" + "from table1r_el\n" + "group by column2_integer;\n" + "create view v_table1r_el2 (column2_integer, num_rows) as\n" + "select column2_integer as column2_integer,\n" + "count(*) as num_rows\n" + "from table1r_el\n" + "group by column2_integer\n;\n"; final File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema1); final String schemaPath = schemaFile.getPath(); final VoltCompiler compiler = new VoltCompiler(); boolean success = compileFromDDL(compiler, testout_jar, schemaPath); assertTrue(success); success = compileFromDDL(compiler, testout_jar, schemaPath + "???"); assertFalse(success); success = compileFromDDL(compiler, testout_jar); assertFalse(success); } private int countStringsMatching(List<String> diagnostics, String pattern) { int count = 0; for (String string : diagnostics) { 
if (string.matches(pattern)) { ++count; } } return count; } }
ENG-5863: ALTER and DROP table for jenkins.
tests/frontend/org/voltdb/compiler/TestVoltCompiler.java
ENG-5863: ALTER and DROP table for jenkins.
Java
apache-2.0
82703c2fd150bd94da83c24df0724f3041ab73d2
0
anthcp/cdap,chtyim/cdap,hsaputra/cdap,anthcp/cdap,hsaputra/cdap,mpouttuclarke/cdap,chtyim/cdap,caskdata/cdap,anthcp/cdap,caskdata/cdap,caskdata/cdap,caskdata/cdap,hsaputra/cdap,mpouttuclarke/cdap,mpouttuclarke/cdap,chtyim/cdap,caskdata/cdap,anthcp/cdap,chtyim/cdap,caskdata/cdap,anthcp/cdap,chtyim/cdap,chtyim/cdap,mpouttuclarke/cdap,mpouttuclarke/cdap,hsaputra/cdap,hsaputra/cdap
/* * Copyright 2012-2014 Continuuity, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.continuuity.metrics.collect; import com.continuuity.common.conf.CConfiguration; import com.continuuity.common.metrics.MetricsScope; import com.continuuity.data2.OperationException; import com.continuuity.metrics.MetricsConstants; import com.continuuity.metrics.data.MetricsTableFactory; import com.continuuity.metrics.data.TimeSeriesTable; import com.continuuity.metrics.process.MetricsProcessor; import com.continuuity.metrics.transport.MetricsRecord; import com.google.common.collect.ImmutableList; import com.google.inject.Inject; import com.google.inject.Singleton; import org.apache.twill.common.Threads; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; /** * A {@link com.continuuity.common.metrics.MetricsCollectionService} that writes to MetricsTable directly. * It also has a scheduling job that clean up old metrics periodically. */ @Singleton public final class LocalMetricsCollectionService extends AggregatedMetricsCollectionService { private static final Logger LOG = LoggerFactory.getLogger(LocalMetricsCollectionService.class); private final CConfiguration cConf; private final Set<MetricsProcessor> processors; private final MetricsTableFactory tableFactory; private ScheduledExecutorService scheduler; @Inject public LocalMetricsCollectionService(CConfiguration cConf, MetricsTableFactory tableFactory, Set<MetricsProcessor> processors) { this.cConf = cConf; this.processors = processors; this.tableFactory = tableFactory; } @Override protected void publish(MetricsScope scope, Iterator<MetricsRecord> metrics) throws Exception { List<MetricsRecord> records = ImmutableList.copyOf(metrics); for (MetricsProcessor processor : processors) { processor.process(scope, records.iterator()); } } @Override protected void startUp() throws Exception { super.startUp(); // It will only do cleanup if the underlying table doesn't supports TTL. scheduler = Executors.newSingleThreadScheduledExecutor(Threads.createDaemonThreadFactory("metrics-cleanup")); long retention = cConf.getLong(MetricsConstants.ConfigKeys.RETENTION_SECONDS + ".1.seconds", MetricsConstants.DEFAULT_RETENTION_HOURS); scheduler.schedule(createCleanupTask(retention), 1, TimeUnit.SECONDS); } @Override protected void shutDown() throws Exception { if (scheduler != null) { scheduler.shutdownNow(); } super.shutDown(); } /** * Creates a task for cleanup. * @param retention Retention in seconds. */ private Runnable createCleanupTask(final long retention) { return new Runnable() { @Override public void run() { // Only do cleanup if the underlying table doesn't supports TTL. 
try { if (tableFactory.isTTLSupported()) { return; } } catch (Exception e) { /* TTL support cannot be determined yet (e.g. datasets service not up); retry shortly and skip this cleanup pass */ scheduler.schedule(this, 1, TimeUnit.SECONDS); return; } long currentTime = TimeUnit.SECONDS.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS); long deleteBefore = currentTime - retention; for (MetricsScope scope : MetricsScope.values()) { TimeSeriesTable timeSeriesTable = tableFactory.createTimeSeries(scope.name(), 1); try { timeSeriesTable.deleteBefore(deleteBefore); } catch (OperationException e) { LOG.error("Failed in cleaning up metrics table: {}", e.getMessage(), e); } } scheduler.schedule(this, 1, TimeUnit.HOURS); } }; } }
watchdog/src/main/java/com/continuuity/metrics/collect/LocalMetricsCollectionService.java
/* * Copyright 2012-2014 Continuuity, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.continuuity.metrics.collect; import com.continuuity.common.conf.CConfiguration; import com.continuuity.common.metrics.MetricsScope; import com.continuuity.data2.OperationException; import com.continuuity.metrics.MetricsConstants; import com.continuuity.metrics.data.MetricsTableFactory; import com.continuuity.metrics.data.TimeSeriesTable; import com.continuuity.metrics.process.MetricsProcessor; import com.continuuity.metrics.transport.MetricsRecord; import com.google.common.collect.ImmutableList; import com.google.inject.Inject; import com.google.inject.Singleton; import org.apache.twill.common.Threads; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; /** * A {@link com.continuuity.common.metrics.MetricsCollectionService} that writes to MetricsTable directly. * It also has a scheduling job that clean up old metrics periodically. */ @Singleton public final class LocalMetricsCollectionService extends AggregatedMetricsCollectionService { private static final Logger LOG = LoggerFactory.getLogger(LocalMetricsCollectionService.class); private final CConfiguration cConf; private final Set<MetricsProcessor> processors; private final MetricsTableFactory tableFactory; private ScheduledExecutorService scheduler; @Inject public LocalMetricsCollectionService(CConfiguration cConf, MetricsTableFactory tableFactory, Set<MetricsProcessor> processors) { this.cConf = cConf; this.processors = processors; this.tableFactory = tableFactory; } @Override protected void publish(MetricsScope scope, Iterator<MetricsRecord> metrics) throws Exception { List<MetricsRecord> records = ImmutableList.copyOf(metrics); for (MetricsProcessor processor : processors) { processor.process(scope, records.iterator()); } } @Override protected void startUp() throws Exception { super.startUp(); // Only do cleanup if the underlying table doesn't supports TTL. if (!tableFactory.isTTLSupported()) { scheduler = Executors.newSingleThreadScheduledExecutor(Threads.createDaemonThreadFactory("metrics-cleanup")); long retention = cConf.getLong(MetricsConstants.ConfigKeys.RETENTION_SECONDS + ".1.seconds", MetricsConstants.DEFAULT_RETENTION_HOURS); scheduler.scheduleAtFixedRate(createCleanupTask(retention), 0, 1, TimeUnit.HOURS); } } @Override protected void shutDown() throws Exception { if (scheduler != null) { scheduler.shutdownNow(); } super.shutDown(); } /** * Creates a task for cleanup. * @param retention Retention in seconds. 
*/ private Runnable createCleanupTask(final long retention) { return new Runnable() { @Override public void run() { long currentTime = TimeUnit.SECONDS.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS); long deleteBefore = currentTime - retention; for (MetricsScope scope : MetricsScope.values()) { TimeSeriesTable timeSeriesTable = tableFactory.createTimeSeries(scope.name(), 1); try { timeSeriesTable.deleteBefore(deleteBefore); } catch (OperationException e) { LOG.error("Failed in cleaning up metrics table: {}", e.getMessage(), e); } } } }; } }
fix cleanup of metrics by ttl when datasets service is not up
watchdog/src/main/java/com/continuuity/metrics/collect/LocalMetricsCollectionService.java
fix cleanup of metrics by ttl when datasets service is not up
Java
apache-2.0
555a5c52b55f132a3dbbae590c0f3b824e66feba
0
trejkaz/derby,apache/derby,apache/derby,apache/derby,trejkaz/derby,apache/derby,trejkaz/derby
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.derbyTesting.functionTests.tests.jdbcapi; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; import java.sql.Blob; import java.sql.CallableStatement; import java.sql.Clob; import java.sql.Connection; import java.sql.Date; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.derbyTesting.junit.BigDecimalHandler; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.TestConfiguration; /** * */ public class ParameterMappingTest extends BaseJDBCTestCase { private static boolean HAVE_BIG_DECIMAL; static { if (JDBC.vmSupportsJSR169()) HAVE_BIG_DECIMAL = false; else HAVE_BIG_DECIMAL = true; } private static int[] jdbcTypes = { Types.TINYINT, Types.SMALLINT, Types.INTEGER, Types.BIGINT, Types.REAL, Types.FLOAT, Types.DOUBLE, Types.DECIMAL, Types.NUMERIC, Types.BIT, Types.NULL, // Types.BOOLEAN Types.CHAR, Types.VARCHAR, Types.LONGVARCHAR, Types.NULL, // Types.BINARY, Types.VARBINARY, Types.NULL, // Types.LONGVARBINARY, Types.DATE, Types.TIME, Types.TIMESTAMP, Types.CLOB, Types.BLOB, }; private static String[] SQLTypes = { null, "SMALLINT", "INTEGER", "BIGINT", "REAL", "FLOAT", "DOUBLE", "DECIMAL(10,5)", null, null, null, "CHAR(60)", "VARCHAR(60)", "LONG VARCHAR", "CHAR(60) FOR BIT DATA", "VARCHAR(60) FOR BIT DATA", "LONG VARCHAR FOR BIT DATA", "DATE", "TIME", "TIMESTAMP", "CLOB(1k)", "BLOB(1k)", }; private static String[] validString = {null,"98","98","98", "98","98", "98","98",null,null,null, "98","98","98","0x4", "0x4","0x4", "2004-02-14", "00:00:00","2004-02-14 00:00:00","98","0x4"}; private static Class[] B3_GET_OBJECT; static { if (HAVE_BIG_DECIMAL) { B3_GET_OBJECT = new Class[] { java.lang.Integer.class, // Types.TINYINT, java.lang.Integer.class, // Types.SMALLINT, java.lang.Integer.class, // Types.INTEGER, java.lang.Long.class, // Types.BIGINT, java.lang.Float.class, // Types.REAL, java.lang.Double.class, // Types.FLOAT, java.lang.Double.class, // Types.DOUBLE, java.math.BigDecimal.class, // Types.DECIMAL, java.math.BigDecimal.class, // Types.NUMERIC, java.lang.Boolean.class, // Types.BIT, java.lang.Boolean.class, // Types.BOOLEAN java.lang.String.class, // Types.CHAR, java.lang.String.class, // Types.VARCHAR, java.lang.String.class, // Types.LONGVARCHAR, byte[].class, // Types.NULL, //Types.BINARY, byte[].class, // Types.VARBINARY, byte[].class, // Types.LONGVARBINARY, java.sql.Date.class, // Types.DATE, 
java.sql.Time.class, // Types.TIME, java.sql.Timestamp.class, // Types.TIMESTAMP, java.sql.Clob.class, // Types.CLOB, java.sql.Blob.class, // Types.BLOB, }; } else { B3_GET_OBJECT = new Class[] { java.lang.Integer.class, // Types.TINYINT, java.lang.Integer.class, // Types.SMALLINT, java.lang.Integer.class, // Types.INTEGER, java.lang.Long.class, // Types.BIGINT, java.lang.Float.class, // Types.REAL, java.lang.Double.class, // Types.FLOAT, java.lang.Double.class, // Types.DOUBLE, java.lang.String.class, // Types.DECIMAL, java.lang.String.class, // Types.NUMERIC, java.lang.Boolean.class, // Types.BIT, java.lang.Boolean.class, // Types.BOOLEAN java.lang.String.class, // Types.CHAR, java.lang.String.class, // Types.VARCHAR, java.lang.String.class, // Types.LONGVARCHAR, byte[].class, // Types.NULL, //Types.BINARY, byte[].class, // Types.VARBINARY, byte[].class, // Types.LONGVARBINARY, java.sql.Date.class, // Types.DATE, java.sql.Time.class, // Types.TIME, java.sql.Timestamp.class, // Types.TIMESTAMP, java.sql.Clob.class, // Types.CLOB, java.sql.Blob.class, // Types.BLOB, }; } } private static final boolean _ = false; private static final boolean X = true; /** JDBC 3.0 spec Table B6 - Use of ResultSet getter Methods to Retrieve JDBC Data Types */ public static final boolean[][] B6 = { // Types. T S I B R F D D N B B C V L B V L D T T C B // I M N I E L O E U I O H A O I A O A I I L L // N A T G A O U C M T O A R N N R N T M M O O // Y L E I L A B I E L R C G A B G E E E B B // I L G N T L M R E H V R I V S // N I E T E A I A A A Y N A T // T N R L C N R R A R A // T C R B M // H B I P // A I N // R N /* 0 getByte*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 1 getShort*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 2 getInt*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 3 getLong*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 4 getFloat*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 5 getDouble*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 6 getBigDecimal*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 7 getBoolean*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 8 getString*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _}, /* 9 getBytes*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*10 getDate*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, _, X, _, _}, /*11 getTime*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X, X, _, _}, /*12 getTimestamp*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, X, X, _, _}, /*13 getAsciiStream*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, X, X, X, _, _, _, _, _}, /*14 getBinaryStream*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*15 getCharStream*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, X, X, X, _, _, _, _, _}, /*16 getClob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _}, /*17 getBlob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X}, /*18 getUnicodeStream */{ _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, }; /** JDBC 3.0 Section 13.2.2.1 specifies that table B-2 is used to specify type mappings from the Java types (e.g. int as setInt) to the JDBC SQL Type (Types.INT). 
This table does not include stream methods and does not include conversions specified elsewhere in the text, Namely Section 16.3.2 setBinaryStream may be used to set a BLOB setAsciiStream and setCharacterStream may be used to set a CLOB Thus this B2_MOD table is laid out like the B6 table and makes the assumptions that - Any Java numeric type can be used to set any SQL numeric type - Any Java numeric type can be used to set any SQL CHAR type - Numeric and date/time java types can be converted to SQL Char values. */ // Types. T S I B R F D D N B B C V L B V L D T T C B // I M N I E L O E U I O H A O I A O A I I L L // N A T G A O U C M T O A R N N R N T M M O O // Y L E I L A B I E L R C G A B G E E E B B // I L G N T L M R E H V R I V S // N I E T E A I A A A Y N A T // T N R L C N R R A R A // T C R B M // H B I P // A I N // R N public static boolean[][] B2_MOD = { /* 0 setByte*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 1 setShort*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 2 setInt*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 3 setLong*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 4 setFloat*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 5 setDouble*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 6 setBigDecimal*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 7 setBoolean*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 8 setString*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, X, X, X, _, _}, /* 9 setBytes*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*10 setDate*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, _, X, _, _}, /*11 setTime*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X, _, _, _}, /*12 setTimestamp*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, X, X, _, _}, /*13 setAsciiStream*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _, _, X, _}, /*14 setBinaryStream*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X}, /*15 setCharStream*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _, _, X, _}, /*16 setClob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _}, /*17 setBlob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X}, /*18 setUnicodeStream */{ _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, }; /** Table B5 conversion of Objects using setObject*/ // Types. 
T S I B R F D D N B B C V L B V L D T T C B // I M N I E L O E U I O H A O I A O A I I L L // N A T G A O U C M T O A R N N R N T M M O O // Y L E I L A B I E L R C G A B G E E E B B // I L G N T L M R E H V R I V S // N I E T E A I A A A Y N A T // T N R L C N R R A R A // T C R B M // H B I P // A I N // R N public static boolean[][] B5 = { /* 0 String */ { X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, X, X, X, X, _, _}, /* 1 BigDecimal */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 2 Boolean */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 3 Integer */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 4 Long */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 5 Float */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 6 Double */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 7 byte[] */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /* 8 Date */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, _, X, _, _}, /* 9 Time */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X, _, _, _}, /*10 Timestamp */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, X, X, _, _}, /*11 Blob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X}, /*12 Clob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _}, //Byte and Short were added to this table in JDBC 4.0. (See DERBY-1500.) /*13 Byte */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /*14 Short */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, }; public static final boolean[][] allowRegisterOut = { // Types. T S I B R F D D N B B C V L B V L D T T C B // I M N I E L O E U I O H A O I A O A I I L L // N A T G A O U C M T O A R N N R N T M M O O // Y L E I L A B I E L R C G A B G E E E B B // I L G N T L M R E H V R I V S // N I E T E A I A A A Y N A T // T N R L C N R R A R A // T C R B M // H B I P // A I N // param sqlType R N /* 0 null */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, /* 1 SMALLINT*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 2 INTEGER*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 3 BIGINT */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 4 REAL */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 5 FLOAT */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 6 DOUBLE */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 7 DECIMAL*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 8 null */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, X, X, X, _, _}, /* 9 null*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*10 null */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, _, X, _, _}, /*11 CHAR(60) */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X, _, _, _}, /*12 VARCHAR(60) */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, X, X, _, _}, /*13 LONG VARCHAR */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, /*14 CHAR FOR BIT */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*15 VARCHAR FOR BIT*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*16 LONGVARCHAR FOR B*/{ _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, /*17 DATE */ { _, _, _, _, _, _, _, 
_, _, _, _, _, _, _, _, _, _, X, _, _, _, _}, /*18 TIME */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _, _, _}, /*19 TIMESTAMP */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _, _}, /*20 CLOB */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, /*21 BLOB */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, }; /** * @param arg0 */ public ParameterMappingTest(String arg0) { super(arg0); } /* * (non-Javadoc) * * @see junit.framework.TestCase#setUp() */ protected void setUp() throws Exception { Connection conn = getConnection(); conn.setAutoCommit(false); // create simple a table with BLOB and CLOB thta // can be used to for setBlob/setClob testing. Statement scb = conn.createStatement(); scb.execute("CREATE TABLE PM.LOB_GET(ID INT, B BLOB, C CLOB)"); PreparedStatement pscb = conn .prepareStatement("INSERT INTO PM.LOB_GET VALUES (?, ?, ?)"); pscb.setInt(1, 0); pscb.setNull(2, Types.BLOB); pscb.setNull(3, Types.CLOB); pscb.executeUpdate(); pscb.setInt(1, 1); { byte[] data = new byte[6]; data[0] = (byte) 0x4; data[1] = (byte) 0x3; data[2] = (byte) 0x72; data[3] = (byte) 0x43; data[4] = (byte) 0x00; data[5] = (byte) 0x37; pscb.setBinaryStream(2, new java.io.ByteArrayInputStream(data), 6); } pscb.setCharacterStream(3, new java.io.StringReader("72"), 2); pscb.executeUpdate(); scb.close(); pscb.close(); conn.commit(); } public void testParameterMapping() throws Exception { Connection conn = getConnection(); for (int type = 0; type < SQLTypes.length; type++) { String sqlType = SQLTypes[type]; if (sqlType == null || jdbcTypes[type] == Types.NULL) { continue; } Statement s = conn.createStatement(); try { s.execute("DROP TABLE PM.TYPE_AS"); } catch (SQLException seq) { } s.execute("CREATE TABLE PM.TYPE_AS(VAL " + SQLTypes[type] + ")"); PreparedStatement psi = conn .prepareStatement("INSERT INTO PM.TYPE_AS(VAL) VALUES(?)"); psi.setNull(1, jdbcTypes[type]); psi.executeUpdate(); PreparedStatement psq = conn .prepareStatement("SELECT VAL FROM PM.TYPE_AS"); ResultSet rs = psq.executeQuery(); ResultSetMetaData rsmd = rs.getMetaData(); assertEquivalentDataType(jdbcTypes[type], rsmd.getColumnType(1)); rs.close(); // For this data type // Test inserting a NULL value and then performing all the getXXX() // calls on it. // System.out.println(" NULL VALUE"); getXXX(psq, type, true); s.execute("DELETE FROM PM.TYPE_AS"); // For this data type // Test inserting a valid value and then performing all the getXXX() // calls on it. 
if (setValidValue(psi, 1, jdbcTypes[type])) { psi.executeUpdate(); getXXX(psq, type, false); } setXXX(s, psi, psq, type); psi.close(); psq.close(); s.execute("DROP TABLE PM.TYPE_AS"); conn.commit(); // NOW PROCEDURE PARAMETERS try { s.execute("DROP PROCEDURE PMP.TYPE_AS"); }catch (SQLException seq) { } String procSQL; if(HAVE_BIG_DECIMAL) { procSQL = "CREATE PROCEDURE PMP.TYPE_AS(" + "IN P1 " + SQLTypes[type] + ", INOUT P2 " + SQLTypes[type] + ", OUT P3 " + SQLTypes[type] + ") LANGUAGE JAVA PARAMETER STYLE JAVA NO SQL " + " EXTERNAL NAME 'org.apache.derbyTesting.functionTests.util.ProcedureTest.pmap'"; } else { procSQL = "CREATE PROCEDURE PMP.TYPE_AS(" + "IN P1 " + SQLTypes[type] + ", INOUT P2 " + SQLTypes[type] + ", OUT P3 " + SQLTypes[type] + ") LANGUAGE JAVA PARAMETER STYLE JAVA NO SQL " + " EXTERNAL NAME 'org.apache.derbyTesting.functionTests.util.SimpleProcedureTest.pmap'"; } try { if (!HAVE_BIG_DECIMAL && SQLTypes[type].equals("DECIMAL(10,5)")) continue; //System.out.println(procSQL); s.execute(procSQL); } catch (SQLException sqle) { // may get error that column is not allowed if ("42962".equals(sqle.getSQLState())) continue; else fail(sqle.getSQLState() + ":" + sqle.getMessage()); continue; } // For each JDBC type try to register the out parameters with that type. for (int opt = 0; opt < jdbcTypes.length; opt++) { int jopt = jdbcTypes[opt]; if (jopt == Types.NULL) continue; CallableStatement csp = conn.prepareCall("CALL PMP.TYPE_AS(?, ?, ?)"); boolean bothRegistered = true; //System.out.print("INOUT " + sqlType + " registerOutParameter(" + JDBC.sqlNameFromJdbc(jopt) + ") "); try { csp.registerOutParameter(2, jopt); } catch (SQLException sqle) { assertFalse("INOUT " + sqlType + " registerOutParameter(" + JDBC.sqlNameFromJdbc(jopt) + ") failed",allowRegisterOut[type][opt]); if (!"XCL25".equals(sqle.getSQLState())) fail("-- " + sqle.getSQLState()); bothRegistered = false; } //System.out.print("OUT " + sqlType + " registerOutParameter(" + TestUtil.getNameFromJdbcType(jopt) + ") "); try { csp.registerOutParameter(3, jopt); } catch (SQLException sqle) { if (!"XCL25".equals(sqle.getSQLState())) fail("-- " + sqle.getSQLState()); assertFalse("OUT " + sqlType + " registerOutParameter(" + JDBC.sqlNameFromJdbc(jopt) + "failed",allowRegisterOut[type][opt]); bothRegistered = false; } if (bothRegistered) { try { // set the IN value with an accepted value according to its type // set the INOUT value with an accepted value according to its registered type if (setValidValue(csp, 1, jdbcTypes[type]) && setValidValue(csp, 2, jopt)) { csp.execute(); // now get the INOUT, OUT parameters according to their registered type. 
getOutValue(csp, 2, jopt,type); getOutValue(csp, 3, jopt,type); } } catch (SQLException sqle) { boolean expectedConversionError = ("22018".equals(sqle.getSQLState())|| "22007".equals(sqle.getSQLState())); assertTrue("FAIL: Unexpected exception" + sqle.getSQLState() + ":" + sqle.getMessage(), expectedConversionError); } } csp.close(); } s.execute("DROP PROCEDURE PMP.TYPE_AS"); s.close(); conn.commit(); } } /* * (non-Javadoc) * * @see org.apache.derbyTesting.junit.BaseJDBCTestCase#tearDown() */ protected void tearDown() throws Exception { Connection conn = getConnection(); Statement scb = conn.createStatement(); scb.execute("DROP TABLE PM.LOB_GET"); scb.close(); commit(); } private static void getXXX(PreparedStatement ps, int type, boolean isNull) throws SQLException, java.io.IOException { { // getByte(); ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { byte b = rs.getByte(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertEquals(32, b); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 0, type); } { // getShort() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; try { short s = rs.getShort(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertEquals(32, s); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 1, type); } { // getInt() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { int i = rs.getInt(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(isNull); assertEquals(32, i); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 2, type); } { // getLong(); ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { long l = rs.getLong(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertEquals(32, l); assertFalse(wn); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 3, type); } { // getFloat() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; try { float f = rs.getFloat(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertEquals(32.0, f, .000001); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 4, type); } { // getDouble(); ResultSet rs = ps.executeQuery(); rs.next(); SQLException sqleResult = null; boolean worked; try { double d = rs.getDouble(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertEquals(32.0, d, .00001); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 5, type); } if (HAVE_BIG_DECIMAL) { // getBigDecimal() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { BigDecimal bd = rs.getBigDecimal(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); assertNull(bd); } else { assertFalse(wn); assertEquals("BigDecimal comparison failed", 0, new BigDecimal("32.0").compareTo(bd)); } worked = true; } catch (SQLException sqle) { 
sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 6, type); } { // getBoolean() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { boolean b = rs.getBoolean(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertTrue(b); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 7, type); } { // getString() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { String s = rs.getString(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(s); assertTrue(wn); } else { s = s.trim(); int jdbcType = jdbcTypes[type]; switch(jdbcType) { case java.sql.Types.SMALLINT: case java.sql.Types.INTEGER: case java.sql.Types.BIGINT: case java.sql.Types.CHAR: case java.sql.Types.VARCHAR: case java.sql.Types.LONGVARCHAR: assertEquals("32",s); break; case java.sql.Types.REAL: case java.sql.Types.FLOAT: case java.sql.Types.DOUBLE: assertEquals("32.0",s); break; case java.sql.Types.DECIMAL: case java.sql.Types.NUMERIC: assertEquals("32.00000",s); break; case java.sql.Types.VARBINARY: case java.sql.Types.BINARY: assertEquals("0403fdc373",s); break; case java.sql.Types.DATE: assertEquals("2004-02-14",s); break; case java.sql.Types.TIME: assertEquals("17:14:24",s); break; case java.sql.Types.TIMESTAMP: assertEquals("2004-02-14 17:14:24.097625551",s); break; case java.sql.Types.CLOB: assertEquals("67",s); break; case java.sql.Types.BLOB: assertEquals("8243cafe0032",s); break; } assertFalse(wn); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 8, type); } { // getBytes() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { byte[] data = rs.getBytes(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(data); assertTrue(wn); } else { int jdbcType = jdbcTypes[type]; switch (jdbcType) { case java.sql.Types.BINARY: case java.sql.Types.VARBINARY: case java.sql.Types.LONGVARBINARY: assertEquals("0x4,0x3", showFirstTwo(data)); break; case java.sql.Types.BLOB: assertEquals("0x82,0x43", showFirstTwo(data)); } assertNotNull(data); assertFalse(wn); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 9, type); } { // getDate() boolean worked; SQLException sqleResult = null; ; ResultSet rs = null; try { rs = ps.executeQuery(); rs.next(); Date d = rs.getDate(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(d); assertTrue(wn); } else { assertEquals("2004-02-14", d.toString()); assertNotNull(d); assertFalse(wn); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; // 22007 invalid date time conversion worked = "22007".equals(sqle.getSQLState()); } catch (Throwable t) { // System.out.print(t.toString()); worked = false; } if (rs != null) rs.close(); judge_getXXX(worked, sqleResult, 10, type); } { boolean worked; SQLException sqleResult = null; ; ResultSet rs = null; try { // getTime() rs = ps.executeQuery(); rs.next(); Time t = rs.getTime(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(t); assertTrue(wn); } else { assertFalse(wn); assertEquals("17:14:24", t.toString()); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; // 22007 invalid date time conversion worked = "22007".equals(sqle.getSQLState()); } catch (Throwable t) { // System.out.println(t); 
worked = false; } if (rs != null) rs.close(); judge_getXXX(worked, sqleResult, 11, type); } { boolean worked; SQLException sqleResult = null; ; ResultSet rs = null; try { // getTimestamp(); rs = ps.executeQuery(); rs.next(); Timestamp ts = rs.getTimestamp(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(ts); assertTrue(wn); } else { if (type == java.sql.Types.DATE || type == java.sql.Types.TIMESTAMP) assertEquals("2004-02-14 00:00:00.0", ts.toString()); assertFalse(rs.wasNull()); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; // 22007 invalid date time conversion worked = "22007".equals(sqle.getSQLState()); } catch (Throwable t) { // System.out.println(t); worked = false; } if (rs != null) rs.close(); judge_getXXX(worked, sqleResult, 12, type); } { // getAsciiStream() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { InputStream is = rs.getAsciiStream(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); assertNull(is); } else { assertFalse(wn); if (B6[13][type]) assertNotNull(showFirstTwo(is)); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } // getAsciiStream on a NULL value for an invalid conversion // is handled differently in JCC to Cloudscape. On a non-NULL // value an exception is correctly raised by both JCC and CS. // here we check this specific case to reduce canon differences // between CNS and CS. boolean judge = B6[13][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 13, type); } { // getBinaryStream() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { InputStream is = rs.getBinaryStream(1); if (isNull) { assertTrue(rs.wasNull()); assertNull(is); } else if (B6[14][type]) { assertNotNull(showFirstTwo(is)); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } boolean judge = B6[14][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 14, type); } { // getCharacterStream() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { Reader r = rs.getCharacterStream(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(r); assertTrue(wn); } else if (B6[15][type]) { assertFalse(wn); assertNotNull(showFirstTwo(r)); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } boolean judge = B6[15][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 15, type); } { // getClob(); ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { Clob clob = rs.getClob(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(clob); assertTrue(wn); } else if (B6[16][type]) { assertFalse(wn); assertNotNull(clob.getSubString(1, 10)); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } boolean judge = B6[16][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 16, type); } { // getBlob() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { Blob blob = rs.getBlob(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); assertNull(blob); } else if (B6[17][type]) { assertNotNull(showFirstTwo(blob.getBinaryStream())); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; 
worked = false; } boolean judge = B6[17][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 17, type); } { // getUnicodeStream() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { InputStream is = rs.getUnicodeStream(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); assertNull(is); } else { assertFalse(wn); assertNotNull(is); } worked = true; } catch (NoSuchMethodError e) { worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); if (JDBC.vmSupportsJDBC2()) judge_getXXX(worked, sqleResult, 18, type); } // Check to see getObject returns the correct type { // getObject(); ResultSet rs = ps.executeQuery(); rs.next(); SQLException sqleResult = null; ; try { boolean worked; if (!SQLTypes[type].equals("DECIMAL(10,5)") || HAVE_BIG_DECIMAL) { Object o = rs.getObject(1); boolean wn = rs.wasNull(); Class cgo = B3_GET_OBJECT[type]; String cname; if (cgo.equals(byte[].class)) cname = "byte[]"; else cname = cgo.getName(); if (isNull) { assertTrue(wn); assertNull(o); worked = true; } else if (cgo.isInstance(o)) { worked = true; } else { worked = false; fail("FAIL NOT :" + cgo.getName() + " is " + o.getClass().getName()); } } else { // "ResultSet.getObject not called for DECIMAL type for // JSR169"; worked = true; } assertTrue(worked); } catch (SQLException sqle) { sqleResult = sqle; } rs.close(); } } private static boolean specificCheck(ResultSet rs, boolean worked, SQLException sqleResult, boolean isNull) throws SQLException { boolean judge = true; if (worked && isNull && rs.wasNull()) { // JCC returns NULL if (usingDerbyNetClient()) judge = false; } else if (!worked && isNull) { if (usingDerbyNetClient() && "22005".equals(sqleResult.getSQLState())) judge = false; } return judge; } private static void judge_getXXX(boolean worked, SQLException sqleResult, int whichCall, int type) { boolean validSQLState = false; // verify valid conversion worked if (B6[whichCall][type] && !worked) fail(" JDBC FAIL " + SQLTypes[type] + " " + sqleResult); else if (!worked) { // make sure not implemented or conversion error was thrown if it // didn't work String sqlState = sqleResult.getSQLState(); if ("0A000".equals(sqlState)) validSQLState = true; if ("0A000".equals(sqlState)) validSQLState = true; if ("22005".equals(sqlState)) // embedded invalid conversion error validSQLState = true; else if (sqlState == null) { // client invalid conversion error if (sqleResult.getMessage().indexOf( "Wrong result column type for requested conversion") != -1) validSQLState = true; } assertTrue("FAIL: Expected conversion error but got " + sqleResult, validSQLState); } } private static void judge_setXXX(boolean worked, SQLException sqleResult, int whichCall, int type) { String msg; if (worked && B2_MOD[whichCall][type]) msg = " JDBC MATCH(OK)"; else if (worked) msg = " CLOUD EXT (OK)"; else if (sqleResult != null && "0A000".equals(sqleResult.getSQLState())) msg = " Not Implemented (OK)"; else if (B2_MOD[whichCall][type]) { if (sqleResult != null) showException(sqleResult); msg = " JDBC FAIL " + SQLTypes[type]; } else { msg = checkForInvalidConversion(sqleResult); if (msg == null) return; } if (msg.startsWith("JDBC FAIL")) fail(" JDBC FAIL " + SQLTypes[type]); } private static void judge_setObject(boolean worked, SQLException sqleResult, int b5o, int type) { String msg; if (worked && B5[b5o][type]) msg = " JDBC MATCH(OK)"; else if (worked) msg = " CLOUD EXT (OK)"; else if 
("0A000".equals(sqleResult.getSQLState())) msg = " Not Implemented (OK)"; else if (B5[b5o][type]) { if (sqleResult != null) showException(sqleResult); msg = " JDBC FAIL " + SQLTypes[type]; } else { msg = checkForInvalidConversion(sqleResult); if (msg == null) return; } if (msg.startsWith("JDBC FAIL")) fail(" JDBC FAIL " + SQLTypes[type]); } /** * Look for an "Invalid Conversion" exception and format it for display. * * Look for an "Invalid Conversion" exception. If one is found, print "IC". * If one is not found, dump the actual exception to the output instead. * * Note that the actual invalid conversion exception may be wrapped inside a * BatchUpdateException, so we may need to hunt through the exception chain * to find it. */ private static String checkForInvalidConversion(SQLException sqle) { if (sqle == null) return null; boolean unknownException = true; SQLException e = sqle; while (e != null && unknownException == true) { // XCL12 is temp if ("22005".equals(e.getSQLState()) || "XCL12".equals(e.getSQLState()) || e.getMessage().indexOf("Illegal Conv") != -1) { unknownException = false; if ("0A000".equals(e.getSQLState()) && e.getMessage().indexOf("setUnicodeStream") != -1) unknownException = false; // System.out.print("IC"); break; } e = e.getNextException(); } if (unknownException) showException(sqle); return " JDBC MATCH (INVALID)"; } private static void setXXX(Statement s, PreparedStatement psi, PreparedStatement psq, int type) throws SQLException, java.io.IOException { { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setByte() psi.setByte(1, (byte) 98); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setByte"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 0, type); } // and as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { psi.setByte(1, (byte) 98); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setByte"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 0, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setShort() psi.setShort(1, (short) 98); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setShort"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 1, type); } // and as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setShort() as batch psi.setShort(1, (short) 98); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setShort"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 1, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setInt() psi.setInt(1, 98); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setInt"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 2, type); } // and as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setInt() as batch psi.setInt(1, 98); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setInt"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, 
sqleResult, 2, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setLong() psi.setLong(1, 98L); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setLong"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 3, type); } // as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setLong() as batch psi.setLong(1, 98L); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setLong"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 3, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setFloat() psi.setFloat(1, 98.4f); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setFloat"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 4, type); } // and as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setFloat() as batch psi.setFloat(1, 98.4f); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setFloat"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 4, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDouble() psi.setDouble(1, 98.5); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setDouble"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 5, type); } // as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDouble() as batch psi.setDouble(1, 98.5); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setDouble"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 5, type); } if (HAVE_BIG_DECIMAL) { { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBigDecimal() psi.setBigDecimal(1, new BigDecimal(98.0)); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBigDecimal"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 6, type); } // as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBigDecimal() as batch psi.setBigDecimal(1, new BigDecimal(98.0)); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBigDecimal"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 6, type); } // null BigDecimal { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBigDecimal(null) psi.setBigDecimal(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBigDecimal"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 6, type); } // null BigDecimal { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBigDecimal(null) as batch psi.setBigDecimal(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBigDecimal"); worked = true; } catch (SQLException sqle) { 
sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 6, type); } } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBoolean() psi.setBoolean(1, true); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBoolean"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 7, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBoolean() as batch psi.setBoolean(1, true); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBoolean"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 7, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { psi.setString(1,validString[type]); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setString"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } catch (Throwable t) { // JCC has some bugs // System.out.println(t.getMessage()); worked = false; sqleResult = null; } judge_setXXX(worked, sqleResult, 8, type); } // as batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setString() as batch psi.setString(1,validString[type]); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setString"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } catch (Throwable t) { // JCC has some bugs // System.out.println(t.getMessage()); worked = false; sqleResult = null; } judge_setXXX(worked, sqleResult, 8, type); } // null String { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setString(null) psi.setString(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setString"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } catch (Throwable t) { // JCC has some bugs // System.out.println(t.getMessage()); worked = false; sqleResult = null; } judge_setXXX(worked, sqleResult, 8, type); } // null String as batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setString(null) as batch psi.setString(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setString"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } catch (Throwable t) { // JCC has some bugs // System.out.println(t.getMessage()); worked = false; sqleResult = null; } judge_setXXX(worked, sqleResult, 8, type); } { s.execute("DELETE FROM PM.TYPE_AS"); // Set Invalid String for nonString types (DERBY-149) testSetStringInvalidValue(type, psi); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBytes() byte[] data = { (byte) 0x04, (byte) 0x03, (byte) 0xfd, (byte) 0xc3, (byte) 0x73 }; psi.setBytes(1, data); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBytes"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 9, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBytes() as batch byte[] data = { (byte) 0x04, (byte) 0x03, (byte) 0xfd, (byte) 0xc3, (byte) 0x73 }; psi.setBytes(1, data); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBytes"); worked = true; } 
catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 9, type); } // null byte[] { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBytes(null) psi.setBytes(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBytes"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 9, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBytes(null) as batch psi.setBytes(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBytes"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 9, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDate() psi.setDate(1, java.sql.Date.valueOf("2004-02-14")); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setDate"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 10, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDate() as batch psi.setDate(1, java.sql.Date.valueOf("2004-02-14")); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setDate"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 10, type); } // null Date { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDate(null) psi.setDate(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setDate"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 10, type); } // null Date { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDate(null) as batch psi.setDate(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setDate"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 10, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTime() psi.setTime(1, java.sql.Time.valueOf("00:00:00")); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setTime"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 11, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTime() as batch psi.setTime(1, java.sql.Time.valueOf("00:00:00")); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setTime"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 11, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTime(null) psi.setTime(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setTime"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 11, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTime(null) as batch psi.setTime(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setTime"); worked = 
true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 11, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTimestamp() psi.setTimestamp(1, java.sql.Timestamp .valueOf("2004-02-14 00:00:00.0")); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setTimestamp"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 12, type); } // as batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTimestamp() as batch psi.setTimestamp(1, java.sql.Timestamp .valueOf("2004-02-14 00:00:00.0")); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setTimestamp"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 12, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTimestamp(null) psi.setTimestamp(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setTimestamp"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 12, type); } // as batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTimestamp(null) as batch psi.setTimestamp(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setTimestamp"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 12, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setAsciiStream() byte[] data = new byte[6]; data[0] = (byte) 0x65; data[1] = (byte) 0x67; data[2] = (byte) 0x30; data[3] = (byte) 0x31; data[4] = (byte) 0x32; data[5] = (byte) 0x64; psi .setAsciiStream(1, new java.io.ByteArrayInputStream( data), 6); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setAsciiStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 13, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setAsciiStream() as batch byte[] data = new byte[6]; data[0] = (byte) 0x65; data[1] = (byte) 0x67; data[2] = (byte) 0x30; data[3] = (byte) 0x31; data[4] = (byte) 0x32; data[5] = (byte) 0x64; psi .setAsciiStream(1, new java.io.ByteArrayInputStream( data), 6); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setAsciiStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 13, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setAsciiStream(null) psi.setAsciiStream(1, null, 0); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setAsciiStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 13, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setAsciiStream(null) as batch psi.setAsciiStream(1, null, 0); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setAsciiStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 13, type); } { s.execute("DELETE FROM PM.TYPE_AS"); 
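// setBinaryStream(): insert a six-byte binary stream and verify how it is stored for this column type.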
SQLException sqleResult = null; boolean worked; try { // setBinaryStream() byte[] data = new byte[6]; data[0] = (byte) 0x4; data[1] = (byte) 0x3; data[2] = (byte) 0xca; data[3] = (byte) 0xfe; data[4] = (byte) 0x00; data[5] = (byte) 0x32; psi.setBinaryStream(1, new java.io.ByteArrayInputStream(data), 6); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBinaryStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 14, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBinaryStream() as batch byte[] data = new byte[6]; data[0] = (byte) 0x4; data[1] = (byte) 0x3; data[2] = (byte) 0xca; data[3] = (byte) 0xfe; data[4] = (byte) 0x00; data[5] = (byte) 0x32; psi.setBinaryStream(1, new java.io.ByteArrayInputStream(data), 6); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "getBinaryStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 14, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBinaryStream(null) psi.setBinaryStream(1, null, 0); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBinaryStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 14, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBinaryStream(null) as batch psi.setBinaryStream(1, null, 0); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBinaryStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 14, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setCharacterStream() psi.setCharacterStream(1, new java.io.StringReader("89"), 2); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setCharacterStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 15, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setCharacterStream() as batch psi.setCharacterStream(1, new java.io.StringReader("89"), 2); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setCharacterStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 15, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setCharacterStream(null) psi.setCharacterStream(1, null, 0); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setCharacterStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 15, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setCharacterStream(null) as batch psi.setCharacterStream(1, null, 0); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setCharacterStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 15, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setClob() ResultSet rsc = s .executeQuery("SELECT C FROM PM.LOB_GET WHERE ID = 1"); 
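// Read a CLOB from the PM.LOB_GET helper table to use as the setClob() source value.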
rsc.next(); Clob tester = rsc.getClob(1); rsc.close(); psi.setClob(1, tester); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setClob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 16, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setClob() as batch ResultSet rsc = s .executeQuery("SELECT C FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Clob tester = rsc.getClob(1); rsc.close(); psi.setClob(1, tester); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setClob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 16, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setClob(null) psi.setClob(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setClob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 16, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setClob(null) as batch psi.setClob(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setClob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 16, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBlob() ResultSet rsc = s .executeQuery("SELECT B FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Blob tester = rsc.getBlob(1); rsc.close(); psi.setBlob(1, tester); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBlob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 17, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBlob() as batch ResultSet rsc = s .executeQuery("SELECT B FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Blob tester = rsc.getBlob(1); rsc.close(); psi.setBlob(1, tester); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBlob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 17, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // Blob(null) psi.setBlob(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBlob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 17, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBlob(null) as batch psi.setBlob(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBlob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 17, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setUnicodeStream() byte[] data = new byte[6]; data[0] = (byte) 0x4; data[1] = (byte) 0x3; data[2] = (byte) 0xca; data[3] = (byte) 0xfe; data[4] = (byte) 0x00; data[5] = (byte) 0x32; try { psi.setUnicodeStream(1, new java.io.ByteArrayInputStream( data), 6); } catch (NoSuchMethodError e) { // ResultSet.setUnicodeStream not present - correct for // JSR169 } if (JDBC.vmSupportsJDBC2()) { 
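// Only run the deprecated setUnicodeStream() update when the JVM supports JDBC 2.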
psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setUnicodeStream"); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } if (JDBC.vmSupportsJDBC2()) judge_setXXX(worked, sqleResult, 14, type); } // setObject(null) { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // should never work! // setObject(null) psi.setObject(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setObject"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } if (worked) fail("FAIL: setObject(null) not valid"); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // should never work! // setObject(null) as batch psi.setObject(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setObject"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } if (worked) fail("FAIL: setObject(1,null) did not throw exception"); } setXXX_setObject(s, psi, psq, type, validString[type], "java.lang.String", 0); if (HAVE_BIG_DECIMAL) setXXX_setObject(s, psi, psq, type, BigDecimal.valueOf(98L), "java.math.BigDecimal", 1); setXXX_setObject(s, psi, psq, type, Boolean.TRUE, "java.lang.Boolean", 2); // DERBY-1500: setObject() should work for Byte and Short too. setXXX_setObject(s, psi, psq, type, new Byte((byte) 98), "java.lang.Byte", 1); setXXX_setObject(s, psi, psq, type, new Short((short) 98), "java.lang.Short", 2); setXXX_setObject(s, psi, psq, type, new Integer(98), "java.lang.Integer", 3); setXXX_setObject(s, psi, psq, type, new Long(98), "java.lang.Long", 4); setXXX_setObject(s, psi, psq, type, new Float(98.0f), "java.lang.Float", 5); setXXX_setObject(s, psi, psq, type, new Double(98.0d), "java.lang.Double", 6); { byte[] data = { 0x4, 0x3 }; setXXX_setObject(s, psi, psq, type, data, "byte[]", 7); } setXXX_setObject(s, psi, psq, type, java.sql.Date.valueOf("2004-02-14"), "java.sql.Date", 8); setXXX_setObject(s, psi, psq, type, java.sql.Time.valueOf("00:00:00"), "java.sql.Time", 9); setXXX_setObject(s, psi, psq, type, java.sql.Timestamp .valueOf("2004-02-14 00:00:00.0"), "java.sql.Timestamp", 10); s.getConnection().commit(); if (!usingDerbyNetClient()) { { ResultSet rsc = s .executeQuery("SELECT B FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Blob tester = rsc.getBlob(1); rsc.close(); setXXX_setObject(s, psi, psq, type, tester, "java.sql.Blob", 11); } { ResultSet rsc = s .executeQuery("SELECT C FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Clob tester = rsc.getClob(1); rsc.close(); setXXX_setObject(s, psi, psq, type, tester, "java.sql.Clob", 12); } } } private static void setXXX_setObject(Statement s, PreparedStatement psi, PreparedStatement psq, int type, Object value, String className, int b5o) throws SQLException, java.io.IOException { { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setObject(" + className + ") psi.setObject(1, value); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setObject(" + className + ")"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setObject(worked, sqleResult, b5o, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setObject(" + className + ") as batch psi.setObject(1, value); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setObject(" + className + ")"); worked = true; } catch (SQLException sqle) { sqleResult 
= sqle; worked = false; } catch (Throwable t) { fail("FAIL " + t.getMessage()); return; } judge_setObject(worked, sqleResult, b5o, type); } } private static void unexpectedException(SQLException sqle) { fail("FAIL unexpected exception - "); showException(sqle); sqle.printStackTrace(System.out); } private static void showException(SQLException sqle) { do { String state = sqle.getSQLState(); if (state == null) state = "?????"; String msg = sqle.getMessage(); if (msg == null) msg = "?? no message ??"; sqle.printStackTrace(); fail(" (" + state + "):" + msg); sqle = sqle.getNextException(); } while (sqle != null); } private static boolean setValidValue(PreparedStatement ps, int param, int jdbcType) throws SQLException { switch (jdbcType) { case Types.BIT: ps.setBoolean(param, true); return true; case Types.TINYINT: ps.setByte(param, (byte) 32); return true; case Types.SMALLINT: ps.setShort(param, (short) 32); return true; case Types.INTEGER: ps.setInt(param, 32); return true; case Types.BIGINT: ps.setLong(param, 32L); return true; case Types.REAL: ps.setFloat(param, 32.0f); return true; case Types.FLOAT: case Types.DOUBLE: ps.setDouble(param, 32.0); return true; case Types.DECIMAL: BigDecimalHandler.setBigDecimalString(ps, param, "32.0"); return true; case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: ps.setString(param, "32"); return true; case Types.BINARY: case Types.VARBINARY: { byte[] data = { (byte) 0x04, (byte) 0x03, (byte) 0xfd, (byte) 0xc3, (byte) 0x73 }; ps.setBytes(param, data); return true; } // Types.LONGVARBINARY: case Types.DATE: ps.setDate(param, java.sql.Date.valueOf("2004-02-14")); return true; case Types.TIME: ps.setTime(param, java.sql.Time.valueOf("17:14:24")); return true; case Types.TIMESTAMP: ps.setTimestamp(param, java.sql.Timestamp .valueOf("2004-02-14 17:14:24.097625551")); return true; case Types.CLOB: // JDBC 3.0 spec section 16.3.2 explictly states setCharacterStream // is OK for setting a CLOB ps.setCharacterStream(param, new java.io.StringReader("67"), 2); return true; case Types.BLOB: // JDBC 3.0 spec section 16.3.2 explictly states setBinaryStream is // OK for setting a BLOB { byte[] data = new byte[6]; data[0] = (byte) 0x82; data[1] = (byte) 0x43; data[2] = (byte) 0xca; data[3] = (byte) 0xfe; data[4] = (byte) 0x00; data[5] = (byte) 0x32; ps .setBinaryStream(param, new java.io.ByteArrayInputStream( data), 6); return true; } default: return false; } } private static boolean getValidValue(PreparedStatement ps, int jdbcType, String method) throws SQLException, IOException { ResultSet rs = ps.executeQuery(); rs.next(); switch (jdbcType) { case Types.SMALLINT: { short val = rs.getShort(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0, val); else if (isBooleanMethod(method)) assertEquals(1, val); else assertEquals(98, val); return true; } case Types.INTEGER: { int val = rs.getInt(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0, val); else if (isBooleanMethod(method)) assertEquals(1, val); else assertEquals(98, val); return true; } case Types.BIGINT: { long val = rs.getLong(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0, val); else if (isBooleanMethod(method)) assertEquals(1, val); else assertEquals(98, val); return true; } case Types.REAL: { float val = rs.getFloat(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0.0, val, .001); else if (isBooleanMethod(method)) assertEquals(1.0, val, .001); else if (method.equals("setFloat")) assertEquals(98.4, val, .001); else if (method.equals("setDouble")) assertEquals(98.5, val, .001); else 
assertEquals(98.0, val, .001); return true; } case Types.FLOAT: case Types.DOUBLE: { double val = rs.getDouble(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0.0, val, .001); else if (isBooleanMethod(method)) assertEquals(1.0, val, .001); else if (method.equals("setFloat")) assertEquals(98.4, val, .001); else if (method.equals("setDouble")) assertEquals(98.5, val, .001); else assertEquals(98.0, val, .001); return true; } case Types.DECIMAL: { String val = BigDecimalHandler.getBigDecimalString(rs, 1); boolean wn = rs.wasNull(); if (wn) assertNull(val); else if (isBooleanMethod(method)) assertEquals("1.00000", val); else if (method.equals("setFloat")) assertEquals("98.40000", val); else if (method.equals("setDouble")) assertEquals("98.50000", val); else assertEquals("98.00000", val); return true; } case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: { String s = rs.getString(1); boolean wn = rs.wasNull(); if (wn) assertNull(s); else { // With IBM's DB2 universal driver. // Setting a java.sql.Clob value works with // a character column but sets the value to // be the object's toString. This is probably a bug with JCC. if (s.startsWith("com.ibm.db2.jcc.") || s.startsWith("org.apache.derby.client")) s = "<OBJECT.toString()>"; boolean hasNonAscii = false; // check for any characters in the control range for (int si = 0; si < s.length(); si++) { char c = s.charAt(si); if (c < (char) 0x20 || c >= (char) 0x7f) { hasNonAscii = true; break; } } if (hasNonAscii) { StringBuffer sb = new StringBuffer(); sb.append("EncodedString: >"); for (int si = 0; si < s.length(); si++) { sb.append(' '); sb.append((int) s.charAt(si)); } sb.append(" <"); s = sb.toString(); } checkValidStringValue(method, s); } return true; } case Types.BINARY: case Types.VARBINARY: { byte[] data = rs.getBytes(1); boolean wn = rs.wasNull(); if (wn) assertNull(data); else assertEquals("0x4,0x3", showFirstTwo(data)); return true; } case Types.LONGVARBINARY: { InputStream is = rs.getBinaryStream(1); boolean wn = rs.wasNull(); if (wn) assertNull(is); else assertEquals("0x4,0x3", showFirstTwo(is)); return true; } case Types.DATE: { Date d = rs.getDate(1); boolean wn = rs.wasNull(); if (wn) assertNull(d); else assertEquals(Date.valueOf("2004-02-14"), d); return true; } case Types.TIME: { Time t = rs.getTime(1); boolean wn = rs.wasNull(); if (wn) assertNull(t); else assertEquals(Time.valueOf("00:00:00"), t); return true; } case Types.TIMESTAMP: { Timestamp ts = rs.getTimestamp(1); boolean wn = rs.wasNull(); if (wn) assertNull(rs.getTimestamp(1)); else assertEquals(Timestamp.valueOf("2004-02-14 00:00:00.0"), ts); return true; } case Types.CLOB: { Clob clob = rs.getClob(1); boolean wn = rs.wasNull(); if (wn) assertNull(clob); else { char[] charray = new char[20]; int numchar = clob.getCharacterStream().read(charray); String s = new String(charray,0,numchar); if ("setString".equals(method)) assertEquals("98",s); else if ("setAsciiStream".equals(method)) assertEquals("eg012d", s); else if ("setCharacterStream".equals(method)) assertEquals("89",s); else if ("setClob".equals(method)) assertEquals("72",s); else if ("setObject(java.lang.String)".equals(method)) assertEquals("98",s); else if ("setObject(java.lang.Clob)".equals(method)) assertEquals("72",s); } return true; } case Types.BLOB: { Blob blob = rs.getBlob(1); boolean wn = rs.wasNull(); if (wn) assertNull(blob); else { assertEquals("0x4,0x3", showFirstTwo(blob.getBinaryStream())); } return true; } default: fail("FAIL JDBC TYPE IN getValidValue " + 
JDBC.sqlNameFromJdbc(jdbcType)); return false; } } private static void checkValidStringValue(String method, String s) { s = s.trim(); if ("setBoolean".equals(method) || "setObject(java.lang.Boolean)".equals(method) ) assertEquals("1",s); else if ("setBytes".equals(method) || ("setObject(byte[])".equals(method))) assertEquals("EncodedString: > 1027 ",s.substring(0,22)); else if ("setFloat".equals(method)) assertEquals("98.4", s); else if ("setDouble".equals(method)) assertEquals("98.5",s); else if ("setDate".equals(method) || "setObject(java.sql.Date)".equals(method)) assertEquals("2004-02-14", s); else if ("setTime".equals(method) || "setObject(java.sql.Time)".equals(method)) assertEquals("00:00:00",s); else if ("setTimestamp".equals(method)|| "setObject(java.sql.Timestamp)".equals(method)) assertEquals("2004-02-14 00:00:00.0",s); else if ("setAsciiStream".equals(method)) assertEquals("eg012d",s); else if ("setCharacterStream".equals(method)) assertEquals("89",s); else if ("setObject(java.lang.Float)".equals(method) || "setObject(java.lang.Double)".equals(method)) assertEquals("98.0",s); else assertEquals("98",s.trim()); } private static boolean isBooleanMethod(String method) { return method.equals("setBoolean") || method.equals("setObject(java.lang.Boolean)"); } private static boolean getOutValue(CallableStatement cs, int param, int regJdbcType, int paramType) throws SQLException, IOException { int paramJdbcType= jdbcTypes[paramType]; switch (regJdbcType) { case Types.BIT: { boolean val = cs.getBoolean(param); boolean wn = cs.wasNull(); if (!wn) assertTrue(val); return true; } case Types.TINYINT: { // Check out and inout params for procedures byte val = cs.getByte(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.SMALLINT: { short val = cs.getShort(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.INTEGER: { int val = cs.getInt(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.BIGINT: { long val = cs.getLong(param); boolean wn = cs.wasNull(); if(!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.REAL: { float val = cs.getFloat(param); boolean wn = cs.wasNull(); if(!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.FLOAT: case Types.DOUBLE: { double val = cs.getDouble(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.DECIMAL: { String val = BigDecimalHandler.getBigDecimalString(cs, param, regJdbcType); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,val); return true; } case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: { String val = cs.getString(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,val.trim()); return true; } case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: { byte[] data = cs.getBytes(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,data); return true; } case Types.DATE: { Date val = cs.getDate(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,val); return true; } case Types.TIME: { Time val = cs.getTime(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,val); return true; } case Types.TIMESTAMP: { Timestamp val = cs.getTimestamp(param); boolean wn = cs.wasNull(); if (!wn) 
checkProcedureOutput(param,paramType,val); return true; } case Types.CLOB: { // clob not allowed for procedures Clob clob = cs.getClob(param); boolean wn = cs.wasNull(); return true; } case Types.BLOB: { // blob not allowed for procedures Blob blob = cs.getBlob(param); boolean wn = cs.wasNull(); return true; } default: fail("FAIL JDBC TYPE IN getOutValue " + JDBC.sqlNameFromJdbc(regJdbcType)); return false; } } private static void checkProcedureOutput(int param, int paramType, byte val) { checkProcedureOutput(param,paramType,(long) val); } private static void checkProcedureOutput(int param, int paramType, short val) { checkProcedureOutput(param,paramType,(long) val); } private static void checkProcedureOutput(int param, int paramType, int val) { checkProcedureOutput(param,paramType,(long) val); } private static void checkProcedureOutput(int param, int paramType, long val) { switch (jdbcTypes[paramType]) { case java.sql.Types.SMALLINT: if (param == 2) assertEquals(38,val); else if (param == 3) assertEquals(77,val); break; case java.sql.Types.INTEGER: if (param == 2) assertEquals(41,val); else if (param == 3) assertEquals(88,val); break; case java.sql.Types.BIGINT: if (param == 2) assertEquals(40,val); else if (param == 3) assertEquals(99,val); break; case java.sql.Types.FLOAT: if (param == 2) assertEquals(35,val); else if (param == 3) assertEquals(66,val); break; case java.sql.Types.REAL: if (param == 2) assertEquals(41,val); else if (param == 3) assertEquals(88,val); break; case java.sql.Types.DECIMAL: if (param == 2) assertEquals(34,val); else if (param == 3) assertEquals(84,val); break; case java.sql.Types.DOUBLE: if (param == 2) assertEquals(35,val); else if (param == 3) assertEquals(66,val); break; } } private static void checkProcedureOutput(int param, int paramType, float val) { checkProcedureOutput(param,paramType, (double) val); } private static void checkProcedureOutput(int param, int paramType, double val) { switch (jdbcTypes[paramType]) { case java.sql.Types.SMALLINT: if (param == 2) assertEquals(38.0,val,.00001); else if (param == 3) assertEquals(77.0,val,.00001); break; case java.sql.Types.INTEGER: if (param == 2) assertEquals(41.0,val,.00001); else if (param == 3) assertEquals(88.0,val, .00001); break; case java.sql.Types.BIGINT: if (param == 2) assertEquals(40.0,val,.00001); else if (param == 3) assertEquals(99.0,val,.00001); break; case java.sql.Types.FLOAT: if (param == 2) assertEquals(35.9,val,.00001); else if (param == 3) assertEquals(66.8,val,.00001); break; case java.sql.Types.REAL: if (param == 2) assertEquals(41.9,val,.00001); else if (param == 3) assertEquals(88.8,val,.00001); break; case java.sql.Types.DECIMAL: if (param == 2) assertEquals(34.29999,val,.0001); else if (param == 3) assertEquals(84.09999,val,.0001); break; case java.sql.Types.DOUBLE: if (param == 2) assertEquals(35.9,val,.00001); else if (param == 3) assertEquals(66.8,val,.00001); break; } } private static void checkProcedureOutput(int param, int paramType, String val) { switch (jdbcTypes[paramType]) { case java.sql.Types.SMALLINT: if (param == 2) assertEquals("38",val); else if (param == 3) assertEquals("77",val); break; case java.sql.Types.INTEGER: if (param == 2) assertEquals("41",val); else if (param == 3) assertEquals("88",val); break; case java.sql.Types.BIGINT: if (param == 2) assertEquals("40",val); else if (param == 3) assertEquals("99",val); break; case java.sql.Types.FLOAT: if (param == 2) assertEquals("35.9",val); else if (param == 3) assertEquals("66.8",val); break; case 
java.sql.Types.REAL: if (param == 2) assertEquals("41.9",val); else if (param == 3) assertEquals("88.8",val); break; case java.sql.Types.DECIMAL: if (param == 2) assertEquals("34.29999",val); else if (param == 3) assertEquals("84.09999",val); break; case java.sql.Types.DOUBLE: if (param == 2) assertEquals("35.9",val); else if (param == 3) assertEquals("66.8",val); break; } } private static void checkProcedureOutput(int param, int paramType, byte[] val) { if (param == 2) assertEquals("0x4,0x3",showFirstTwo(val)); else if (param == 3) assertEquals("0x9,0xfe",showFirstTwo(val)); } private static void checkProcedureOutput(int param, int paramType, Date val) { switch (jdbcTypes[paramType]) { case java.sql.Types.DATE: if (param == 2) assertEquals("2004-03-08", val.toString()); else if (param == 3) assertEquals("2005-03-08", val.toString()); break; case java.sql.Types.TIMESTAMP: if (param == 2) assertEquals("2004-03-12", val.toString()); else if (param == 3) assertEquals("2004-04-12", val.toString()); break; } } private static void checkProcedureOutput(int param, int paramType, Time val) { switch (jdbcTypes[paramType]) { case java.sql.Types.TIME: if (param == 2) assertEquals("19:44:42", val.toString()); else if (param == 3) assertEquals("20:44:42", val.toString()); break; case java.sql.Types.TIMESTAMP: if (param == 2) assertEquals("21:14:24", val.toString()); else if (param == 3) assertEquals("04:25:26", val.toString()); break; } } private static void checkProcedureOutput(int param, int paramType, Timestamp val) { switch (jdbcTypes[paramType]) { case java.sql.Types.DATE: if (param == 2) assertEquals("2004-03-08 00:00:00.0",val.toString()); else if (param == 3) assertEquals("2005-03-08 00:00:00.0", val.toString()); break; case java.sql.Types.TIME: // getTimestamp on time will use the current date, so can't check it explicitly // just check not null assertNotNull(val); break; case java.sql.Types.TIMESTAMP: if (param == 2) assertEquals("2004-03-12 21:14:24.938222433", val.toString()); else if (param == 3) assertEquals("2004-04-12 04:25:26.462983731", val.toString()); break; } } static void dumpSQLExceptions(SQLException se) { while (se != null) { System.out.println("SQLSTATE(" + se.getSQLState() + "): " + se.toString()); se = se.getNextException(); } } /** * Test for DERBY-149 fix Check that setString to an invalid value throws an * exception rather than causing a hang * * @param type * type for SQLTypes array * @param psi - * insert prepared statement. * */ private static void testSetStringInvalidValue(int type, PreparedStatement psi) { // Do not perform this test for string types. // Only test for types wich will fail with setString("InvalidValue"); switch (jdbcTypes[type]) { case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: case Types.CLOB: return; } String sqlType = SQLTypes[type]; try { psi.setString(1, "Invalid Value"); psi.executeUpdate(); // Should have gotten exception. 
            // Test fails if we reach here without an exception.
            String error = "FAIL - setString(1,\"Invalid Value\") for type "
                    + sqlType + " did not throw an exception as expected";
            fail(error);
        } catch (SQLException sqle) {
            if ("22018".equals(sqle.getSQLState())
                    || "XCL12".equals(sqle.getSQLState())
                    || "22007".equals(sqle.getSQLState())
                    || "22005".equals(sqle.getSQLState())
                    || (sqle.getMessage().indexOf("Invalid data conversion") != -1)
                    || (sqle.getMessage().indexOf("Illegal Conversion") != -1))
                ; // System.out.println(" IC (Expected)");
            else
                fail("FAIL:" + sqle.getMessage());
        } catch (Exception e) {
            fail("FAIL: Unexpected Exception " + e.getMessage());
        }
    }

    private static String showFirstTwo(java.io.Reader in)
            throws java.io.IOException {
        int b1 = in.read();
        int b2 = in.read();
        in.close();
        return "0x" + Integer.toHexString(b1) + "," + "0x"
                + Integer.toHexString(b2);
    }

    private static String showFirstTwo(java.io.InputStream in)
            throws java.io.IOException {
        int b1 = in.read();
        int b2 = in.read();
        in.close();
        return "0x" + Integer.toHexString(b1) + "," + "0x"
                + Integer.toHexString(b2);
    }

    private static String showFirstTwo(byte[] data) {
        int b1 = data[0];
        int b2 = data[1];
        return "0x" + Integer.toHexString(((int) b1) & 0xff) + "," + "0x"
                + Integer.toHexString(((int) b2) & 0xff);
    }

    public static Test suite() {
        // Can't run for client for now, getting strange protocol error on
        // tearDown - DERBY-2381
        // return TestConfiguration.defaultSuite(ParameterMappingTest.class);

        // Don't run for JSR169 until DERBY-2403 is resolved.
        if (JDBC.vmSupportsJDBC2())
            return TestConfiguration.embeddedSuite(ParameterMappingTest.class);
        else
            return new TestSuite("ParameterMapping");
    }
}
java/testing/org/apache/derbyTesting/functionTests/tests/jdbcapi/ParameterMappingTest.java
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.derbyTesting.functionTests.tests.jdbcapi; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; import java.sql.Blob; import java.sql.CallableStatement; import java.sql.Clob; import java.sql.Connection; import java.sql.Date; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import junit.framework.Test; import org.apache.derbyTesting.junit.BigDecimalHandler; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.TestConfiguration; /** * */ public class ParameterMappingTest extends BaseJDBCTestCase { private static boolean HAVE_BIG_DECIMAL; static { if (JDBC.vmSupportsJSR169()) HAVE_BIG_DECIMAL = false; else HAVE_BIG_DECIMAL = true; } private static int[] jdbcTypes = { Types.TINYINT, Types.SMALLINT, Types.INTEGER, Types.BIGINT, Types.REAL, Types.FLOAT, Types.DOUBLE, Types.DECIMAL, Types.NUMERIC, Types.BIT, Types.NULL, // Types.BOOLEAN Types.CHAR, Types.VARCHAR, Types.LONGVARCHAR, Types.NULL, // Types.BINARY, Types.VARBINARY, Types.NULL, // Types.LONGVARBINARY, Types.DATE, Types.TIME, Types.TIMESTAMP, Types.CLOB, Types.BLOB, }; private static String[] SQLTypes = { null, "SMALLINT", "INTEGER", "BIGINT", "REAL", "FLOAT", "DOUBLE", "DECIMAL(10,5)", null, null, null, "CHAR(60)", "VARCHAR(60)", "LONG VARCHAR", "CHAR(60) FOR BIT DATA", "VARCHAR(60) FOR BIT DATA", "LONG VARCHAR FOR BIT DATA", "DATE", "TIME", "TIMESTAMP", "CLOB(1k)", "BLOB(1k)", }; private static String[] validString = {null,"98","98","98", "98","98", "98","98",null,null,null, "98","98","98","0x4", "0x4","0x4", "2004-02-14", "00:00:00","2004-02-14 00:00:00","98","0x4"}; private static Class[] B3_GET_OBJECT; static { if (HAVE_BIG_DECIMAL) { B3_GET_OBJECT = new Class[] { java.lang.Integer.class, // Types.TINYINT, java.lang.Integer.class, // Types.SMALLINT, java.lang.Integer.class, // Types.INTEGER, java.lang.Long.class, // Types.BIGINT, java.lang.Float.class, // Types.REAL, java.lang.Double.class, // Types.FLOAT, java.lang.Double.class, // Types.DOUBLE, java.math.BigDecimal.class, // Types.DECIMAL, java.math.BigDecimal.class, // Types.NUMERIC, java.lang.Boolean.class, // Types.BIT, java.lang.Boolean.class, // Types.BOOLEAN java.lang.String.class, // Types.CHAR, java.lang.String.class, // Types.VARCHAR, java.lang.String.class, // Types.LONGVARCHAR, byte[].class, // Types.NULL, //Types.BINARY, byte[].class, // Types.VARBINARY, byte[].class, // Types.LONGVARBINARY, java.sql.Date.class, // Types.DATE, java.sql.Time.class, // Types.TIME, 
java.sql.Timestamp.class, // Types.TIMESTAMP, java.sql.Clob.class, // Types.CLOB, java.sql.Blob.class, // Types.BLOB, }; } else { B3_GET_OBJECT = new Class[] { java.lang.Integer.class, // Types.TINYINT, java.lang.Integer.class, // Types.SMALLINT, java.lang.Integer.class, // Types.INTEGER, java.lang.Long.class, // Types.BIGINT, java.lang.Float.class, // Types.REAL, java.lang.Double.class, // Types.FLOAT, java.lang.Double.class, // Types.DOUBLE, java.lang.String.class, // Types.DECIMAL, java.lang.String.class, // Types.NUMERIC, java.lang.Boolean.class, // Types.BIT, java.lang.Boolean.class, // Types.BOOLEAN java.lang.String.class, // Types.CHAR, java.lang.String.class, // Types.VARCHAR, java.lang.String.class, // Types.LONGVARCHAR, byte[].class, // Types.NULL, //Types.BINARY, byte[].class, // Types.VARBINARY, byte[].class, // Types.LONGVARBINARY, java.sql.Date.class, // Types.DATE, java.sql.Time.class, // Types.TIME, java.sql.Timestamp.class, // Types.TIMESTAMP, java.sql.Clob.class, // Types.CLOB, java.sql.Blob.class, // Types.BLOB, }; } } private static final boolean _ = false; private static final boolean X = true; /** JDBC 3.0 spec Table B6 - Use of ResultSet getter Methods to Retrieve JDBC Data Types */ public static final boolean[][] B6 = { // Types. T S I B R F D D N B B C V L B V L D T T C B // I M N I E L O E U I O H A O I A O A I I L L // N A T G A O U C M T O A R N N R N T M M O O // Y L E I L A B I E L R C G A B G E E E B B // I L G N T L M R E H V R I V S // N I E T E A I A A A Y N A T // T N R L C N R R A R A // T C R B M // H B I P // A I N // R N /* 0 getByte*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 1 getShort*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 2 getInt*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 3 getLong*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 4 getFloat*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 5 getDouble*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 6 getBigDecimal*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 7 getBoolean*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 8 getString*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _}, /* 9 getBytes*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*10 getDate*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, _, X, _, _}, /*11 getTime*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X, X, _, _}, /*12 getTimestamp*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, X, X, _, _}, /*13 getAsciiStream*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, X, X, X, _, _, _, _, _}, /*14 getBinaryStream*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*15 getCharStream*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, X, X, X, _, _, _, _, _}, /*16 getClob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _}, /*17 getBlob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X}, /*18 getUnicodeStream */{ _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, }; /** JDBC 3.0 Section 13.2.2.1 specifies that table B-2 is used to specify type mappings from the Java types (e.g. int as setInt) to the JDBC SQL Type (Types.INT). 
This table does not include stream methods and does not include conversions specified elsewhere in the text, Namely Section 16.3.2 setBinaryStream may be used to set a BLOB setAsciiStream and setCharacterStream may be used to set a CLOB Thus this B2_MOD table is laid out like the B6 table and makes the assumptions that - Any Java numeric type can be used to set any SQL numeric type - Any Java numeric type can be used to set any SQL CHAR type - Numeric and date/time java types can be converted to SQL Char values. */ // Types. T S I B R F D D N B B C V L B V L D T T C B // I M N I E L O E U I O H A O I A O A I I L L // N A T G A O U C M T O A R N N R N T M M O O // Y L E I L A B I E L R C G A B G E E E B B // I L G N T L M R E H V R I V S // N I E T E A I A A A Y N A T // T N R L C N R R A R A // T C R B M // H B I P // A I N // R N public static boolean[][] B2_MOD = { /* 0 setByte*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 1 setShort*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 2 setInt*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 3 setLong*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 4 setFloat*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 5 setDouble*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 6 setBigDecimal*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 7 setBoolean*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 8 setString*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, X, X, X, _, _}, /* 9 setBytes*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*10 setDate*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, _, X, _, _}, /*11 setTime*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X, _, _, _}, /*12 setTimestamp*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, X, X, _, _}, /*13 setAsciiStream*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _, _, X, _}, /*14 setBinaryStream*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X}, /*15 setCharStream*/ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _, _, X, _}, /*16 setClob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _}, /*17 setBlob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X}, /*18 setUnicodeStream */{ _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, }; /** Table B5 conversion of Objects using setObject*/ // Types. 
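    // Illustrative note (not part of the original test logic): each conversion
    // table is indexed as table[methodRow][sqlTypeColumn], and the columns
    // follow the same order as the jdbcTypes[] and SQLTypes[] arrays above.
    // For example, B2_MOD[2][col] records whether setInt() is expected to
    // succeed against the SQL type in that column, just as B6[2][col] does
    // for getInt().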
T S I B R F D D N B B C V L B V L D T T C B // I M N I E L O E U I O H A O I A O A I I L L // N A T G A O U C M T O A R N N R N T M M O O // Y L E I L A B I E L R C G A B G E E E B B // I L G N T L M R E H V R I V S // N I E T E A I A A A Y N A T // T N R L C N R R A R A // T C R B M // H B I P // A I N // R N public static boolean[][] B5 = { /* 0 String */ { X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, X, X, X, X, _, _}, /* 1 BigDecimal */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 2 Boolean */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 3 Integer */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 4 Long */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 5 Float */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 6 Double */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 7 byte[] */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /* 8 Date */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, _, X, _, _}, /* 9 Time */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X, _, _, _}, /*10 Timestamp */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, X, X, _, _}, /*11 Blob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X}, /*12 Clob */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _}, //Byte and Short were added to this table in JDBC 4.0. (See DERBY-1500.) /*13 Byte */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /*14 Short */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, }; public static final boolean[][] allowRegisterOut = { // Types. T S I B R F D D N B B C V L B V L D T T C B // I M N I E L O E U I O H A O I A O A I I L L // N A T G A O U C M T O A R N N R N T M M O O // Y L E I L A B I E L R C G A B G E E E B B // I L G N T L M R E H V R I V S // N I E T E A I A A A Y N A T // T N R L C N R R A R A // T C R B M // H B I P // A I N // param sqlType R N /* 0 null */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, /* 1 SMALLINT*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 2 INTEGER*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 3 BIGINT */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 4 REAL */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 5 FLOAT */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 6 DOUBLE */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 7 DECIMAL*/ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, _, _, _, _, _}, /* 8 null */ { X, X, X, X, X, X, X, X, X, X, X, X, X, X, _, _, _, X, X, X, _, _}, /* 9 null*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*10 null */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, _, X, _, _}, /*11 CHAR(60) */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, X, _, _, _}, /*12 VARCHAR(60) */ { _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, X, X, X, _, _}, /*13 LONG VARCHAR */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, /*14 CHAR FOR BIT */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*15 VARCHAR FOR BIT*/ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, X, X, _, _, _, _, _}, /*16 LONGVARCHAR FOR B*/{ _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, /*17 DATE */ { _, _, _, _, _, _, _, 
_, _, _, _, _, _, _, _, _, _, X, _, _, _, _}, /*18 TIME */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _, _, _}, /*19 TIMESTAMP */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, X, _, _}, /*20 CLOB */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, /*21 BLOB */ { _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _}, }; /** * @param arg0 */ public ParameterMappingTest(String arg0) { super(arg0); } /* * (non-Javadoc) * * @see junit.framework.TestCase#setUp() */ protected void setUp() throws Exception { Connection conn = getConnection(); conn.setAutoCommit(false); // create simple a table with BLOB and CLOB thta // can be used to for setBlob/setClob testing. Statement scb = conn.createStatement(); scb.execute("CREATE TABLE PM.LOB_GET(ID INT, B BLOB, C CLOB)"); PreparedStatement pscb = conn .prepareStatement("INSERT INTO PM.LOB_GET VALUES (?, ?, ?)"); pscb.setInt(1, 0); pscb.setNull(2, Types.BLOB); pscb.setNull(3, Types.CLOB); pscb.executeUpdate(); pscb.setInt(1, 1); { byte[] data = new byte[6]; data[0] = (byte) 0x4; data[1] = (byte) 0x3; data[2] = (byte) 0x72; data[3] = (byte) 0x43; data[4] = (byte) 0x00; data[5] = (byte) 0x37; pscb.setBinaryStream(2, new java.io.ByteArrayInputStream(data), 6); } pscb.setCharacterStream(3, new java.io.StringReader("72"), 2); pscb.executeUpdate(); scb.close(); pscb.close(); conn.commit(); } public void testParameterMapping() throws Exception { Connection conn = getConnection(); for (int type = 0; type < SQLTypes.length; type++) { String sqlType = SQLTypes[type]; if (sqlType == null || jdbcTypes[type] == Types.NULL) { continue; } Statement s = conn.createStatement(); try { s.execute("DROP TABLE PM.TYPE_AS"); } catch (SQLException seq) { } s.execute("CREATE TABLE PM.TYPE_AS(VAL " + SQLTypes[type] + ")"); PreparedStatement psi = conn .prepareStatement("INSERT INTO PM.TYPE_AS(VAL) VALUES(?)"); psi.setNull(1, jdbcTypes[type]); psi.executeUpdate(); PreparedStatement psq = conn .prepareStatement("SELECT VAL FROM PM.TYPE_AS"); ResultSet rs = psq.executeQuery(); ResultSetMetaData rsmd = rs.getMetaData(); assertEquivalentDataType(jdbcTypes[type], rsmd.getColumnType(1)); rs.close(); // For this data type // Test inserting a NULL value and then performing all the getXXX() // calls on it. // System.out.println(" NULL VALUE"); getXXX(psq, type, true); s.execute("DELETE FROM PM.TYPE_AS"); // For this data type // Test inserting a valid value and then performing all the getXXX() // calls on it. 
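            // The valid-value pass mirrors the NULL pass above: insert one
            // representative value with setValidValue(), read it back through
            // every getXXX() method, and then drive the setXXX() methods via
            // setXXX(), judging each outcome against the B6/B2_MOD tables.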
if (setValidValue(psi, 1, jdbcTypes[type])) { psi.executeUpdate(); getXXX(psq, type, false); } setXXX(s, psi, psq, type); psi.close(); psq.close(); s.execute("DROP TABLE PM.TYPE_AS"); conn.commit(); // NOW PROCEDURE PARAMETERS try { s.execute("DROP PROCEDURE PMP.TYPE_AS"); }catch (SQLException seq) { } String procSQL; if(HAVE_BIG_DECIMAL) { procSQL = "CREATE PROCEDURE PMP.TYPE_AS(" + "IN P1 " + SQLTypes[type] + ", INOUT P2 " + SQLTypes[type] + ", OUT P3 " + SQLTypes[type] + ") LANGUAGE JAVA PARAMETER STYLE JAVA NO SQL " + " EXTERNAL NAME 'org.apache.derbyTesting.functionTests.util.ProcedureTest.pmap'"; } else { procSQL = "CREATE PROCEDURE PMP.TYPE_AS(" + "IN P1 " + SQLTypes[type] + ", INOUT P2 " + SQLTypes[type] + ", OUT P3 " + SQLTypes[type] + ") LANGUAGE JAVA PARAMETER STYLE JAVA NO SQL " + " EXTERNAL NAME 'org.apache.derbyTesting.functionTests.util.SimpleProcedureTest.pmap'"; } try { if (!HAVE_BIG_DECIMAL && SQLTypes[type].equals("DECIMAL(10,5)")) continue; //System.out.println(procSQL); s.execute(procSQL); } catch (SQLException sqle) { // may get error that column is not allowed if ("42962".equals(sqle.getSQLState())) continue; else fail(sqle.getSQLState() + ":" + sqle.getMessage()); continue; } // For each JDBC type try to register the out parameters with that type. for (int opt = 0; opt < jdbcTypes.length; opt++) { int jopt = jdbcTypes[opt]; if (jopt == Types.NULL) continue; CallableStatement csp = conn.prepareCall("CALL PMP.TYPE_AS(?, ?, ?)"); boolean bothRegistered = true; //System.out.print("INOUT " + sqlType + " registerOutParameter(" + JDBC.sqlNameFromJdbc(jopt) + ") "); try { csp.registerOutParameter(2, jopt); } catch (SQLException sqle) { assertFalse("INOUT " + sqlType + " registerOutParameter(" + JDBC.sqlNameFromJdbc(jopt) + ") failed",allowRegisterOut[type][opt]); if (!"XCL25".equals(sqle.getSQLState())) fail("-- " + sqle.getSQLState()); bothRegistered = false; } //System.out.print("OUT " + sqlType + " registerOutParameter(" + TestUtil.getNameFromJdbcType(jopt) + ") "); try { csp.registerOutParameter(3, jopt); } catch (SQLException sqle) { if (!"XCL25".equals(sqle.getSQLState())) fail("-- " + sqle.getSQLState()); assertFalse("OUT " + sqlType + " registerOutParameter(" + JDBC.sqlNameFromJdbc(jopt) + "failed",allowRegisterOut[type][opt]); bothRegistered = false; } if (bothRegistered) { try { // set the IN value with an accepted value according to its type // set the INOUT value with an accepted value according to its registered type if (setValidValue(csp, 1, jdbcTypes[type]) && setValidValue(csp, 2, jopt)) { csp.execute(); // now get the INOUT, OUT parameters according to their registered type. 
getOutValue(csp, 2, jopt,type); getOutValue(csp, 3, jopt,type); } } catch (SQLException sqle) { boolean expectedConversionError = ("22018".equals(sqle.getSQLState())|| "22007".equals(sqle.getSQLState())); assertTrue("FAIL: Unexpected exception" + sqle.getSQLState() + ":" + sqle.getMessage(), expectedConversionError); } } csp.close(); } s.execute("DROP PROCEDURE PMP.TYPE_AS"); s.close(); conn.commit(); } } /* * (non-Javadoc) * * @see org.apache.derbyTesting.junit.BaseJDBCTestCase#tearDown() */ protected void tearDown() throws Exception { Connection conn = getConnection(); Statement scb = conn.createStatement(); scb.execute("DROP TABLE PM.LOB_GET"); scb.close(); commit(); } private static void getXXX(PreparedStatement ps, int type, boolean isNull) throws SQLException, java.io.IOException { { // getByte(); ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { byte b = rs.getByte(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertEquals(32, b); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 0, type); } { // getShort() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; try { short s = rs.getShort(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertEquals(32, s); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 1, type); } { // getInt() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { int i = rs.getInt(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(isNull); assertEquals(32, i); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 2, type); } { // getLong(); ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { long l = rs.getLong(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertEquals(32, l); assertFalse(wn); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 3, type); } { // getFloat() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; try { float f = rs.getFloat(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertEquals(32.0, f, .000001); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 4, type); } { // getDouble(); ResultSet rs = ps.executeQuery(); rs.next(); SQLException sqleResult = null; boolean worked; try { double d = rs.getDouble(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertEquals(32.0, d, .00001); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 5, type); } if (HAVE_BIG_DECIMAL) { // getBigDecimal() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { BigDecimal bd = rs.getBigDecimal(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); assertNull(bd); } else { assertFalse(wn); assertEquals("BigDecimal comparison failed", 0, new BigDecimal("32.0").compareTo(bd)); } worked = true; } catch (SQLException sqle) { 
sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 6, type); } { // getBoolean() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { boolean b = rs.getBoolean(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); } else { assertFalse(wn); assertTrue(b); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 7, type); } { // getString() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { String s = rs.getString(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(s); assertTrue(wn); } else { s = s.trim(); int jdbcType = jdbcTypes[type]; switch(jdbcType) { case java.sql.Types.SMALLINT: case java.sql.Types.INTEGER: case java.sql.Types.BIGINT: case java.sql.Types.CHAR: case java.sql.Types.VARCHAR: case java.sql.Types.LONGVARCHAR: assertEquals("32",s); break; case java.sql.Types.REAL: case java.sql.Types.FLOAT: case java.sql.Types.DOUBLE: assertEquals("32.0",s); break; case java.sql.Types.DECIMAL: case java.sql.Types.NUMERIC: assertEquals("32.00000",s); break; case java.sql.Types.VARBINARY: case java.sql.Types.BINARY: assertEquals("0403fdc373",s); break; case java.sql.Types.DATE: assertEquals("2004-02-14",s); break; case java.sql.Types.TIME: assertEquals("17:14:24",s); break; case java.sql.Types.TIMESTAMP: assertEquals("2004-02-14 17:14:24.097625551",s); break; case java.sql.Types.CLOB: assertEquals("67",s); break; case java.sql.Types.BLOB: assertEquals("8243cafe0032",s); break; } assertFalse(wn); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 8, type); } { // getBytes() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { byte[] data = rs.getBytes(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(data); assertTrue(wn); } else { int jdbcType = jdbcTypes[type]; switch (jdbcType) { case java.sql.Types.BINARY: case java.sql.Types.VARBINARY: case java.sql.Types.LONGVARBINARY: assertEquals("0x4,0x3", showFirstTwo(data)); break; case java.sql.Types.BLOB: assertEquals("0x82,0x43", showFirstTwo(data)); } assertNotNull(data); assertFalse(wn); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); judge_getXXX(worked, sqleResult, 9, type); } { // getDate() boolean worked; SQLException sqleResult = null; ; ResultSet rs = null; try { rs = ps.executeQuery(); rs.next(); Date d = rs.getDate(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(d); assertTrue(wn); } else { assertEquals("2004-02-14", d.toString()); assertNotNull(d); assertFalse(wn); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; // 22007 invalid date time conversion worked = "22007".equals(sqle.getSQLState()); } catch (Throwable t) { // System.out.print(t.toString()); worked = false; } if (rs != null) rs.close(); judge_getXXX(worked, sqleResult, 10, type); } { boolean worked; SQLException sqleResult = null; ; ResultSet rs = null; try { // getTime() rs = ps.executeQuery(); rs.next(); Time t = rs.getTime(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(t); assertTrue(wn); } else { assertFalse(wn); assertEquals("17:14:24", t.toString()); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; // 22007 invalid date time conversion worked = "22007".equals(sqle.getSQLState()); } catch (Throwable t) { // System.out.println(t); 
worked = false; } if (rs != null) rs.close(); judge_getXXX(worked, sqleResult, 11, type); } { boolean worked; SQLException sqleResult = null; ; ResultSet rs = null; try { // getTimestamp(); rs = ps.executeQuery(); rs.next(); Timestamp ts = rs.getTimestamp(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(ts); assertTrue(wn); } else { if (type == java.sql.Types.DATE || type == java.sql.Types.TIMESTAMP) assertEquals("2004-02-14 00:00:00.0", ts.toString()); assertFalse(rs.wasNull()); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; // 22007 invalid date time conversion worked = "22007".equals(sqle.getSQLState()); } catch (Throwable t) { // System.out.println(t); worked = false; } if (rs != null) rs.close(); judge_getXXX(worked, sqleResult, 12, type); } { // getAsciiStream() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { InputStream is = rs.getAsciiStream(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); assertNull(is); } else { assertFalse(wn); if (B6[13][type]) assertNotNull(showFirstTwo(is)); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } // getAsciiStream on a NULL value for an invalid conversion // is handled differently in JCC to Cloudscape. On a non-NULL // value an exception is correctly raised by both JCC and CS. // here we check this specific case to reduce canon differences // between CNS and CS. boolean judge = B6[13][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 13, type); } { // getBinaryStream() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { InputStream is = rs.getBinaryStream(1); if (isNull) { assertTrue(rs.wasNull()); assertNull(is); } else if (B6[14][type]) { assertNotNull(showFirstTwo(is)); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } boolean judge = B6[14][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 14, type); } { // getCharacterStream() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { Reader r = rs.getCharacterStream(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(r); assertTrue(wn); } else if (B6[15][type]) { assertFalse(wn); assertNotNull(showFirstTwo(r)); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } boolean judge = B6[15][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 15, type); } { // getClob(); ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { Clob clob = rs.getClob(1); boolean wn = rs.wasNull(); if (isNull) { assertNull(clob); assertTrue(wn); } else if (B6[16][type]) { assertFalse(wn); assertNotNull(clob.getSubString(1, 10)); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } boolean judge = B6[16][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 16, type); } { // getBlob() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { Blob blob = rs.getBlob(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); assertNull(blob); } else if (B6[17][type]) { assertNotNull(showFirstTwo(blob.getBinaryStream())); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; 
worked = false; } boolean judge = B6[17][type] || specificCheck(rs, worked, sqleResult, isNull); rs.close(); if (judge) judge_getXXX(worked, sqleResult, 17, type); } { // getUnicodeStream() ResultSet rs = ps.executeQuery(); rs.next(); boolean worked; SQLException sqleResult = null; ; try { InputStream is = rs.getUnicodeStream(1); boolean wn = rs.wasNull(); if (isNull) { assertTrue(wn); assertNull(is); } else { assertFalse(wn); assertNotNull(is); } worked = true; } catch (NoSuchMethodError e) { worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } rs.close(); if (JDBC.vmSupportsJDBC2()) judge_getXXX(worked, sqleResult, 18, type); } // Check to see getObject returns the correct type { // getObject(); ResultSet rs = ps.executeQuery(); rs.next(); SQLException sqleResult = null; ; try { boolean worked; if (!SQLTypes[type].equals("DECIMAL(10,5)") || HAVE_BIG_DECIMAL) { Object o = rs.getObject(1); boolean wn = rs.wasNull(); Class cgo = B3_GET_OBJECT[type]; String cname; if (cgo.equals(byte[].class)) cname = "byte[]"; else cname = cgo.getName(); if (isNull) { assertTrue(wn); assertNull(o); worked = true; } else if (cgo.isInstance(o)) { worked = true; } else { worked = false; fail("FAIL NOT :" + cgo.getName() + " is " + o.getClass().getName()); } } else { // "ResultSet.getObject not called for DECIMAL type for // JSR169"; worked = true; } assertTrue(worked); } catch (SQLException sqle) { sqleResult = sqle; } rs.close(); } } private static boolean specificCheck(ResultSet rs, boolean worked, SQLException sqleResult, boolean isNull) throws SQLException { boolean judge = true; if (worked && isNull && rs.wasNull()) { // JCC returns NULL if (usingDerbyNetClient()) judge = false; } else if (!worked && isNull) { if (usingDerbyNetClient() && "22005".equals(sqleResult.getSQLState())) judge = false; } return judge; } private static void judge_getXXX(boolean worked, SQLException sqleResult, int whichCall, int type) { boolean validSQLState = false; // verify valid conversion worked if (B6[whichCall][type] && !worked) fail(" JDBC FAIL " + SQLTypes[type] + " " + sqleResult); else if (!worked) { // make sure not implemented or conversion error was thrown if it // didn't work String sqlState = sqleResult.getSQLState(); if ("0A000".equals(sqlState)) validSQLState = true; if ("0A000".equals(sqlState)) validSQLState = true; if ("22005".equals(sqlState)) // embedded invalid conversion error validSQLState = true; else if (sqlState == null) { // client invalid conversion error if (sqleResult.getMessage().indexOf( "Wrong result column type for requested conversion") != -1) validSQLState = true; } assertTrue("FAIL: Expected conversion error but got " + sqleResult, validSQLState); } } private static void judge_setXXX(boolean worked, SQLException sqleResult, int whichCall, int type) { String msg; if (worked && B2_MOD[whichCall][type]) msg = " JDBC MATCH(OK)"; else if (worked) msg = " CLOUD EXT (OK)"; else if (sqleResult != null && "0A000".equals(sqleResult.getSQLState())) msg = " Not Implemented (OK)"; else if (B2_MOD[whichCall][type]) { if (sqleResult != null) showException(sqleResult); msg = " JDBC FAIL " + SQLTypes[type]; } else { msg = checkForInvalidConversion(sqleResult); if (msg == null) return; } if (msg.startsWith("JDBC FAIL")) fail(" JDBC FAIL " + SQLTypes[type]); } private static void judge_setObject(boolean worked, SQLException sqleResult, int b5o, int type) { String msg; if (worked && B5[b5o][type]) msg = " JDBC MATCH(OK)"; else if (worked) msg = " CLOUD EXT (OK)"; else if 
("0A000".equals(sqleResult.getSQLState())) msg = " Not Implemented (OK)"; else if (B5[b5o][type]) { if (sqleResult != null) showException(sqleResult); msg = " JDBC FAIL " + SQLTypes[type]; } else { msg = checkForInvalidConversion(sqleResult); if (msg == null) return; } if (msg.startsWith("JDBC FAIL")) fail(" JDBC FAIL " + SQLTypes[type]); } /** * Look for an "Invalid Conversion" exception and format it for display. * * Look for an "Invalid Conversion" exception. If one is found, print "IC". * If one is not found, dump the actual exception to the output instead. * * Note that the actual invalid conversion exception may be wrapped inside a * BatchUpdateException, so we may need to hunt through the exception chain * to find it. */ private static String checkForInvalidConversion(SQLException sqle) { if (sqle == null) return null; boolean unknownException = true; SQLException e = sqle; while (e != null && unknownException == true) { // XCL12 is temp if ("22005".equals(e.getSQLState()) || "XCL12".equals(e.getSQLState()) || e.getMessage().indexOf("Illegal Conv") != -1) { unknownException = false; if ("0A000".equals(e.getSQLState()) && e.getMessage().indexOf("setUnicodeStream") != -1) unknownException = false; // System.out.print("IC"); break; } e = e.getNextException(); } if (unknownException) showException(sqle); return " JDBC MATCH (INVALID)"; } private static void setXXX(Statement s, PreparedStatement psi, PreparedStatement psq, int type) throws SQLException, java.io.IOException { { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setByte() psi.setByte(1, (byte) 98); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setByte"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 0, type); } // and as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { psi.setByte(1, (byte) 98); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setByte"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 0, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setShort() psi.setShort(1, (short) 98); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setShort"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 1, type); } // and as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setShort() as batch psi.setShort(1, (short) 98); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setShort"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 1, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setInt() psi.setInt(1, 98); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setInt"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 2, type); } // and as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setInt() as batch psi.setInt(1, 98); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setInt"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, 
sqleResult, 2, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setLong() psi.setLong(1, 98L); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setLong"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 3, type); } // as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setLong() as batch psi.setLong(1, 98L); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setLong"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 3, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setFloat() psi.setFloat(1, 98.4f); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setFloat"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 4, type); } // and as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setFloat() as batch psi.setFloat(1, 98.4f); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setFloat"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 4, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDouble() psi.setDouble(1, 98.5); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setDouble"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 5, type); } // as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDouble() as batch psi.setDouble(1, 98.5); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setDouble"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 5, type); } if (HAVE_BIG_DECIMAL) { { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBigDecimal() psi.setBigDecimal(1, new BigDecimal(98.0)); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBigDecimal"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 6, type); } // as a batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBigDecimal() as batch psi.setBigDecimal(1, new BigDecimal(98.0)); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBigDecimal"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 6, type); } // null BigDecimal { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBigDecimal(null) psi.setBigDecimal(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBigDecimal"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 6, type); } // null BigDecimal { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBigDecimal(null) as batch psi.setBigDecimal(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBigDecimal"); worked = true; } catch (SQLException sqle) { 
sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 6, type); } } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBoolean() psi.setBoolean(1, true); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBoolean"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 7, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBoolean() as batch psi.setBoolean(1, true); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBoolean"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 7, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { psi.setString(1,validString[type]); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setString"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } catch (Throwable t) { // JCC has some bugs // System.out.println(t.getMessage()); worked = false; sqleResult = null; } judge_setXXX(worked, sqleResult, 8, type); } // as batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setString() as batch psi.setString(1,validString[type]); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setString"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } catch (Throwable t) { // JCC has some bugs // System.out.println(t.getMessage()); worked = false; sqleResult = null; } judge_setXXX(worked, sqleResult, 8, type); } // null String { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setString(null) psi.setString(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setString"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } catch (Throwable t) { // JCC has some bugs // System.out.println(t.getMessage()); worked = false; sqleResult = null; } judge_setXXX(worked, sqleResult, 8, type); } // null String as batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setString(null) as batch psi.setString(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setString"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } catch (Throwable t) { // JCC has some bugs // System.out.println(t.getMessage()); worked = false; sqleResult = null; } judge_setXXX(worked, sqleResult, 8, type); } { s.execute("DELETE FROM PM.TYPE_AS"); // Set Invalid String for nonString types (DERBY-149) testSetStringInvalidValue(type, psi); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBytes() byte[] data = { (byte) 0x04, (byte) 0x03, (byte) 0xfd, (byte) 0xc3, (byte) 0x73 }; psi.setBytes(1, data); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBytes"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 9, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBytes() as batch byte[] data = { (byte) 0x04, (byte) 0x03, (byte) 0xfd, (byte) 0xc3, (byte) 0x73 }; psi.setBytes(1, data); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBytes"); worked = true; } 
catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 9, type); } // null byte[] { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBytes(null) psi.setBytes(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBytes"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 9, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBytes(null) as batch psi.setBytes(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBytes"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 9, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDate() psi.setDate(1, java.sql.Date.valueOf("2004-02-14")); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setDate"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 10, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDate() as batch psi.setDate(1, java.sql.Date.valueOf("2004-02-14")); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setDate"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 10, type); } // null Date { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDate(null) psi.setDate(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setDate"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 10, type); } // null Date { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setDate(null) as batch psi.setDate(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setDate"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 10, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTime() psi.setTime(1, java.sql.Time.valueOf("00:00:00")); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setTime"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 11, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTime() as batch psi.setTime(1, java.sql.Time.valueOf("00:00:00")); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setTime"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 11, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTime(null) psi.setTime(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setTime"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 11, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTime(null) as batch psi.setTime(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setTime"); worked = 
true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 11, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTimestamp() psi.setTimestamp(1, java.sql.Timestamp .valueOf("2004-02-14 00:00:00.0")); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setTimestamp"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 12, type); } // as batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTimestamp() as batch psi.setTimestamp(1, java.sql.Timestamp .valueOf("2004-02-14 00:00:00.0")); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setTimestamp"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 12, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTimestamp(null) psi.setTimestamp(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setTimestamp"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 12, type); } // as batch { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setTimestamp(null) as batch psi.setTimestamp(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setTimestamp"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 12, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setAsciiStream() byte[] data = new byte[6]; data[0] = (byte) 0x65; data[1] = (byte) 0x67; data[2] = (byte) 0x30; data[3] = (byte) 0x31; data[4] = (byte) 0x32; data[5] = (byte) 0x64; psi .setAsciiStream(1, new java.io.ByteArrayInputStream( data), 6); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setAsciiStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 13, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setAsciiStream() as batch byte[] data = new byte[6]; data[0] = (byte) 0x65; data[1] = (byte) 0x67; data[2] = (byte) 0x30; data[3] = (byte) 0x31; data[4] = (byte) 0x32; data[5] = (byte) 0x64; psi .setAsciiStream(1, new java.io.ByteArrayInputStream( data), 6); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setAsciiStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 13, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setAsciiStream(null) psi.setAsciiStream(1, null, 0); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setAsciiStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 13, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setAsciiStream(null) as batch psi.setAsciiStream(1, null, 0); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setAsciiStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 13, type); } { s.execute("DELETE FROM PM.TYPE_AS"); 
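            // This and the following setter blocks repeat the pattern used for
            // the earlier setters: clear PM.TYPE_AS, apply the setter (directly
            // and again as a batch), read the row back with getValidValue(),
            // and let judge_setXXX() compare the result with the expectations
            // in B2_MOD.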
SQLException sqleResult = null; boolean worked; try { // setBinaryStream() byte[] data = new byte[6]; data[0] = (byte) 0x4; data[1] = (byte) 0x3; data[2] = (byte) 0xca; data[3] = (byte) 0xfe; data[4] = (byte) 0x00; data[5] = (byte) 0x32; psi.setBinaryStream(1, new java.io.ByteArrayInputStream(data), 6); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBinaryStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 14, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBinaryStream() as batch byte[] data = new byte[6]; data[0] = (byte) 0x4; data[1] = (byte) 0x3; data[2] = (byte) 0xca; data[3] = (byte) 0xfe; data[4] = (byte) 0x00; data[5] = (byte) 0x32; psi.setBinaryStream(1, new java.io.ByteArrayInputStream(data), 6); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "getBinaryStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 14, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBinaryStream(null) psi.setBinaryStream(1, null, 0); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBinaryStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 14, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBinaryStream(null) as batch psi.setBinaryStream(1, null, 0); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBinaryStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 14, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setCharacterStream() psi.setCharacterStream(1, new java.io.StringReader("89"), 2); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setCharacterStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 15, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setCharacterStream() as batch psi.setCharacterStream(1, new java.io.StringReader("89"), 2); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setCharacterStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 15, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setCharacterStream(null) psi.setCharacterStream(1, null, 0); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setCharacterStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 15, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setCharacterStream(null) as batch psi.setCharacterStream(1, null, 0); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setCharacterStream"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 15, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setClob() ResultSet rsc = s .executeQuery("SELECT C FROM PM.LOB_GET WHERE ID = 1"); 
rsc.next(); Clob tester = rsc.getClob(1); rsc.close(); psi.setClob(1, tester); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setClob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 16, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setClob() as batch ResultSet rsc = s .executeQuery("SELECT C FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Clob tester = rsc.getClob(1); rsc.close(); psi.setClob(1, tester); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setClob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 16, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setClob(null) psi.setClob(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setClob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 16, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setClob(null) as batch psi.setClob(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setClob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 16, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBlob() ResultSet rsc = s .executeQuery("SELECT B FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Blob tester = rsc.getBlob(1); rsc.close(); psi.setBlob(1, tester); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBlob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 17, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBlob() as batch ResultSet rsc = s .executeQuery("SELECT B FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Blob tester = rsc.getBlob(1); rsc.close(); psi.setBlob(1, tester); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBlob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 17, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // Blob(null) psi.setBlob(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setBlob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 17, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setBlob(null) as batch psi.setBlob(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setBlob"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setXXX(worked, sqleResult, 17, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setUnicodeStream() byte[] data = new byte[6]; data[0] = (byte) 0x4; data[1] = (byte) 0x3; data[2] = (byte) 0xca; data[3] = (byte) 0xfe; data[4] = (byte) 0x00; data[5] = (byte) 0x32; try { psi.setUnicodeStream(1, new java.io.ByteArrayInputStream( data), 6); } catch (NoSuchMethodError e) { // ResultSet.setUnicodeStream not present - correct for // JSR169 } if (JDBC.vmSupportsJDBC2()) { 
psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setUnicodeStream"); } worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } if (JDBC.vmSupportsJDBC2()) judge_setXXX(worked, sqleResult, 14, type); } // setObject(null) { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // should never work! // setObject(null) psi.setObject(1, null); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setObject"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } if (worked) fail("FAIL: setObject(null) not valid"); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // should never work! // setObject(null) as batch psi.setObject(1, null); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setObject"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } if (worked) fail("FAIL: setObject(1,null) did not throw exception"); } setXXX_setObject(s, psi, psq, type, validString[type], "java.lang.String", 0); if (HAVE_BIG_DECIMAL) setXXX_setObject(s, psi, psq, type, BigDecimal.valueOf(98L), "java.math.BigDecimal", 1); setXXX_setObject(s, psi, psq, type, Boolean.TRUE, "java.lang.Boolean", 2); // DERBY-1500: setObject() should work for Byte and Short too. setXXX_setObject(s, psi, psq, type, new Byte((byte) 98), "java.lang.Byte", 1); setXXX_setObject(s, psi, psq, type, new Short((short) 98), "java.lang.Short", 2); setXXX_setObject(s, psi, psq, type, new Integer(98), "java.lang.Integer", 3); setXXX_setObject(s, psi, psq, type, new Long(98), "java.lang.Long", 4); setXXX_setObject(s, psi, psq, type, new Float(98.0f), "java.lang.Float", 5); setXXX_setObject(s, psi, psq, type, new Double(98.0d), "java.lang.Double", 6); { byte[] data = { 0x4, 0x3 }; setXXX_setObject(s, psi, psq, type, data, "byte[]", 7); } setXXX_setObject(s, psi, psq, type, java.sql.Date.valueOf("2004-02-14"), "java.sql.Date", 8); setXXX_setObject(s, psi, psq, type, java.sql.Time.valueOf("00:00:00"), "java.sql.Time", 9); setXXX_setObject(s, psi, psq, type, java.sql.Timestamp .valueOf("2004-02-14 00:00:00.0"), "java.sql.Timestamp", 10); s.getConnection().commit(); if (!usingDerbyNetClient()) { { ResultSet rsc = s .executeQuery("SELECT B FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Blob tester = rsc.getBlob(1); rsc.close(); setXXX_setObject(s, psi, psq, type, tester, "java.sql.Blob", 11); } { ResultSet rsc = s .executeQuery("SELECT C FROM PM.LOB_GET WHERE ID = 1"); rsc.next(); Clob tester = rsc.getClob(1); rsc.close(); setXXX_setObject(s, psi, psq, type, tester, "java.sql.Clob", 12); } } } private static void setXXX_setObject(Statement s, PreparedStatement psi, PreparedStatement psq, int type, Object value, String className, int b5o) throws SQLException, java.io.IOException { { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setObject(" + className + ") psi.setObject(1, value); psi.executeUpdate(); getValidValue(psq, jdbcTypes[type], "setObject(" + className + ")"); worked = true; } catch (SQLException sqle) { sqleResult = sqle; worked = false; } judge_setObject(worked, sqleResult, b5o, type); } { s.execute("DELETE FROM PM.TYPE_AS"); SQLException sqleResult = null; boolean worked; try { // setObject(" + className + ") as batch psi.setObject(1, value); psi.addBatch(); psi.executeBatch(); getValidValue(psq, jdbcTypes[type], "setObject(" + className + ")"); worked = true; } catch (SQLException sqle) { sqleResult 
= sqle; worked = false; } catch (Throwable t) { fail("FAIL " + t.getMessage()); return; } judge_setObject(worked, sqleResult, b5o, type); } } private static void unexpectedException(SQLException sqle) { fail("FAIL unexpected exception - "); showException(sqle); sqle.printStackTrace(System.out); } private static void showException(SQLException sqle) { do { String state = sqle.getSQLState(); if (state == null) state = "?????"; String msg = sqle.getMessage(); if (msg == null) msg = "?? no message ??"; sqle.printStackTrace(); fail(" (" + state + "):" + msg); sqle = sqle.getNextException(); } while (sqle != null); } private static boolean setValidValue(PreparedStatement ps, int param, int jdbcType) throws SQLException { switch (jdbcType) { case Types.BIT: ps.setBoolean(param, true); return true; case Types.TINYINT: ps.setByte(param, (byte) 32); return true; case Types.SMALLINT: ps.setShort(param, (short) 32); return true; case Types.INTEGER: ps.setInt(param, 32); return true; case Types.BIGINT: ps.setLong(param, 32L); return true; case Types.REAL: ps.setFloat(param, 32.0f); return true; case Types.FLOAT: case Types.DOUBLE: ps.setDouble(param, 32.0); return true; case Types.DECIMAL: BigDecimalHandler.setBigDecimalString(ps, param, "32.0"); return true; case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: ps.setString(param, "32"); return true; case Types.BINARY: case Types.VARBINARY: { byte[] data = { (byte) 0x04, (byte) 0x03, (byte) 0xfd, (byte) 0xc3, (byte) 0x73 }; ps.setBytes(param, data); return true; } // Types.LONGVARBINARY: case Types.DATE: ps.setDate(param, java.sql.Date.valueOf("2004-02-14")); return true; case Types.TIME: ps.setTime(param, java.sql.Time.valueOf("17:14:24")); return true; case Types.TIMESTAMP: ps.setTimestamp(param, java.sql.Timestamp .valueOf("2004-02-14 17:14:24.097625551")); return true; case Types.CLOB: // JDBC 3.0 spec section 16.3.2 explictly states setCharacterStream // is OK for setting a CLOB ps.setCharacterStream(param, new java.io.StringReader("67"), 2); return true; case Types.BLOB: // JDBC 3.0 spec section 16.3.2 explictly states setBinaryStream is // OK for setting a BLOB { byte[] data = new byte[6]; data[0] = (byte) 0x82; data[1] = (byte) 0x43; data[2] = (byte) 0xca; data[3] = (byte) 0xfe; data[4] = (byte) 0x00; data[5] = (byte) 0x32; ps .setBinaryStream(param, new java.io.ByteArrayInputStream( data), 6); return true; } default: return false; } } private static boolean getValidValue(PreparedStatement ps, int jdbcType, String method) throws SQLException, IOException { ResultSet rs = ps.executeQuery(); rs.next(); switch (jdbcType) { case Types.SMALLINT: { short val = rs.getShort(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0, val); else if (isBooleanMethod(method)) assertEquals(1, val); else assertEquals(98, val); return true; } case Types.INTEGER: { int val = rs.getInt(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0, val); else if (isBooleanMethod(method)) assertEquals(1, val); else assertEquals(98, val); return true; } case Types.BIGINT: { long val = rs.getLong(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0, val); else if (isBooleanMethod(method)) assertEquals(1, val); else assertEquals(98, val); return true; } case Types.REAL: { float val = rs.getFloat(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0.0, val, .001); else if (isBooleanMethod(method)) assertEquals(1.0, val, .001); else if (method.equals("setFloat")) assertEquals(98.4, val, .001); else if (method.equals("setDouble")) assertEquals(98.5, val, .001); else 
assertEquals(98.0, val, .001); return true; } case Types.FLOAT: case Types.DOUBLE: { double val = rs.getDouble(1); boolean wn = rs.wasNull(); if (wn) assertEquals(0.0, val, .001); else if (isBooleanMethod(method)) assertEquals(1.0, val, .001); else if (method.equals("setFloat")) assertEquals(98.4, val, .001); else if (method.equals("setDouble")) assertEquals(98.5, val, .001); else assertEquals(98.0, val, .001); return true; } case Types.DECIMAL: { String val = BigDecimalHandler.getBigDecimalString(rs, 1); boolean wn = rs.wasNull(); if (wn) assertNull(val); else if (isBooleanMethod(method)) assertEquals("1.00000", val); else if (method.equals("setFloat")) assertEquals("98.40000", val); else if (method.equals("setDouble")) assertEquals("98.50000", val); else assertEquals("98.00000", val); return true; } case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: { String s = rs.getString(1); boolean wn = rs.wasNull(); if (wn) assertNull(s); else { // With IBM's DB2 universal driver. // Setting a java.sql.Clob value works with // a character column but sets the value to // be the object's toString. This is probably a bug with JCC. if (s.startsWith("com.ibm.db2.jcc.") || s.startsWith("org.apache.derby.client")) s = "<OBJECT.toString()>"; boolean hasNonAscii = false; // check for any characters in the control range for (int si = 0; si < s.length(); si++) { char c = s.charAt(si); if (c < (char) 0x20 || c >= (char) 0x7f) { hasNonAscii = true; break; } } if (hasNonAscii) { StringBuffer sb = new StringBuffer(); sb.append("EncodedString: >"); for (int si = 0; si < s.length(); si++) { sb.append(' '); sb.append((int) s.charAt(si)); } sb.append(" <"); s = sb.toString(); } checkValidStringValue(method, s); } return true; } case Types.BINARY: case Types.VARBINARY: { byte[] data = rs.getBytes(1); boolean wn = rs.wasNull(); if (wn) assertNull(data); else assertEquals("0x4,0x3", showFirstTwo(data)); return true; } case Types.LONGVARBINARY: { InputStream is = rs.getBinaryStream(1); boolean wn = rs.wasNull(); if (wn) assertNull(is); else assertEquals("0x4,0x3", showFirstTwo(is)); return true; } case Types.DATE: { Date d = rs.getDate(1); boolean wn = rs.wasNull(); if (wn) assertNull(d); else assertEquals(Date.valueOf("2004-02-14"), d); return true; } case Types.TIME: { Time t = rs.getTime(1); boolean wn = rs.wasNull(); if (wn) assertNull(t); else assertEquals(Time.valueOf("00:00:00"), t); return true; } case Types.TIMESTAMP: { Timestamp ts = rs.getTimestamp(1); boolean wn = rs.wasNull(); if (wn) assertNull(rs.getTimestamp(1)); else assertEquals(Timestamp.valueOf("2004-02-14 00:00:00.0"), ts); return true; } case Types.CLOB: { Clob clob = rs.getClob(1); boolean wn = rs.wasNull(); if (wn) assertNull(clob); else { char[] charray = new char[20]; int numchar = clob.getCharacterStream().read(charray); String s = new String(charray,0,numchar); if ("setString".equals(method)) assertEquals("98",s); else if ("setAsciiStream".equals(method)) assertEquals("eg012d", s); else if ("setCharacterStream".equals(method)) assertEquals("89",s); else if ("setClob".equals(method)) assertEquals("72",s); else if ("setObject(java.lang.String)".equals(method)) assertEquals("98",s); else if ("setObject(java.lang.Clob)".equals(method)) assertEquals("72",s); } return true; } case Types.BLOB: { Blob blob = rs.getBlob(1); boolean wn = rs.wasNull(); if (wn) assertNull(blob); else { assertEquals("0x4,0x3", showFirstTwo(blob.getBinaryStream())); } return true; } default: fail("FAIL JDBC TYPE IN getValidValue " + 
JDBC.sqlNameFromJdbc(jdbcType)); return false; } } private static void checkValidStringValue(String method, String s) { s = s.trim(); if ("setBoolean".equals(method) || "setObject(java.lang.Boolean)".equals(method) ) assertEquals("1",s); else if ("setBytes".equals(method) || ("setObject(byte[])".equals(method))) assertEquals("EncodedString: > 1027 ",s.substring(0,22)); else if ("setFloat".equals(method)) assertEquals("98.4", s); else if ("setDouble".equals(method)) assertEquals("98.5",s); else if ("setDate".equals(method) || "setObject(java.sql.Date)".equals(method)) assertEquals("2004-02-14", s); else if ("setTime".equals(method) || "setObject(java.sql.Time)".equals(method)) assertEquals("00:00:00",s); else if ("setTimestamp".equals(method)|| "setObject(java.sql.Timestamp)".equals(method)) assertEquals("2004-02-14 00:00:00.0",s); else if ("setAsciiStream".equals(method)) assertEquals("eg012d",s); else if ("setCharacterStream".equals(method)) assertEquals("89",s); else if ("setObject(java.lang.Float)".equals(method) || "setObject(java.lang.Double)".equals(method)) assertEquals("98.0",s); else assertEquals("98",s.trim()); } private static boolean isBooleanMethod(String method) { return method.equals("setBoolean") || method.equals("setObject(java.lang.Boolean)"); } private static boolean getOutValue(CallableStatement cs, int param, int regJdbcType, int paramType) throws SQLException, IOException { int paramJdbcType= jdbcTypes[paramType]; switch (regJdbcType) { case Types.BIT: { boolean val = cs.getBoolean(param); boolean wn = cs.wasNull(); if (!wn) assertTrue(val); return true; } case Types.TINYINT: { // Check out and inout params for procedures byte val = cs.getByte(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.SMALLINT: { short val = cs.getShort(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.INTEGER: { int val = cs.getInt(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.BIGINT: { long val = cs.getLong(param); boolean wn = cs.wasNull(); if(!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.REAL: { float val = cs.getFloat(param); boolean wn = cs.wasNull(); if(!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.FLOAT: case Types.DOUBLE: { double val = cs.getDouble(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param, paramType, val); return true; } case Types.DECIMAL: { String val = BigDecimalHandler.getBigDecimalString(cs, param, regJdbcType); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,val); return true; } case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: { String val = cs.getString(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,val.trim()); return true; } case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: { byte[] data = cs.getBytes(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,data); return true; } case Types.DATE: { Date val = cs.getDate(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,val); return true; } case Types.TIME: { Time val = cs.getTime(param); boolean wn = cs.wasNull(); if (!wn) checkProcedureOutput(param,paramType,val); return true; } case Types.TIMESTAMP: { Timestamp val = cs.getTimestamp(param); boolean wn = cs.wasNull(); if (!wn) 
checkProcedureOutput(param,paramType,val); return true; } case Types.CLOB: { // clob not allowed for procedures Clob clob = cs.getClob(param); boolean wn = cs.wasNull(); return true; } case Types.BLOB: { // blob not allowed for procedures Blob blob = cs.getBlob(param); boolean wn = cs.wasNull(); return true; } default: fail("FAIL JDBC TYPE IN getOutValue " + JDBC.sqlNameFromJdbc(regJdbcType)); return false; } } private static void checkProcedureOutput(int param, int paramType, byte val) { checkProcedureOutput(param,paramType,(long) val); } private static void checkProcedureOutput(int param, int paramType, short val) { checkProcedureOutput(param,paramType,(long) val); } private static void checkProcedureOutput(int param, int paramType, int val) { checkProcedureOutput(param,paramType,(long) val); } private static void checkProcedureOutput(int param, int paramType, long val) { switch (jdbcTypes[paramType]) { case java.sql.Types.SMALLINT: if (param == 2) assertEquals(38,val); else if (param == 3) assertEquals(77,val); break; case java.sql.Types.INTEGER: if (param == 2) assertEquals(41,val); else if (param == 3) assertEquals(88,val); break; case java.sql.Types.BIGINT: if (param == 2) assertEquals(40,val); else if (param == 3) assertEquals(99,val); break; case java.sql.Types.FLOAT: if (param == 2) assertEquals(35,val); else if (param == 3) assertEquals(66,val); break; case java.sql.Types.REAL: if (param == 2) assertEquals(41,val); else if (param == 3) assertEquals(88,val); break; case java.sql.Types.DECIMAL: if (param == 2) assertEquals(34,val); else if (param == 3) assertEquals(84,val); break; case java.sql.Types.DOUBLE: if (param == 2) assertEquals(35,val); else if (param == 3) assertEquals(66,val); break; } } private static void checkProcedureOutput(int param, int paramType, float val) { checkProcedureOutput(param,paramType, (double) val); } private static void checkProcedureOutput(int param, int paramType, double val) { switch (jdbcTypes[paramType]) { case java.sql.Types.SMALLINT: if (param == 2) assertEquals(38.0,val,.00001); else if (param == 3) assertEquals(77.0,val,.00001); break; case java.sql.Types.INTEGER: if (param == 2) assertEquals(41.0,val,.00001); else if (param == 3) assertEquals(88.0,val, .00001); break; case java.sql.Types.BIGINT: if (param == 2) assertEquals(40.0,val,.00001); else if (param == 3) assertEquals(99.0,val,.00001); break; case java.sql.Types.FLOAT: if (param == 2) assertEquals(35.9,val,.00001); else if (param == 3) assertEquals(66.8,val,.00001); break; case java.sql.Types.REAL: if (param == 2) assertEquals(41.9,val,.00001); else if (param == 3) assertEquals(88.8,val,.00001); break; case java.sql.Types.DECIMAL: if (param == 2) assertEquals(34.29999,val,.0001); else if (param == 3) assertEquals(84.09999,val,.0001); break; case java.sql.Types.DOUBLE: if (param == 2) assertEquals(35.9,val,.00001); else if (param == 3) assertEquals(66.8,val,.00001); break; } } private static void checkProcedureOutput(int param, int paramType, String val) { switch (jdbcTypes[paramType]) { case java.sql.Types.SMALLINT: if (param == 2) assertEquals("38",val); else if (param == 3) assertEquals("77",val); break; case java.sql.Types.INTEGER: if (param == 2) assertEquals("41",val); else if (param == 3) assertEquals("88",val); break; case java.sql.Types.BIGINT: if (param == 2) assertEquals("40",val); else if (param == 3) assertEquals("99",val); break; case java.sql.Types.FLOAT: if (param == 2) assertEquals("35.9",val); else if (param == 3) assertEquals("66.8",val); break; case 
java.sql.Types.REAL: if (param == 2) assertEquals("41.9",val); else if (param == 3) assertEquals("88.8",val); break; case java.sql.Types.DECIMAL: if (param == 2) assertEquals("34.29999",val); else if (param == 3) assertEquals("84.09999",val); break; case java.sql.Types.DOUBLE: if (param == 2) assertEquals("35.9",val); else if (param == 3) assertEquals("66.8",val); break; } } private static void checkProcedureOutput(int param, int paramType, byte[] val) { if (param == 2) assertEquals("0x4,0x3",showFirstTwo(val)); else if (param == 3) assertEquals("0x9,0xfe",showFirstTwo(val)); } private static void checkProcedureOutput(int param, int paramType, Date val) { switch (jdbcTypes[paramType]) { case java.sql.Types.DATE: if (param == 2) assertEquals("2004-03-08", val.toString()); else if (param == 3) assertEquals("2005-03-08", val.toString()); break; case java.sql.Types.TIMESTAMP: if (param == 2) assertEquals("2004-03-12", val.toString()); else if (param == 3) assertEquals("2004-04-12", val.toString()); break; } } private static void checkProcedureOutput(int param, int paramType, Time val) { switch (jdbcTypes[paramType]) { case java.sql.Types.TIME: if (param == 2) assertEquals("19:44:42", val.toString()); else if (param == 3) assertEquals("20:44:42", val.toString()); break; case java.sql.Types.TIMESTAMP: if (param == 2) assertEquals("21:14:24", val.toString()); else if (param == 3) assertEquals("04:25:26", val.toString()); break; } } private static void checkProcedureOutput(int param, int paramType, Timestamp val) { switch (jdbcTypes[paramType]) { case java.sql.Types.DATE: if (param == 2) assertEquals("2004-03-08 00:00:00.0",val.toString()); else if (param == 3) assertEquals("2005-03-08 00:00:00.0", val.toString()); break; case java.sql.Types.TIME: // getTimestamp on time will use the current date, so can't check it explicitly // just check not null assertNotNull(val); break; case java.sql.Types.TIMESTAMP: if (param == 2) assertEquals("2004-03-12 21:14:24.938222433", val.toString()); else if (param == 3) assertEquals("2004-04-12 04:25:26.462983731", val.toString()); break; } } static void dumpSQLExceptions(SQLException se) { while (se != null) { System.out.println("SQLSTATE(" + se.getSQLState() + "): " + se.toString()); se = se.getNextException(); } } /** * Test for DERBY-149 fix Check that setString to an invalid value throws an * exception rather than causing a hang * * @param type * type for SQLTypes array * @param psi - * insert prepared statement. * */ private static void testSetStringInvalidValue(int type, PreparedStatement psi) { // Do not perform this test for string types. // Only test for types wich will fail with setString("InvalidValue"); switch (jdbcTypes[type]) { case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: case Types.CLOB: return; } String sqlType = SQLTypes[type]; try { psi.setString(1, "Invalid Value"); psi.executeUpdate(); // Should have gotten exception. 
Test fails String error = "FAIL - setString(1,\"Invalld Value\") for type " + sqlType + " did not throw an exception as expected"; } catch (SQLException sqle) { if ("22018".equals(sqle.getSQLState()) || "XCL12".equals(sqle.getSQLState()) || "22007".equals(sqle.getSQLState()) || "22005".equals(sqle.getSQLState()) || (sqle.getMessage().indexOf("Invalid data conversion") != -1) || (sqle.getMessage().indexOf("Illegal Conversion") != -1)) ; // System.out.println(" IC (Expected)"); else fail("FAIL:" + sqle.getMessage()); } catch (Exception e) { fail("FAIL: Unexpected Exception " + e.getMessage()); } } private static String showFirstTwo(java.io.Reader in) throws java.io.IOException { int b1 = in.read(); int b2 = in.read(); in.close(); return "0x" + Integer.toHexString(b1) + "," + "0x" + Integer.toHexString(b2); } private static String showFirstTwo(java.io.InputStream in) throws java.io.IOException { int b1 = in.read(); int b2 = in.read(); in.close(); return "0x" + Integer.toHexString(b1) + "," + "0x" + Integer.toHexString(b2); } private static String showFirstTwo(byte[] data) { int b1 = data[0]; int b2 = data[1]; return "0x" + Integer.toHexString(((int) b1) & 0xff) + "," + "0x" + Integer.toHexString(((int) b2) & 0xff); } public static Test suite() { // Can't run for client for now, getting strange protocol error on tearDown //return TestConfiguration.defaultSuite(ParameterMappingTest.class); return TestConfiguration.embeddedSuite(ParameterMappingTest.class); } }
DERBY-2403 Disable ParameterMappingTest for JSR169 for now. git-svn-id: 2c06e9c5008124d912b69f0b82df29d4867c0ce2@515792 13f79535-47bb-0310-9956-ffa450edef68
java/testing/org/apache/derbyTesting/functionTests/tests/jdbcapi/ParameterMappingTest.java
DERBY-2403 Disable ParameterMappingTest for JSR169 for now.
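Note on the record above: the assertions throughout the test compare byte data against strings such as "0x4,0x3" produced by the showFirstTwo helpers near the end of the file. A small self-contained sketch of that formatting, using a hypothetical demo class and made-up input bytes, only to make the expected strings easier to read:

public class ShowFirstTwoDemo {
    // Same idea as the test's showFirstTwo(byte[]): hex of the first two
    // bytes, masked to unsigned, joined with a comma.
    static String showFirstTwo(byte[] data) {
        return "0x" + Integer.toHexString(data[0] & 0xff)
                + ",0x" + Integer.toHexString(data[1] & 0xff);
    }

    public static void main(String[] args) {
        byte[] data = {0x4, 0x3, (byte) 0xca, (byte) 0xfe};
        System.out.println(showFirstTwo(data)); // prints 0x4,0x3
    }
}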
Java
apache-2.0
e260ad0c85be0c27d2f23e8754093c1b7cd0508f
0
budthapa/social-network,budthapa/social-network,budthapa/social-network
package com.budthapa.controller; import javax.validation.Valid; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; import com.budthapa.domain.SiteUser; import com.budthapa.service.UserService; @Controller public class AuthController { @Autowired UserService userService; @RequestMapping("/login") String admin(){ return "login"; } @RequestMapping(value="/register", method=RequestMethod.GET ) ModelAndView register(ModelAndView modelAndView){ SiteUser user=new SiteUser(); modelAndView.getModel().put("user", user); modelAndView.setViewName("register"); return modelAndView; } @RequestMapping(value="/register", method=RequestMethod.POST) ModelAndView register(ModelAndView modelAndView, @Valid SiteUser user, BindingResult result){ modelAndView.setViewName("register"); if(!result.hasErrors()){ userService.register(user); modelAndView.setViewName("redirect:/"); } return modelAndView; } }
src/main/java/com/budthapa/controller/AuthController.java
package com.budthapa.controller; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; import com.budthapa.domain.SiteUser; @Controller public class AuthController { @RequestMapping("/login") String admin(){ return "login"; } @RequestMapping(value="/register", method=RequestMethod.GET ) ModelAndView register(ModelAndView modelAndView){ SiteUser user=new SiteUser(); modelAndView.getModel().put("user", user); modelAndView.setViewName("register"); return modelAndView; } }
register new user
src/main/java/com/budthapa/controller/AuthController.java
register new user
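Note on the record above: the new POST /register handler relies on bean validation (@Valid plus BindingResult) and on userService.register(user), but neither SiteUser's constraints nor UserService appear in this record. A purely illustrative sketch of the kind of constrained entity that @Valid implies; the annotations and field names are assumptions, not the repository's actual code:

package com.budthapa.domain;

import javax.validation.constraints.Size;

// Hypothetical sketch: the real SiteUser is not shown in this record.
// @Valid in the controller only has an effect if the entity declares
// constraints along these lines.
public class SiteUser {

    @Size(min = 5, max = 60)
    private String username;

    @Size(min = 8, max = 30)
    private String password;

    public String getUsername() { return username; }
    public void setUsername(String username) { this.username = username; }
    public String getPassword() { return password; }
    public void setPassword(String password) { this.password = password; }
}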
Java
apache-2.0
bea87ae353878b61174ca428fe3d191a44f5dad6
0
ihmcrobotics/ihmc-realtime,ihmcrobotics/ihmc-realtime,ihmcrobotics/ihmc-realtime,ihmcrobotics/ihmc-realtime,ihmcrobotics/ihmc-realtime,ihmcrobotics/ihmc-realtime,ihmcrobotics/ihmc-realtime
/* * Copyright 2014 Florida Institute for Human and Machine Cognition (IHMC) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Written by Alex Lesman with assistance from IHMC team members */ package us.ihmc.concurrent; import java.util.concurrent.atomic.AtomicInteger; /** * * Class to copy data from one producer thread to one consumer thread guaranteeing atomicity. * This class is lock-free, non-blocking and garbage-free * * Only one producer and one consumer are supported. * * This class is difficult to read due to the necessity to make things atomic. * There are 3 objects in the buffer. One will be the one to be read, one will * be the one to be written and one will be the one to be committed. The state * variable encodes which of the objects is which. The high two bits encode * which object to read. The low two bits encode which object to write. * * * @author Alex Lesman, Jesper Smith * * @param <T> object * */ public class ConcurrentCopier<T> { private static final int NEXT_OBJECT_TO_READ_MASK = 0xC; private static final int CURRENTLY_BEING_READ_MASK = 0x3; private static final int INITIAL_STATE = 0xC; public final T[] buffer; private int currentlyBeingWritten = -1; /* * State bitmask integer * * NEXT_OBJECT_TO_READ_MASK : nextObjectToRead * CURRENTLY_BEING_READ_MASK : currentlyBeingRead */ private final AtomicInteger state = new AtomicInteger(); @SuppressWarnings("unchecked") public ConcurrentCopier(Builder<? extends T> classBuilder) { buffer = (T[]) new Object[3]; for (int i = 0; i < 3; i++) { buffer[i] = classBuilder.newInstance(); } state.set(INITIAL_STATE); } public T getCopyForReading() { while(true) { int currentState = state.get(); if (currentState == INITIAL_STATE) { return null; } int nextObjectToRead = (currentState & NEXT_OBJECT_TO_READ_MASK) >> 2; int newState = (currentState & NEXT_OBJECT_TO_READ_MASK) | (nextObjectToRead); if(state.compareAndSet(currentState, newState)) { return buffer[nextObjectToRead]; } } } // returns an index that is not beingRead or nextToRead private int getNextWriteIndex(int currentState) { switch(currentState) { case 0x0: return 0x1; case 0x1: return 0x2; case 0x2: return 0x1; case 0x4: return 0x2; case 0x5: return 0x0; case 0x6: return 0x0; case 0x8: return 0x1; case 0x9: return 0x0; case 0xA: return 0x0; case INITIAL_STATE: return 0x1; default: throw new RuntimeException("Invalid Copier State: " + currentState); } } public T getCopyForWriting() { currentlyBeingWritten = getNextWriteIndex(state.get()); return buffer[currentlyBeingWritten]; } /** * Commit write such that getCopyForWriting returns the newest copy */ public void commit() { // Updating nextObjectToRead(state & NEXT_OBJECT_TO_READ_MASK) to currentlyBeingWritten while(true) { int currentState = state.get(); int newState = (currentState & CURRENTLY_BEING_READ_MASK) | (currentlyBeingWritten << 2); if(state.compareAndSet(currentState, newState)) { break; } } } }
src/us/ihmc/concurrent/ConcurrentCopier.java
/* * Copyright 2014 Florida Institute for Human and Machine Cognition (IHMC) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Written by Alex Lesman with assistance from IHMC team members */ package us.ihmc.concurrent; import java.util.concurrent.atomic.AtomicInteger; /** * * Class to copy data from one producer thread to one consumer thread guaranteeing atomicity. * This class is lock-free, non-blocking and garbage-free * * Only one producer and one consumer are supported * * * @author Jesper Smith * * @param <T> object * */ public class ConcurrentCopier<T> { private static final int NEXT_OBJECT_TO_READ_MASK = 0xC; private static final int CURRENTLY_BEING_READ_MASK = 0x3; private static final int INITIAL_STATE = 0xC; public final T[] buffer; private int currentlyBeingWritten = -1; /* * State bitmask integer * * NEXT_OBJECT_TO_READ_MASK : nextObjectToRead * CURRENTLY_BEING_READ_MASK : currentlyBeingRead */ private final AtomicInteger state = new AtomicInteger(); @SuppressWarnings("unchecked") public ConcurrentCopier(Builder<? extends T> classBuilder) { buffer = (T[]) new Object[3]; for (int i = 0; i < 3; i++) { buffer[i] = classBuilder.newInstance(); } state.set(INITIAL_STATE); } public T getCopyForReading() { while(true) { int currentState = state.get(); if (currentState == INITIAL_STATE) { return null; } int nextObjectToRead = (currentState & NEXT_OBJECT_TO_READ_MASK) >> 2; int newState = (currentState & NEXT_OBJECT_TO_READ_MASK) | (nextObjectToRead); if(state.compareAndSet(currentState, newState)) { return buffer[nextObjectToRead]; } } } // returns an index that is not beingRead or nextToRead private int getNextWriteIndex(int currentState) { switch(currentState) { case 0x0: return 0x1; case 0x1: return 0x2; case 0x2: return 0x1; case 0x4: return 0x2; case 0x5: return 0x0; case 0x6: return 0x0; case 0x8: return 0x1; case 0x9: return 0x0; case 0xA: return 0x0; case INITIAL_STATE: return 0x1; default: throw new RuntimeException("Invalid Copier State: " + currentState); } } public T getCopyForWriting() { currentlyBeingWritten = getNextWriteIndex(state.get()); return buffer[currentlyBeingWritten]; } /** * Commit write such that getCopyForWriting returns the newest copy */ public void commit() { // Updating nextObjectToRead(state & NEXT_OBJECT_TO_READ_MASK) to currentlyBeingWritten while(true) { int currentState = state.get(); int newState = (currentState & CURRENTLY_BEING_READ_MASK) | (currentlyBeingWritten << 2); if(state.compareAndSet(currentState, newState)) { break; } } } }
DRC-2258: Added some comments. Won't fixBamboo.
src/us/ihmc/concurrent/ConcurrentCopier.java
DRC-2258: Added some comments. Won't fixBamboo.
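Note on the record above: the javadoc explains the triple-buffer state encoding, but no caller is shown. A minimal usage sketch under the assumption (inferred from the constructor) that us.ihmc.concurrent.Builder exposes a single newInstance() method; MutableState and the values are made up for illustration:

import us.ihmc.concurrent.Builder;
import us.ihmc.concurrent.ConcurrentCopier;

public class ConcurrentCopierUsageSketch {

    // Hypothetical data holder; any mutable type works.
    static class MutableState {
        long tick;
    }

    public static void main(String[] args) {
        ConcurrentCopier<MutableState> copier = new ConcurrentCopier<MutableState>(
                new Builder<MutableState>() {
                    @Override
                    public MutableState newInstance() {
                        return new MutableState();
                    }
                });

        // Producer side: mutate the write buffer, then publish it atomically.
        MutableState writeCopy = copier.getCopyForWriting();
        writeCopy.tick = 42L;
        copier.commit();

        // Consumer side: null is returned until the first commit has happened.
        MutableState readCopy = copier.getCopyForReading();
        if (readCopy != null) {
            System.out.println(readCopy.tick); // 42
        }
    }
}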
Java
apache-2.0
6c6f6670c4e043a6350ebbe317052bcf9e16d74e
0
Endran/ScrumPoker,Endran/ScrumPoker
/* * Copyright (c) 2015 by David Hardy. Licensed under the Apache License, Version 2.0. */ package nl.endran.scrumpoker; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.annotation.CallSuper; import android.support.design.widget.NavigationView; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.widget.Toolbar; import android.view.MenuItem; import nl.endran.scrumpoker.fragments.cardselection.AboutFragment; import nl.endran.scrumpoker.fragments.cardselection.CardDisplayFragment; import nl.endran.scrumpoker.fragments.cardselection.CardSelection; import nl.endran.scrumpoker.fragments.cardselection.CardSelectionFragment; import nl.endran.scrumpoker.fragments.cardselection.CardValue; import nl.endran.scrumpoker.fragments.cardselection.SelectionBackgroundFragment; import uk.co.chrisjenx.calligraphy.CalligraphyContextWrapper; public class MainActivity extends BaseActivity { private CardDisplayFragment cardDisplayFragment; private CardSelectionFragment cardSelectionFragment; private SelectionBackgroundFragment selectionBackgroundFragment; private DrawerLayout drawer; private FragmentManager supportFragmentManager; @Override @CallSuper protected void attachBaseContext(Context newBase) { super.attachBaseContext(CalligraphyContextWrapper.wrap(newBase)); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); supportFragmentManager = getSupportFragmentManager(); selectionBackgroundFragment = (SelectionBackgroundFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentSelectionBackground); cardSelectionFragment = (CardSelectionFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentCardSelection); cardDisplayFragment = (CardDisplayFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentCardDisplay); NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view); navigationView.setNavigationItemSelectedListener(new NavigationView.OnNavigationItemSelectedListener() { @Override public boolean onNavigationItemSelected(final MenuItem item) { return handleNavigationItemSelected(item); } }); drawer = (DrawerLayout) findViewById(R.id.drawer_layout); ActionBarDrawerToggle toggle = new ActionBarDrawerToggle( this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawer.setDrawerListener(toggle); toggle.syncState(); setCardsAndShow(CardValue.getStandard()); } private void setCardsAndShow(final CardValue[] cardValues) { closeDrawer(); cardSelectionFragment.setCardValues(cardValues); showCardSelection(); } private void showCardSelection() { cardDisplayFragment.hide(); selectionBackgroundFragment.hide(); cardSelectionFragment.show(new CardSelectionFragment.Listener() { @Override public void onCardSelected(final CardSelection cardSelection) { showSelectionBackgroundFragment(cardSelection); } }); } private void showSelectionBackgroundFragment(final CardSelection cardSelection) { cardDisplayFragment.hide(); cardSelectionFragment.hide(); selectionBackgroundFragment.show(new SelectionBackgroundFragment.Listener() { @Override public void onShowCardClicked() { showCardDisplay(cardSelection); } }); } private void showCardDisplay(final 
CardSelection cardSelection) { cardSelectionFragment.hide(); selectionBackgroundFragment.hide(); cardDisplayFragment.show(cardSelection); } @Override protected int getLayoutId() { return R.layout.activity_main; } @Override protected String getPageName() { return "MainActivity"; } @Override public void onBackPressed() { if (drawer.isDrawerOpen(GravityCompat.START)) { closeDrawer(); } else if (supportFragmentManager.getBackStackEntryCount() > 0) { supportFragmentManager.popBackStack(); } else if (!cardSelectionFragment.isShowing()) { showCardSelection(); } else { super.onBackPressed(); } } private void closeDrawer() { if (drawer.isDrawerOpen(GravityCompat.START)) { drawer.closeDrawer(GravityCompat.START); } } @Override protected void onResume() { super.onResume(); closeDrawer(); } public boolean handleNavigationItemSelected(MenuItem item) { // Handle navigation view item clicks here. int id = item.getItemId(); if (id == R.id.nav_standard) { setCardsAndShow(CardValue.getStandard()); } else if (id == R.id.nav_fibonacci) { setCardsAndShow(CardValue.getFibonacci()); } else if (id == R.id.nav_shirt) { setCardsAndShow(CardValue.getShirt()); } else if (id == R.id.nav_share) { shareApp(); } else if (id == R.id.nav_about) { FragmentTransaction transaction = supportFragmentManager.beginTransaction(); AboutFragment fragment = new AboutFragment(); transaction.addToBackStack(fragment.getClass().getName()); transaction.setCustomAnimations(R.anim.fade_in, 0, 0, R.anim.fade_out); transaction.replace(R.id.contentFrame, fragment); transaction.commit(); // } else if (id == R.id.nav_settings) { } DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); drawer.closeDrawer(GravityCompat.START); return true; } private void shareApp() { Intent sendIntent = new Intent(); sendIntent.setAction(Intent.ACTION_SEND); sendIntent.putExtra(Intent.EXTRA_TITLE, getString(R.string.app_name)); sendIntent.putExtra(Intent.EXTRA_SUBJECT, getString(R.string.app_name)); sendIntent.putExtra(Intent.EXTRA_TEXT, "Hey check out this awesome Scrum Poker app at: https://play.google.com/store/apps/details?id=nl.endran.scrumpoker"); sendIntent.setType("text/plain"); startActivity(sendIntent); } }
app/src/main/java/nl/endran/scrumpoker/MainActivity.java
/* * Copyright (c) 2015 by David Hardy. Licensed under the Apache License, Version 2.0. */ package nl.endran.scrumpoker; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.annotation.CallSuper; import android.support.design.widget.NavigationView; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.widget.Toolbar; import android.view.MenuItem; import nl.endran.scrumpoker.fragments.cardselection.AboutFragment; import nl.endran.scrumpoker.fragments.cardselection.CardDisplayFragment; import nl.endran.scrumpoker.fragments.cardselection.CardSelection; import nl.endran.scrumpoker.fragments.cardselection.CardSelectionFragment; import nl.endran.scrumpoker.fragments.cardselection.CardValue; import nl.endran.scrumpoker.fragments.cardselection.SelectionBackgroundFragment; import uk.co.chrisjenx.calligraphy.CalligraphyContextWrapper; public class MainActivity extends BaseActivity { private CardDisplayFragment cardDisplayFragment; private CardSelectionFragment cardSelectionFragment; private SelectionBackgroundFragment selectionBackgroundFragment; private DrawerLayout drawer; private FragmentManager supportFragmentManager; @Override @CallSuper protected void attachBaseContext(Context newBase) { super.attachBaseContext(CalligraphyContextWrapper.wrap(newBase)); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); supportFragmentManager = getSupportFragmentManager(); selectionBackgroundFragment = (SelectionBackgroundFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentSelectionBackground); cardSelectionFragment = (CardSelectionFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentCardSelection); cardDisplayFragment = (CardDisplayFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentCardDisplay); NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view); navigationView.setNavigationItemSelectedListener(new NavigationView.OnNavigationItemSelectedListener() { @Override public boolean onNavigationItemSelected(final MenuItem item) { return handleNavigationItemSelected(item); } }); drawer = (DrawerLayout) findViewById(R.id.drawer_layout); ActionBarDrawerToggle toggle = new ActionBarDrawerToggle( this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawer.setDrawerListener(toggle); toggle.syncState(); setCardsAndShow(CardValue.getStandard()); } private void setCardsAndShow(final CardValue[] cardValues) { cardSelectionFragment.setCardValues(cardValues); showCardSelection(); } private void showCardSelection() { cardDisplayFragment.hide(); selectionBackgroundFragment.hide(); cardSelectionFragment.show(new CardSelectionFragment.Listener() { @Override public void onCardSelected(final CardSelection cardSelection) { showSelectionBackgroundFragment(cardSelection); } }); } private void showSelectionBackgroundFragment(final CardSelection cardSelection) { cardDisplayFragment.hide(); cardSelectionFragment.hide(); selectionBackgroundFragment.show(new SelectionBackgroundFragment.Listener() { @Override public void onShowCardClicked() { showCardDisplay(cardSelection); } }); } private void showCardDisplay(final CardSelection 
cardSelection) { cardSelectionFragment.hide(); selectionBackgroundFragment.hide(); cardDisplayFragment.show(cardSelection); } @Override protected int getLayoutId() { return R.layout.activity_main; } @Override protected String getPageName() { return "MainActivity"; } @Override public void onBackPressed() { if (drawer.isDrawerOpen(GravityCompat.START)) { drawer.closeDrawer(GravityCompat.START); } else if (supportFragmentManager.getBackStackEntryCount() > 0) { supportFragmentManager.popBackStack(); } else if (!cardSelectionFragment.isShowing()) { showCardSelection(); } else { super.onBackPressed(); } } @Override protected void onResume() { super.onResume(); drawer.closeDrawer(GravityCompat.START); } public boolean handleNavigationItemSelected(MenuItem item) { // Handle navigation view item clicks here. int id = item.getItemId(); if (id == R.id.nav_standard) { setCardsAndShow(CardValue.getStandard()); } else if (id == R.id.nav_fibonacci) { setCardsAndShow(CardValue.getFibonacci()); } else if (id == R.id.nav_shirt) { setCardsAndShow(CardValue.getShirt()); } else if (id == R.id.nav_share) { shareApp(); } else if (id == R.id.nav_about) { FragmentTransaction transaction = supportFragmentManager.beginTransaction(); AboutFragment fragment = new AboutFragment(); transaction.addToBackStack(fragment.getClass().getName()); transaction.setCustomAnimations(R.anim.fade_in, 0, 0, R.anim.fade_out); transaction.replace(R.id.contentFrame, fragment); transaction.commit(); // } else if (id == R.id.nav_settings) { } DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); drawer.closeDrawer(GravityCompat.START); return true; } private void shareApp() { Intent sendIntent = new Intent(); sendIntent.setAction(Intent.ACTION_SEND); sendIntent.putExtra(Intent.EXTRA_TITLE, getString(R.string.app_name)); sendIntent.putExtra(Intent.EXTRA_SUBJECT, getString(R.string.app_name)); sendIntent.putExtra(Intent.EXTRA_TEXT, "Hey check out this awesome Scrum Poker app at: https://play.google.com/store/apps/details?id=nl.endran.scrumpoker"); sendIntent.setType("text/plain"); startActivity(sendIntent); } }
Fixed issue when about is opened and deck type is selected in drawer menu
app/src/main/java/nl/endran/scrumpoker/MainActivity.java
Fixed issue when about is opened and deck type is selected in drawer menu
Java
apache-2.0
bb954edf5099b42e73dcce8ef7c88cb35dfdb4e3
0
Wechat-Group/WxJava,binarywang/weixin-java-tools,comeonc/weixin-java-tools,binarywang/weixin-java-tools,chunwei/weixin-java-tools,Wechat-Group/WxJava,crazycode/weixin-java-tools,comeonc/weixin-java-tools
package com.github.binarywang.wxpay.bean.request; import com.github.binarywang.wxpay.config.WxPayConfig; import com.github.binarywang.wxpay.exception.WxPayException; import com.thoughtworks.xstream.annotations.XStreamAlias; import me.chanjar.weixin.common.annotation.Required; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import java.util.Arrays; /** * <pre> * 微信支付-申请退款请求参数 * 注释中各行每个字段描述对应如下: * <li>字段名 * <li>变量名 * <li>是否必填 * <li>类型 * <li>示例值 * <li>描述 * Created by Binary Wang on 2016-10-08. * </pre> * * @author <a href="https://github.com/binarywang">binarywang(Binary Wang)</a> */ @XStreamAlias("xml") public class WxPayRefundRequest extends WxPayBaseRequest { private static final String[] REFUND_ACCOUNT = new String[]{"REFUND_SOURCE_RECHARGE_FUNDS", "REFUND_SOURCE_UNSETTLED_FUNDS"}; /** * <pre> * 设备号 * device_info * 否 * String(32) * 13467007045764 * 终端设备号 * </pre> */ @XStreamAlias("device_info") private String deviceInfo; /** * <pre> * 微信订单号 * transaction_id * 跟out_trade_no二选一 * String(28) * 1217752501201400000000000000 * 微信生成的订单号,在支付通知中有返回 * </pre> */ @XStreamAlias("transaction_id") private String transactionId; /** * <pre> * 商户订单号 * out_trade_no * 跟transaction_id二选一 * String(32) * 1217752501201400000000000000 * 商户侧传给微信的订单号 * </pre> */ @XStreamAlias("out_trade_no") private String outTradeNo; /** * <pre> * 商户退款单号 * out_refund_no * 是 * String(32) * 1217752501201400000000000000 * 商户系统内部的退款单号,商户系统内部唯一,同一退款单号多次请求只退一笔 * </pre> */ @Required @XStreamAlias("out_refund_no") private String outRefundNo; /** * <pre> * 订单金额 * total_fee * 是 * Int * 100 * 订单总金额,单位为分,只能为整数,详见支付金额 * </pre> */ @Required @XStreamAlias("total_fee") private Integer totalFee; /** * <pre> * 退款金额 * refund_fee * 是 * Int * 100 * 退款总金额,订单总金额,单位为分,只能为整数,详见支付金额 * </pre> */ @Required @XStreamAlias("refund_fee") private Integer refundFee; /** * <pre> * 货币种类 * refund_fee_type * 否 * String(8) * CNY * 货币类型,符合ISO 4217标准的三位字母代码,默认人民币:CNY,其他值列表详见货币类型 * </pre> */ @XStreamAlias("refund_fee_type") private String refundFeeType; /** * <pre> * 操作员 * op_user_id * 是 * String(32) * 1900000109 * 操作员帐号, 默认为商户号 * </pre> */ //@Required @XStreamAlias("op_user_id") private String opUserId; /** * <pre> * 退款资金来源 * refund_account * 否 * String(30) * REFUND_SOURCE_RECHARGE_FUNDS * 仅针对老资金流商户使用, * <li>REFUND_SOURCE_UNSETTLED_FUNDS---未结算资金退款(默认使用未结算资金退款), * <li>REFUND_SOURCE_RECHARGE_FUNDS---可用余额退款 * </pre> */ @XStreamAlias("refund_account") private String refundAccount; /** * <pre> * 退款原因 * refund_account * 否 * String(80) * 商品已售完 * 若商户传入,会在下发给用户的退款消息中体现退款原因 * </pre> */ @XStreamAlias("refund_desc") private String refundDesc; private WxPayRefundRequest(Builder builder) { setDeviceInfo(builder.deviceInfo); setAppid(builder.appid); setTransactionId(builder.transactionId); setMchId(builder.mchId); setSubAppId(builder.subAppId); setOutTradeNo(builder.outTradeNo); setSubMchId(builder.subMchId); setOutRefundNo(builder.outRefundNo); setNonceStr(builder.nonceStr); setTotalFee(builder.totalFee); setSign(builder.sign); setRefundFee(builder.refundFee); setRefundFeeType(builder.refundFeeType); setOpUserId(builder.opUserId); setRefundAccount(builder.refundAccount); setRefundDesc(builder.refundDesc); } public static Builder newBuilder() { return new Builder(); } public String getDeviceInfo() { return this.deviceInfo; } public void setDeviceInfo(String deviceInfo) { this.deviceInfo = deviceInfo; } public String getTransactionId() { return this.transactionId; } public void setTransactionId(String transactionId) { this.transactionId = 
transactionId; } public String getOutTradeNo() { return this.outTradeNo; } public void setOutTradeNo(String outTradeNo) { this.outTradeNo = outTradeNo; } public String getOutRefundNo() { return this.outRefundNo; } public void setOutRefundNo(String outRefundNo) { this.outRefundNo = outRefundNo; } public Integer getTotalFee() { return this.totalFee; } public void setTotalFee(Integer totalFee) { this.totalFee = totalFee; } public Integer getRefundFee() { return this.refundFee; } public void setRefundFee(Integer refundFee) { this.refundFee = refundFee; } public String getRefundFeeType() { return this.refundFeeType; } public void setRefundFeeType(String refundFeeType) { this.refundFeeType = refundFeeType; } public String getOpUserId() { return this.opUserId; } public void setOpUserId(String opUserId) { this.opUserId = opUserId; } public String getRefundAccount() { return this.refundAccount; } public void setRefundAccount(String refundAccount) { this.refundAccount = refundAccount; } public String getRefundDesc() { return this.refundDesc; } public void setRefundDesc(String refundDesc) { this.refundDesc = refundDesc; } public WxPayRefundRequest() { } @Override public void checkAndSign(WxPayConfig config) throws WxPayException { if (StringUtils.isBlank(this.getOpUserId())) { this.setOpUserId(config.getMchId()); } super.checkAndSign(config); } @Override protected void checkConstraints() { if (StringUtils.isNotBlank(this.getRefundAccount())) { if (!ArrayUtils.contains(REFUND_ACCOUNT, this.getRefundAccount())) { throw new IllegalArgumentException(String.format("refund_account目前必须为%s其中之一,实际值:%s", Arrays.toString(REFUND_ACCOUNT), this.getRefundAccount())); } } if (StringUtils.isBlank(this.getOutTradeNo()) && StringUtils.isBlank(this.getTransactionId())) { throw new IllegalArgumentException("transaction_id 和 out_trade_no 不能同时为空,必须提供一个"); } } public static final class Builder { private String deviceInfo; private String appid; private String transactionId; private String mchId; private String subAppId; private String outTradeNo; private String subMchId; private String outRefundNo; private String nonceStr; private Integer totalFee; private String sign; private Integer refundFee; private String refundFeeType; private String opUserId; private String refundAccount; private String refundDesc; private Builder() { } public Builder deviceInfo(String deviceInfo) { this.deviceInfo = deviceInfo; return this; } public Builder appid(String appid) { this.appid = appid; return this; } public Builder transactionId(String transactionId) { this.transactionId = transactionId; return this; } public Builder mchId(String mchId) { this.mchId = mchId; return this; } public Builder subAppId(String subAppId) { this.subAppId = subAppId; return this; } public Builder outTradeNo(String outTradeNo) { this.outTradeNo = outTradeNo; return this; } public Builder subMchId(String subMchId) { this.subMchId = subMchId; return this; } public Builder outRefundNo(String outRefundNo) { this.outRefundNo = outRefundNo; return this; } public Builder nonceStr(String nonceStr) { this.nonceStr = nonceStr; return this; } public Builder totalFee(Integer totalFee) { this.totalFee = totalFee; return this; } public Builder sign(String sign) { this.sign = sign; return this; } public Builder refundFee(Integer refundFee) { this.refundFee = refundFee; return this; } public Builder refundFeeType(String refundFeeType) { this.refundFeeType = refundFeeType; return this; } public Builder opUserId(String opUserId) { this.opUserId = opUserId; return this; } public 
Builder refundAccount(String refundAccount) { this.refundAccount = refundAccount; return this; } public Builder refundDesc(String refundDesc) { this.refundDesc = refundDesc; return this; } public WxPayRefundRequest build() { return new WxPayRefundRequest(this); } } }
weixin-java-pay/src/main/java/com/github/binarywang/wxpay/bean/request/WxPayRefundRequest.java
package com.github.binarywang.wxpay.bean.request; import com.github.binarywang.wxpay.config.WxPayConfig; import com.github.binarywang.wxpay.exception.WxPayException; import com.thoughtworks.xstream.annotations.XStreamAlias; import me.chanjar.weixin.common.annotation.Required; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import java.util.Arrays; /** * <pre> * 微信支付-申请退款请求参数 * 注释中各行每个字段描述对应如下: * <li>字段名 * <li>变量名 * <li>是否必填 * <li>类型 * <li>示例值 * <li>描述 * Created by Binary Wang on 2016-10-08. * </pre> * * @author <a href="https://github.com/binarywang">binarywang(Binary Wang)</a> */ @XStreamAlias("xml") public class WxPayRefundRequest extends WxPayBaseRequest { private static final String[] REFUND_ACCOUNT = new String[]{"REFUND_SOURCE_RECHARGE_FUNDS", "REFUND_SOURCE_UNSETTLED_FUNDS"}; /** * <pre> * 设备号 * device_info * 否 * String(32) * 13467007045764 * 终端设备号 * </pre> */ @XStreamAlias("device_info") private String deviceInfo; /** * <pre> * 微信订单号 * transaction_id * 跟out_trade_no二选一 * String(28) * 1217752501201400000000000000 * 微信生成的订单号,在支付通知中有返回 * </pre> */ @XStreamAlias("transaction_id") private String transactionId; /** * <pre> * 商户订单号 * out_trade_no * 跟transaction_id二选一 * String(32) * 1217752501201400000000000000 * 商户侧传给微信的订单号 * </pre> */ @XStreamAlias("out_trade_no") private String outTradeNo; /** * <pre> * 商户退款单号 * out_refund_no * 是 * String(32) * 1217752501201400000000000000 * 商户系统内部的退款单号,商户系统内部唯一,同一退款单号多次请求只退一笔 * </pre> */ @Required @XStreamAlias("out_refund_no") private String outRefundNo; /** * <pre> * 订单金额 * total_fee * 是 * Int * 100 * 订单总金额,单位为分,只能为整数,详见支付金额 * </pre> */ @Required @XStreamAlias("total_fee") private Integer totalFee; /** * <pre> * 退款金额 * refund_fee * 是 * Int * 100 * 退款总金额,订单总金额,单位为分,只能为整数,详见支付金额 * </pre> */ @Required @XStreamAlias("refund_fee") private Integer refundFee; /** * <pre> * 货币种类 * refund_fee_type * 否 * String(8) * CNY * 货币类型,符合ISO 4217标准的三位字母代码,默认人民币:CNY,其他值列表详见货币类型 * </pre> */ @XStreamAlias("refund_fee_type") private String refundFeeType; /** * <pre> * 操作员 * op_user_id * 是 * String(32) * 1900000109 * 操作员帐号, 默认为商户号 * </pre> */ //@Required @XStreamAlias("op_user_id") private String opUserId; /** * <pre> * 退款资金来源 * refund_account * 否 * String(30) * REFUND_SOURCE_RECHARGE_FUNDS * 仅针对老资金流商户使用, * <li>REFUND_SOURCE_UNSETTLED_FUNDS---未结算资金退款(默认使用未结算资金退款), * <li>REFUND_SOURCE_RECHARGE_FUNDS---可用余额退款 * </pre> */ @XStreamAlias("refund_account") private String refundAccount; public WxPayRefundRequest() { } private WxPayRefundRequest(Builder builder) { setDeviceInfo(builder.deviceInfo); setAppid(builder.appid); setTransactionId(builder.transactionId); setMchId(builder.mchId); setOutTradeNo(builder.outTradeNo); setSubAppId(builder.subAppId); setSubMchId(builder.subMchId); setOutRefundNo(builder.outRefundNo); setNonceStr(builder.nonceStr); setTotalFee(builder.totalFee); setSign(builder.sign); setRefundFee(builder.refundFee); setRefundFeeType(builder.refundFeeType); setOpUserId(builder.opUserId); setRefundAccount(builder.refundAccount); } public static Builder newBuilder() { return new Builder(); } @Override public void checkAndSign(WxPayConfig config) throws WxPayException { if (StringUtils.isBlank(this.getOpUserId())) { this.setOpUserId(config.getMchId()); } super.checkAndSign(config); } public String getDeviceInfo() { return this.deviceInfo; } public void setDeviceInfo(String deviceInfo) { this.deviceInfo = deviceInfo; } public String getTransactionId() { return this.transactionId; } public void setTransactionId(String 
transactionId) { this.transactionId = transactionId; } public String getOutTradeNo() { return this.outTradeNo; } public void setOutTradeNo(String outTradeNo) { this.outTradeNo = outTradeNo; } public String getOutRefundNo() { return this.outRefundNo; } public void setOutRefundNo(String outRefundNo) { this.outRefundNo = outRefundNo; } public Integer getTotalFee() { return this.totalFee; } public void setTotalFee(Integer totalFee) { this.totalFee = totalFee; } public Integer getRefundFee() { return this.refundFee; } public void setRefundFee(Integer refundFee) { this.refundFee = refundFee; } public String getRefundFeeType() { return this.refundFeeType; } public void setRefundFeeType(String refundFeeType) { this.refundFeeType = refundFeeType; } public String getOpUserId() { return this.opUserId; } public void setOpUserId(String opUserId) { this.opUserId = opUserId; } public String getRefundAccount() { return this.refundAccount; } public void setRefundAccount(String refundAccount) { this.refundAccount = refundAccount; } @Override protected void checkConstraints() { if (StringUtils.isNotBlank(this.getRefundAccount())) { if (!ArrayUtils.contains(REFUND_ACCOUNT, this.getRefundAccount())) { throw new IllegalArgumentException(String.format("refund_account目前必须为%s其中之一,实际值:%s", Arrays.toString(REFUND_ACCOUNT), this.getRefundAccount())); } } if (StringUtils.isBlank(this.getOutTradeNo()) && StringUtils.isBlank(this.getTransactionId())) { throw new IllegalArgumentException("transaction_id 和 out_trade_no 不能同时为空,必须提供一个"); } } public static final class Builder { private String deviceInfo; private String appid; private String transactionId; private String mchId; private String outTradeNo; private String subAppId; private String subMchId; private String outRefundNo; private String nonceStr; private Integer totalFee; private String sign; private Integer refundFee; private String refundFeeType; private String opUserId; private String refundAccount; private Builder() { } public Builder deviceInfo(String deviceInfo) { this.deviceInfo = deviceInfo; return this; } public Builder appid(String appid) { this.appid = appid; return this; } public Builder transactionId(String transactionId) { this.transactionId = transactionId; return this; } public Builder mchId(String mchId) { this.mchId = mchId; return this; } public Builder outTradeNo(String outTradeNo) { this.outTradeNo = outTradeNo; return this; } public Builder subAppId(String subAppId) { this.subAppId = subAppId; return this; } public Builder subMchId(String subMchId) { this.subMchId = subMchId; return this; } public Builder outRefundNo(String outRefundNo) { this.outRefundNo = outRefundNo; return this; } public Builder nonceStr(String nonceStr) { this.nonceStr = nonceStr; return this; } public Builder totalFee(Integer totalFee) { this.totalFee = totalFee; return this; } public Builder sign(String sign) { this.sign = sign; return this; } public Builder refundFee(Integer refundFee) { this.refundFee = refundFee; return this; } public Builder refundFeeType(String refundFeeType) { this.refundFeeType = refundFeeType; return this; } public Builder opUserId(String opUserId) { this.opUserId = opUserId; return this; } public Builder refundAccount(String refundAccount) { this.refundAccount = refundAccount; return this; } public WxPayRefundRequest build() { return new WxPayRefundRequest(this); } } }
#257 Add the refund_desc property to the WeChat Pay refund request API
weixin-java-pay/src/main/java/com/github/binarywang/wxpay/bean/request/WxPayRefundRequest.java
#257 Add the refund_desc property to the WeChat Pay refund request API
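The commit described by the message above (#257) adds a refund_desc property to this request class. A minimal sketch of how such a field could be wired in, mirroring the pattern of the existing optional fields; the field semantics noted in the comment (optional, max length, shown to the payer in the refund notification) are assumptions taken from the public WeChat Pay refund documentation rather than from the code shown here:

// Hypothetical addition to WxPayRefundRequest (assumed shape of the #257 change).
/**
 * <pre>
 * Refund description
 * refund_desc
 * optional
 * String(80)
 * e.g. "item out of stock"
 * If supplied, the reason is surfaced to the payer in the refund notification message (assumed semantics).
 * </pre>
 */
@XStreamAlias("refund_desc")
private String refundDesc;

public String getRefundDesc() {
  return this.refundDesc;
}

public void setRefundDesc(String refundDesc) {
  this.refundDesc = refundDesc;
}

// In the nested Builder: a matching field plus a fluent setter ...
public Builder refundDesc(String refundDesc) {
  this.refundDesc = refundDesc;
  return this;
}
// ... and setRefundDesc(builder.refundDesc) added to the private WxPayRefundRequest(Builder) constructor.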
Java
apache-2.0
564f31c01264211eaaf408cddc9ff1d19c294cce
0
Evast1995/Evast
package com.example.hjiang.gactelphonedemo.activity; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.util.Log; import android.view.View; import android.widget.AdapterView; import android.widget.GridView; import android.widget.ImageView; import android.widget.Toast; import com.example.hjiang.gactelphonedemo.MyApplication; import com.example.hjiang.gactelphonedemo.R; import com.example.hjiang.gactelphonedemo.adapter.DialsAdapter; import com.example.hjiang.gactelphonedemo.fragment.ModelFragment; import com.example.hjiang.gactelphonedemo.fragment.SearchFragment; import com.example.hjiang.gactelphonedemo.util.CallUtils; import com.example.hjiang.gactelphonedemo.util.Contants; import com.example.hjiang.gactelphonedemo.util.OtherUtils; import com.example.hjiang.gactelphonedemo.weight.DelEdit; import java.util.List; public class MainActivity extends BaseActivity implements View.OnClickListener { /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ /** 用于测试git */ private GridView gridView; private DelEdit delEdit; private ImageView callImage; public static final int RESULT_CODE = 101; /** 判断当前 搜索页面是否展开*/ private Boolean isSearchVisiable = false; private ModelFragment modelFragment = null; private SearchFragment searchFragment = null; private static final int CONTACTS_REQUEST_CODE=102; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); initView(); setEditChange(); } @Override public void onAttachFragment(Fragment fragment) { super.onAttachFragment(fragment); // add a fragment to current activity window. // fragment instance or args. // which fragment it is. 
if (fragment instanceof SearchFragment) { searchFragment = (SearchFragment) fragment; } else if (fragment instanceof ModelFragment) { modelFragment = (ModelFragment) fragment; } } /** * 初始化视图 */ private void initView(){ /** 初始化拨号盘相关视图*/ setDials(); findViewById(R.id.meeting_btn).setOnClickListener(this); findViewById(R.id.contact_btn).setOnClickListener(this); } /** * 对外提供一个改变DelEdit的方法 */ public void setDelEdit(String phoneStr){ delEdit.setAddTextView(phoneStr); } /** * 设置拨号盘,拨号编辑框 */ private void setDials(){ gridView = (GridView) findViewById(R.id.dials); delEdit = (DelEdit) findViewById(R.id.eidt_layout); callImage = (ImageView) findViewById(R.id.call_icon); callImage.setOnClickListener(this); final DialsAdapter adapter = new DialsAdapter(MainActivity.this, OtherUtils.getDialsNum()); gridView.setAdapter(adapter); gridView.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { String str = (String) adapter.getItem(position); delEdit.addString(str); } }); modelFragment = new ModelFragment(); searchFragment = new SearchFragment(); switchFragmentContent(R.id.lefl_slip_layout, modelFragment); } /** * 拨打电话 */ private void makeCall(){ String callStr = delEdit.getEditText(); List<String> list = delEdit.getImagesText(); int position = list.size(); if(position>1||(position == 1&&!TextUtils.isEmpty(callStr))){//表示成员有两人及其以上 则开启会议 while(position>0){ String phoneNum =list.get(position-1); CallUtils.getInstance(this).confCall(MyApplication.localId,phoneNum,phoneNum,MyApplication.callModel); position--; } if(!TextUtils.isEmpty(callStr)) { CallUtils.getInstance(this).confCall(MyApplication.localId, callStr, callStr,MyApplication.callModel); } }else {//无会议成员则拨打单路线路 if(!TextUtils.isEmpty(callStr)) { CallUtils.getInstance(this).makeCall(MyApplication.localId, callStr, callStr, MyApplication.callModel); }else if(position == 1){ CallUtils.getInstance(this).makeCall(MyApplication.localId, list.get(0), list.get(0), MyApplication.callModel); } else{ Toast.makeText(this,getResources().getString(R.string.phone_isnull),Toast.LENGTH_SHORT).show(); } } list.clear(); delEdit.removeEditText(); } /** * 转拨 */ private void makeTransfer(){ String callStr = delEdit.getEditText(); MyApplication.isTransfer = true; CallUtils.getInstance(this).transferCall(MyApplication.localId, callStr); } /** * 是否是会议线路 * @return */ private Boolean isInConfLine(){ return getIntent().getBooleanExtra(Contants.IS_CONFLINE,false); } /** * 判断当前电弧是否是通话转移 * @return */ private Boolean isTransfer(){ Boolean isTransfer = getIntent().getBooleanExtra(Contants.IS_TRANSFER, false); return isTransfer; } /** * 创建会议线路 */ private void makeConf(){ String callStr = delEdit.getEditText(); Intent intent = new Intent(); intent.putExtra(Contants.PHONE_NUM,callStr); setResult(RESULT_CODE,intent); finish(); } @Override public void onClick(View v) { switch (v.getId()){ /** 按下拨打电话*/ case R.id.call_icon:{ if(isTransfer()) {//转拨线路 makeTransfer(); }else if(isInConfLine()){//会议线路 makeConf(); }else{//拨号 makeCall(); } break; } /** 按下会议按钮*/ case R.id.meeting_btn:{ openMeetingRoom(); break; } /** 按下联系人按钮*/ case R.id.contact_btn:{ startActivityForResult(new Intent(this, ContactsActivity.class), CONTACTS_REQUEST_CODE); // openContactView(); break; } } } /** * 解决singleTask和intent传值的问题 * @param intent */ @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); setIntent(intent); } /** * 打开会议界面 */ private void openMeetingRoom(){ Intent intent = new 
Intent(this,MeetingActivity.class); startActivity(intent); } /** * 打开联系人视图 */ private void openContactView() { // ArrayList<ContentValues> data = new ArrayList<ContentValues>(); // // ContentValues row1 = new ContentValues(); // row1.put(ContactsContract.Data.MIMETYPE, ContactsContract.CommonDataKinds.Organization.CONTENT_ITEM_TYPE); // row1.put(ContactsContract.CommonDataKinds.Organization.COMPANY, "Android"); // data.add(row1); // // ContentValues row2 = new ContentValues(); // row2.put(ContactsContract.Data.MIMETYPE, ContactsContract.CommonDataKinds.Email.CONTENT_ITEM_TYPE); // row2.put(ContactsContract.CommonDataKinds.Email.TYPE, ContactsContract.CommonDataKinds.Email.TYPE_CUSTOM); // row2.put(ContactsContract.CommonDataKinds.Email.LABEL, "Green Bot"); // row2.put(ContactsContract.CommonDataKinds.Email.ADDRESS, "[email protected]"); // data.add(row2); // // Intent intent = new Intent(Intent.ACTION_INSERT, ContactsContract.Contacts.CONTENT_URI); // intent.putParcelableArrayListExtra(ContactsContract.Intents.Insert.DATA, data); // // startActivity(intent); } /** * 编辑框变动时事件 */ private void setEditChange(){ delEdit.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(final CharSequence s, int start, int before, int count) { if (s.length() == 0) {//当编辑框为空时显示modelfragment页面 switchFragmentContent(R.id.lefl_slip_layout, modelFragment); isSearchVisiable = false; } else if (!isSearchVisiable&&s.length()!=0) {//当编辑框里的内容不为空的时候,并且SearchFragment没有显示时切换到SearchFragment switchFragmentContent(R.id.lefl_slip_layout, searchFragment); isSearchVisiable = true; setEditChangeSearch(s.toString()); }else if(isSearchVisiable&&s.length()!=0) {//当编辑框内容不为空并且SearchFragment正在显示时 setEditChangeSearch(s.toString()); } } @Override public void afterTextChanged(Editable s) { } }); } /** * 文本有变化的时候 * @param str */ private void setEditChangeSearch(final String str){ mHandler.post(new Runnable() { @Override public void run() { searchFragment.search(str); } }); } private Handler mHandler = new Handler(); @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if(resultCode == Activity.RESULT_OK && requestCode == CONTACTS_REQUEST_CODE){ Bundle bundle = data.getExtras(); List<String> phoneList = bundle.getStringArrayList(Contants.PHONE_LIST_KEY); Log.e("--main--",phoneList.toString()); for(int i = 0;i<phoneList.size();i++){ delEdit.setAddTextView(phoneList.get(i)); } } } }
GACTelphoneDemo/app/src/main/java/com/example/hjiang/gactelphonedemo/activity/MainActivity.java
package com.example.hjiang.gactelphonedemo.activity; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.util.Log; import android.view.View; import android.widget.AdapterView; import android.widget.GridView; import android.widget.ImageView; import android.widget.Toast; import com.example.hjiang.gactelphonedemo.MyApplication; import com.example.hjiang.gactelphonedemo.R; import com.example.hjiang.gactelphonedemo.adapter.DialsAdapter; import com.example.hjiang.gactelphonedemo.fragment.ModelFragment; import com.example.hjiang.gactelphonedemo.fragment.SearchFragment; import com.example.hjiang.gactelphonedemo.util.CallUtils; import com.example.hjiang.gactelphonedemo.util.Contants; import com.example.hjiang.gactelphonedemo.util.OtherUtils; import com.example.hjiang.gactelphonedemo.weight.DelEdit; import java.util.List; public class MainActivity extends BaseActivity implements View.OnClickListener { private GridView gridView; private DelEdit delEdit; private ImageView callImage; public static final int RESULT_CODE = 101; /** 判断当前 搜索页面是否展开*/ private Boolean isSearchVisiable = false; private ModelFragment modelFragment = null; private SearchFragment searchFragment = null; private static final int CONTACTS_REQUEST_CODE=102; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); initView(); setEditChange(); } @Override public void onAttachFragment(Fragment fragment) { super.onAttachFragment(fragment); // add a fragment to current activity window. // fragment instance or args. // which fragment it is. 
if (fragment instanceof SearchFragment) { searchFragment = (SearchFragment) fragment; } else if (fragment instanceof ModelFragment) { modelFragment = (ModelFragment) fragment; } } /** * 初始化视图 */ private void initView(){ /** 初始化拨号盘相关视图*/ setDials(); findViewById(R.id.meeting_btn).setOnClickListener(this); findViewById(R.id.contact_btn).setOnClickListener(this); } /** * 对外提供一个改变DelEdit的方法 */ public void setDelEdit(String phoneStr){ delEdit.setAddTextView(phoneStr); } /** * 设置拨号盘,拨号编辑框 */ private void setDials(){ gridView = (GridView) findViewById(R.id.dials); delEdit = (DelEdit) findViewById(R.id.eidt_layout); callImage = (ImageView) findViewById(R.id.call_icon); callImage.setOnClickListener(this); final DialsAdapter adapter = new DialsAdapter(MainActivity.this, OtherUtils.getDialsNum()); gridView.setAdapter(adapter); gridView.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { String str = (String) adapter.getItem(position); delEdit.addString(str); } }); modelFragment = new ModelFragment(); searchFragment = new SearchFragment(); switchFragmentContent(R.id.lefl_slip_layout, modelFragment); } /** * 拨打电话 */ private void makeCall(){ String callStr = delEdit.getEditText(); List<String> list = delEdit.getImagesText(); int position = list.size(); if(position>1||(position == 1&&!TextUtils.isEmpty(callStr))){//表示成员有两人及其以上 则开启会议 while(position>0){ String phoneNum =list.get(position-1); CallUtils.getInstance(this).confCall(MyApplication.localId,phoneNum,phoneNum,MyApplication.callModel); position--; } if(!TextUtils.isEmpty(callStr)) { CallUtils.getInstance(this).confCall(MyApplication.localId, callStr, callStr,MyApplication.callModel); } }else {//无会议成员则拨打单路线路 if(!TextUtils.isEmpty(callStr)) { CallUtils.getInstance(this).makeCall(MyApplication.localId, callStr, callStr, MyApplication.callModel); }else if(position == 1){ CallUtils.getInstance(this).makeCall(MyApplication.localId, list.get(0), list.get(0), MyApplication.callModel); } else{ Toast.makeText(this,getResources().getString(R.string.phone_isnull),Toast.LENGTH_SHORT).show(); } } list.clear(); delEdit.removeEditText(); } /** * 转拨 */ private void makeTransfer(){ String callStr = delEdit.getEditText(); MyApplication.isTransfer = true; CallUtils.getInstance(this).transferCall(MyApplication.localId, callStr); } /** * 是否是会议线路 * @return */ private Boolean isInConfLine(){ return getIntent().getBooleanExtra(Contants.IS_CONFLINE,false); } /** * 判断当前电弧是否是通话转移 * @return */ private Boolean isTransfer(){ Boolean isTransfer = getIntent().getBooleanExtra(Contants.IS_TRANSFER, false); return isTransfer; } /** * 创建会议线路 */ private void makeConf(){ String callStr = delEdit.getEditText(); Intent intent = new Intent(); intent.putExtra(Contants.PHONE_NUM,callStr); setResult(RESULT_CODE,intent); finish(); } @Override public void onClick(View v) { switch (v.getId()){ /** 按下拨打电话*/ case R.id.call_icon:{ if(isTransfer()) {//转拨线路 makeTransfer(); }else if(isInConfLine()){//会议线路 makeConf(); }else{//拨号 makeCall(); } break; } /** 按下会议按钮*/ case R.id.meeting_btn:{ openMeetingRoom(); break; } /** 按下联系人按钮*/ case R.id.contact_btn:{ startActivityForResult(new Intent(this, ContactsActivity.class), CONTACTS_REQUEST_CODE); // openContactView(); break; } } } /** * 解决singleTask和intent传值的问题 * @param intent */ @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); setIntent(intent); } /** * 打开会议界面 */ private void openMeetingRoom(){ Intent intent = new 
Intent(this,MeetingActivity.class); startActivity(intent); } /** * 打开联系人视图 */ private void openContactView() { // ArrayList<ContentValues> data = new ArrayList<ContentValues>(); // // ContentValues row1 = new ContentValues(); // row1.put(ContactsContract.Data.MIMETYPE, ContactsContract.CommonDataKinds.Organization.CONTENT_ITEM_TYPE); // row1.put(ContactsContract.CommonDataKinds.Organization.COMPANY, "Android"); // data.add(row1); // // ContentValues row2 = new ContentValues(); // row2.put(ContactsContract.Data.MIMETYPE, ContactsContract.CommonDataKinds.Email.CONTENT_ITEM_TYPE); // row2.put(ContactsContract.CommonDataKinds.Email.TYPE, ContactsContract.CommonDataKinds.Email.TYPE_CUSTOM); // row2.put(ContactsContract.CommonDataKinds.Email.LABEL, "Green Bot"); // row2.put(ContactsContract.CommonDataKinds.Email.ADDRESS, "[email protected]"); // data.add(row2); // // Intent intent = new Intent(Intent.ACTION_INSERT, ContactsContract.Contacts.CONTENT_URI); // intent.putParcelableArrayListExtra(ContactsContract.Intents.Insert.DATA, data); // // startActivity(intent); } /** * 编辑框变动时事件 */ private void setEditChange(){ delEdit.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(final CharSequence s, int start, int before, int count) { if (s.length() == 0) {//当编辑框为空时显示modelfragment页面 switchFragmentContent(R.id.lefl_slip_layout, modelFragment); isSearchVisiable = false; } else if (!isSearchVisiable&&s.length()!=0) {//当编辑框里的内容不为空的时候,并且SearchFragment没有显示时切换到SearchFragment switchFragmentContent(R.id.lefl_slip_layout, searchFragment); isSearchVisiable = true; setEditChangeSearch(s.toString()); }else if(isSearchVisiable&&s.length()!=0) {//当编辑框内容不为空并且SearchFragment正在显示时 setEditChangeSearch(s.toString()); } } @Override public void afterTextChanged(Editable s) { } }); } /** * 文本有变化的时候 * @param str */ private void setEditChangeSearch(final String str){ mHandler.post(new Runnable() { @Override public void run() { searchFragment.search(str); } }); } private Handler mHandler = new Handler(); @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if(resultCode == Activity.RESULT_OK && requestCode == CONTACTS_REQUEST_CODE){ Bundle bundle = data.getExtras(); List<String> phoneList = bundle.getStringArrayList(Contants.PHONE_LIST_KEY); Log.e("--main--",phoneList.toString()); for(int i = 0;i<phoneList.size();i++){ delEdit.setAddTextView(phoneList.get(i)); } } } }
This is a change made by A
GACTelphoneDemo/app/src/main/java/com/example/hjiang/gactelphonedemo/activity/MainActivity.java
This is a change made by A
Java
apache-2.0
16d3b395224356ac0cfcd030e010007222b0fba5
0
orientechnologies/orientdb,orientechnologies/orientdb,orientechnologies/orientdb,orientechnologies/orientdb
/* * * * Copyright 2010-2016 OrientDB LTD (http://orientdb.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://orientdb.com * */ package com.orientechnologies.orient.core.db.document; import com.orientechnologies.common.concur.ONeedRetryException; import com.orientechnologies.common.exception.OException; import com.orientechnologies.common.exception.OHighLevelException; import com.orientechnologies.common.listener.OListenerManger; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.util.OCallable; import com.orientechnologies.orient.core.Orient; import com.orientechnologies.orient.core.cache.OLocalRecordCache; import com.orientechnologies.orient.core.command.OCommandOutputListener; import com.orientechnologies.orient.core.command.OCommandRequest; import com.orientechnologies.orient.core.command.OCommandRequestInternal; import com.orientechnologies.orient.core.config.OContextConfiguration; import com.orientechnologies.orient.core.config.OGlobalConfiguration; import com.orientechnologies.orient.core.config.OStorageEntryConfiguration; import com.orientechnologies.orient.core.conflict.ORecordConflictStrategy; import com.orientechnologies.orient.core.db.*; import com.orientechnologies.orient.core.db.record.OCurrentStorageComponentsFactory; import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.db.record.ORecordElement; import com.orientechnologies.orient.core.db.record.ORecordOperation; import com.orientechnologies.orient.core.dictionary.ODictionary; import com.orientechnologies.orient.core.exception.*; import com.orientechnologies.orient.core.fetch.OFetchHelper; import com.orientechnologies.orient.core.hook.ORecordHook; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.intent.OIntent; import com.orientechnologies.orient.core.iterator.ORecordIteratorClass; import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster; import com.orientechnologies.orient.core.metadata.OMetadata; import com.orientechnologies.orient.core.metadata.OMetadataDefault; import com.orientechnologies.orient.core.metadata.schema.OClass; import com.orientechnologies.orient.core.metadata.schema.OSchema; import com.orientechnologies.orient.core.metadata.schema.OSchemaProxy; import com.orientechnologies.orient.core.metadata.security.*; import com.orientechnologies.orient.core.query.OQuery; import com.orientechnologies.orient.core.record.*; import com.orientechnologies.orient.core.record.impl.*; import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory; import com.orientechnologies.orient.core.serialization.serializer.record.ORecordSerializer; import com.orientechnologies.orient.core.serialization.serializer.record.ORecordSerializerFactory; import com.orientechnologies.orient.core.sql.executor.OResultSet; import 
com.orientechnologies.orient.core.storage.*; import com.orientechnologies.orient.core.storage.impl.local.OFreezableStorageComponent; import com.orientechnologies.orient.core.storage.impl.local.OMicroTransaction; import com.orientechnologies.orient.core.storage.impl.local.paginated.OOfflineClusterException; import com.orientechnologies.orient.core.storage.impl.local.paginated.ORecordSerializationContext; import com.orientechnologies.orient.core.storage.ridbag.sbtree.OSBTreeCollectionManager; import com.orientechnologies.orient.core.tx.*; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.*; import java.util.concurrent.Callable; /** * Document API entrypoint. * * @author Luca Garulli (l.garulli--(at)--orientdb.com) */ @SuppressWarnings("unchecked") public abstract class ODatabaseDocumentAbstract extends OListenerManger<ODatabaseListener> implements ODatabaseDocumentInternal { protected final Map<String, Object> properties = new HashMap<String, Object>(); protected Map<ORecordHook, ORecordHook.HOOK_POSITION> unmodifiableHooks; protected final Set<OIdentifiable> inHook = new HashSet<OIdentifiable>(); protected ORecordSerializer serializer; protected String url; protected STATUS status; protected OIntent currentIntent; protected ODatabaseInternal<?> databaseOwner; protected OMetadataDefault metadata; protected OImmutableUser user; protected final byte recordType = ODocument.RECORD_TYPE; protected final Map<ORecordHook, ORecordHook.HOOK_POSITION> hooks = new LinkedHashMap<ORecordHook, ORecordHook.HOOK_POSITION>(); protected boolean retainRecords = true; protected OLocalRecordCache localCache; protected OCurrentStorageComponentsFactory componentsFactory; protected boolean initialized = false; protected OTransaction currentTx; protected final ORecordHook[][] hooksByScope = new ORecordHook[ORecordHook.SCOPE.values().length][]; protected OSharedContext sharedContext; private boolean prefetchRecords; protected OMicroTransaction microTransaction = null; protected Map<String, OResultSet> activeQueries = new HashMap<>(); protected ODatabaseDocumentAbstract() { // DO NOTHING IS FOR EXTENDED OBJECTS super(false); } /** * @return default serializer which is used to serialize documents. Default serializer is common for all database instances. */ public static ORecordSerializer getDefaultSerializer() { return ORecordSerializerFactory.instance().getDefaultRecordSerializer(); } /** * Sets default serializer. The default serializer is common for all database instances. 
* * @param iDefaultSerializer new default serializer value */ public static void setDefaultSerializer(ORecordSerializer iDefaultSerializer) { ORecordSerializerFactory.instance().setDefaultRecordSerializer(iDefaultSerializer); } public void callOnOpenListeners() { // WAKE UP DB LIFECYCLE LISTENER for (Iterator<ODatabaseLifecycleListener> it = Orient.instance().getDbLifecycleListeners(); it.hasNext(); ) it.next().onOpen(getDatabaseOwner()); // WAKE UP LISTENERS for (ODatabaseListener listener : getListenersCopy()) try { listener.onOpen(getDatabaseOwner()); } catch (Exception e) { OLogManager.instance().error(this, "Error during call of database listener", e); } } protected abstract void loadMetadata(); public void callOnCloseListeners() { // WAKE UP DB LIFECYCLE LISTENER for (Iterator<ODatabaseLifecycleListener> it = Orient.instance().getDbLifecycleListeners(); it.hasNext(); ) it.next().onClose(getDatabaseOwner()); // WAKE UP LISTENERS for (ODatabaseListener listener : getListenersCopy()) try { listener.onClose(getDatabaseOwner()); } catch (Exception e) { OLogManager.instance().error(this, "Error during call of database listener", e); } } public void callOnDropListeners() { // WAKE UP LISTENERS for (ODatabaseListener listener : getListenersCopy()) try { activateOnCurrentThread(); listener.onDelete(getDatabaseOwner()); } catch (Exception e) { OLogManager.instance().error(this, "Error during call of database listener", e); } } /** * {@inheritDoc} */ public <RET extends ORecord> RET getRecord(final OIdentifiable iIdentifiable) { if (iIdentifiable instanceof ORecord) return (RET) iIdentifiable; return (RET) load(iIdentifiable.getIdentity()); } @Override public void reload() { checkIfActive(); if (this.isClosed()) throw new ODatabaseException("Cannot reload a closed db"); metadata.reload(); getStorage().reload(); } /** * {@inheritDoc} */ public <RET extends ORecord> RET load(final ORID iRecordId, final String iFetchPlan, final boolean iIgnoreCache) { return (RET) executeReadRecord((ORecordId) iRecordId, null, -1, iFetchPlan, iIgnoreCache, !iIgnoreCache, false, OStorage.LOCKING_STRATEGY.DEFAULT, new SimpleRecordReader(prefetchRecords)); } /** * Deletes the record checking the version. */ public ODatabase<ORecord> delete(final ORID iRecord, final int iVersion) { ORecord record = load(iRecord); ORecordInternal.setVersion(record, iVersion); delete(record); return this; } public ODatabaseDocumentInternal cleanOutRecord(final ORID iRecord, final int iVersion) { executeDeleteRecord(iRecord, iVersion, true, OPERATION_MODE.SYNCHRONOUS, true); return this; } public String getType() { return TYPE; } /** * Deletes the record without checking the version. 
*/ public ODatabaseDocument delete(final ORID iRecord, final OPERATION_MODE iMode) { ORecord record = load(iRecord); if (record == null) return this; delete(record, iMode); return this; } public ODatabaseDocument delete(final ORecord iRecord, final OPERATION_MODE iMode) { checkIfActive(); ODirtyManager dirtyManager = ORecordInternal.getDirtyManager(iRecord); if (iRecord instanceof OElement && dirtyManager != null && dirtyManager.getReferences() != null && !dirtyManager.getReferences() .isEmpty()) { if (((OElement) iRecord).isEdge() || ((OElement) iRecord).isVertex() && !getTransaction().isActive()) { begin(); try { currentTx.deleteRecord(iRecord, iMode); return this; } finally { commit(); } } } currentTx.deleteRecord(iRecord, iMode); return this; } public <REC extends ORecord> ORecordIteratorCluster<REC> browseCluster(final String iClusterName, final Class<REC> iClass) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); return new ORecordIteratorCluster<REC>(this, this, clusterId); } /** * {@inheritDoc} */ @Override @Deprecated public <REC extends ORecord> ORecordIteratorCluster<REC> browseCluster(final String iClusterName, final Class<REC> iRecordClass, final long startClusterPosition, final long endClusterPosition, final boolean loadTombstones) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); return new ORecordIteratorCluster<REC>(this, this, clusterId, startClusterPosition, endClusterPosition, loadTombstones, OStorage.LOCKING_STRATEGY.DEFAULT); } @Override public <REC extends ORecord> ORecordIteratorCluster<REC> browseCluster(String iClusterName, Class<REC> iRecordClass, long startClusterPosition, long endClusterPosition) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); return new ORecordIteratorCluster<REC>(this, this, clusterId, startClusterPosition, endClusterPosition); } /** * {@inheritDoc} */ public OCommandRequest command(final OCommandRequest iCommand) { checkSecurity(ORule.ResourceGeneric.COMMAND, ORole.PERMISSION_READ); checkIfActive(); final OCommandRequestInternal command = (OCommandRequestInternal) iCommand; try { command.reset(); return command; } catch (Exception e) { throw OException.wrapException(new ODatabaseException("Error on command execution"), e); } } /** * {@inheritDoc} */ public <RET extends List<?>> RET query(final OQuery<?> iCommand, final Object... 
iArgs) { checkIfActive(); iCommand.reset(); return (RET) iCommand.execute(iArgs); } /** * {@inheritDoc} */ public byte getRecordType() { return recordType; } /** * {@inheritDoc} */ @Override public long countClusterElements(final int[] iClusterIds) { return countClusterElements(iClusterIds, false); } /** * {@inheritDoc} */ @Override public long countClusterElements(final int iClusterId) { return countClusterElements(iClusterId, false); } /** * {@inheritDoc} */ @Override public void truncateCluster(String clusterName) { command("truncate cluster " + clusterName); } /** * {@inheritDoc} */ @Override public long countClusterElements(int iClusterId, boolean countTombstones) { final String name = getClusterNameById(iClusterId); if (name == null) return 0; checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, name); checkIfActive(); return getStorage().count(iClusterId, countTombstones); } /** * {@inheritDoc} */ @Override public long countClusterElements(int[] iClusterIds, boolean countTombstones) { checkIfActive(); String name; for (int iClusterId : iClusterIds) { name = getClusterNameById(iClusterId); checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, name); } return getStorage().count(iClusterIds, countTombstones); } /** * {@inheritDoc} */ @Override public long countClusterElements(final String iClusterName) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); if (clusterId < 0) throw new IllegalArgumentException("Cluster '" + iClusterName + "' was not found"); return getStorage().count(clusterId); } /** * {@inheritDoc} */ public OMetadataDefault getMetadata() { checkOpenness(); return metadata; } /** * {@inheritDoc} */ public <DB extends ODatabaseDocument> DB checkSecurity(final ORule.ResourceGeneric resourceGeneric, final String resourceSpecific, final int iOperation) { if (user != null) { try { user.allow(resourceGeneric, resourceSpecific, iOperation); } catch (OSecurityAccessException e) { if (OLogManager.instance().isDebugEnabled()) OLogManager.instance() .debug(this, "User '%s' tried to access the reserved resource '%s.%s', operation '%s'", getUser(), resourceGeneric, resourceSpecific, iOperation); throw e; } } return (DB) this; } /** * {@inheritDoc} */ public <DB extends ODatabaseDocument> DB checkSecurity(final ORule.ResourceGeneric iResourceGeneric, final int iOperation, final Object... 
iResourcesSpecific) { if (user != null) { try { if (iResourcesSpecific.length != 0) { for (Object target : iResourcesSpecific) { if (target != null) { user.allow(iResourceGeneric, target.toString(), iOperation); } else user.allow(iResourceGeneric, null, iOperation); } } else user.allow(iResourceGeneric, null, iOperation); } catch (OSecurityAccessException e) { if (OLogManager.instance().isDebugEnabled()) OLogManager.instance() .debug(this, "[checkSecurity] User '%s' tried to access the reserved resource '%s', target(s) '%s', operation '%s'", getUser(), iResourceGeneric, Arrays.toString(iResourcesSpecific), iOperation); throw e; } } return (DB) this; } /** * {@inheritDoc} */ public <DB extends ODatabaseDocument> DB checkSecurity(final ORule.ResourceGeneric iResourceGeneric, final int iOperation, final Object iResourceSpecific) { checkOpenness(); if (user != null) { try { if (iResourceSpecific != null) user.allow(iResourceGeneric, iResourceSpecific.toString(), iOperation); else user.allow(iResourceGeneric, null, iOperation); } catch (OSecurityAccessException e) { if (OLogManager.instance().isDebugEnabled()) OLogManager.instance() .debug(this, "[checkSecurity] User '%s' tried to access the reserved resource '%s', target '%s', operation '%s'", getUser(), iResourceGeneric, iResourceSpecific, iOperation); throw e; } } return (DB) this; } /** * {@inheritDoc} */ @Override public ODatabaseInternal<?> getDatabaseOwner() { ODatabaseInternal<?> current = databaseOwner; while (current != null && current != this && current.getDatabaseOwner() != current) current = current.getDatabaseOwner(); return current; } /** * {@inheritDoc} */ @Override public ODatabaseInternal<ORecord> setDatabaseOwner(ODatabaseInternal<?> iOwner) { databaseOwner = iOwner; return this; } /** * {@inheritDoc} */ public boolean isRetainRecords() { return retainRecords; } /** * {@inheritDoc} */ public ODatabaseDocument setRetainRecords(boolean retainRecords) { this.retainRecords = retainRecords; return this; } /** * {@inheritDoc} */ public <DB extends ODatabase> DB setStatus(final STATUS status) { checkIfActive(); setStatusInternal(status); return (DB) this; } public void setStatusInternal(final STATUS status) { this.status = status; } /** * Deprecated since v2.2 */ @Deprecated public void setDefaultClusterIdInternal(final int iDefClusterId) { checkIfActive(); getStorage().setDefaultClusterId(iDefClusterId); } /** * {@inheritDoc} */ public void setInternal(final ATTRIBUTES iAttribute, final Object iValue) { set(iAttribute, iValue); } /** * {@inheritDoc} */ public OSecurityUser getUser() { return user; } /** * {@inheritDoc} */ public void setUser(final OSecurityUser user) { checkIfActive(); if (user instanceof OUser) { OMetadata metadata = getMetadata(); if (metadata != null) { final OSecurity security = metadata.getSecurity(); this.user = new OImmutableUser(security.getVersion(), (OUser) user); } else this.user = new OImmutableUser(-1, (OUser) user); } else this.user = (OImmutableUser) user; } public void reloadUser() { if (user != null) { activateOnCurrentThread(); if (user.checkIfAllowed(ORule.ResourceGeneric.CLASS, OUser.CLASS_NAME, ORole.PERMISSION_READ) != null) { OMetadata metadata = getMetadata(); if (metadata != null) { final OSecurity security = metadata.getSecurity(); OUser secGetUser = security.getUser(user.getName()); if (secGetUser != null) user = new OImmutableUser(security.getVersion(), secGetUser); else user = new OImmutableUser(-1, new OUser()); } else user = new OImmutableUser(-1, new OUser()); } } } /** * 
{@inheritDoc} */ public boolean isMVCC() { return true; } /** * {@inheritDoc} */ public <DB extends ODatabase<?>> DB setMVCC(boolean mvcc) { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ public ODictionary<ORecord> getDictionary() { checkOpenness(); return metadata.getIndexManager().getDictionary(); } /** * {@inheritDoc} */ public <DB extends ODatabase<?>> DB registerHook(final ORecordHook iHookImpl, final ORecordHook.HOOK_POSITION iPosition) { checkOpenness(); checkIfActive(); final Map<ORecordHook, ORecordHook.HOOK_POSITION> tmp = new LinkedHashMap<ORecordHook, ORecordHook.HOOK_POSITION>(hooks); tmp.put(iHookImpl, iPosition); hooks.clear(); for (ORecordHook.HOOK_POSITION p : ORecordHook.HOOK_POSITION.values()) { for (Map.Entry<ORecordHook, ORecordHook.HOOK_POSITION> e : tmp.entrySet()) { if (e.getValue() == p) hooks.put(e.getKey(), e.getValue()); } } compileHooks(); return (DB) this; } /** * {@inheritDoc} */ public <DB extends ODatabase<?>> DB registerHook(final ORecordHook iHookImpl) { return (DB) registerHook(iHookImpl, ORecordHook.HOOK_POSITION.REGULAR); } /** * {@inheritDoc} */ public <DB extends ODatabase<?>> DB unregisterHook(final ORecordHook iHookImpl) { checkIfActive(); if (iHookImpl != null) { iHookImpl.onUnregister(); hooks.remove(iHookImpl); compileHooks(); } return (DB) this; } /** * {@inheritDoc} */ @Override public OLocalRecordCache getLocalCache() { return localCache; } /** * {@inheritDoc} */ public Map<ORecordHook, ORecordHook.HOOK_POSITION> getHooks() { return unmodifiableHooks; } /** * Callback the registered hooks if any. * * @param type Hook type. Define when hook is called. * @param id Record received in the callback * * @return True if the input record is changed, otherwise false */ public ORecordHook.RESULT callbackHooks(final ORecordHook.TYPE type, final OIdentifiable id) { if (id == null || hooks.isEmpty() || id.getIdentity().getClusterId() == 0) return ORecordHook.RESULT.RECORD_NOT_CHANGED; final ORecordHook.SCOPE scope = ORecordHook.SCOPE.typeToScope(type); final int scopeOrdinal = scope.ordinal(); final ORID identity = id.getIdentity().copy(); if (!pushInHook(identity)) return ORecordHook.RESULT.RECORD_NOT_CHANGED; try { final ORecord rec = id.getRecord(); if (rec == null) return ORecordHook.RESULT.RECORD_NOT_CHANGED; final OScenarioThreadLocal.RUN_MODE runMode = OScenarioThreadLocal.INSTANCE.getRunMode(); boolean recordChanged = false; for (ORecordHook hook : hooksByScope[scopeOrdinal]) { switch (runMode) { case DEFAULT: // NON_DISTRIBUTED OR PROXIED DB if (getStorage().isDistributed() && hook.getDistributedExecutionMode() == ORecordHook.DISTRIBUTED_EXECUTION_MODE.TARGET_NODE) // SKIP continue; break; // TARGET NODE case RUNNING_DISTRIBUTED: if (hook.getDistributedExecutionMode() == ORecordHook.DISTRIBUTED_EXECUTION_MODE.SOURCE_NODE) continue; } final ORecordHook.RESULT res = hook.onTrigger(type, rec); if (res == ORecordHook.RESULT.RECORD_CHANGED) recordChanged = true; else if (res == ORecordHook.RESULT.SKIP_IO) // SKIP IO OPERATION return res; else if (res == ORecordHook.RESULT.SKIP) // SKIP NEXT HOOKS AND RETURN IT return res; else if (res == ORecordHook.RESULT.RECORD_REPLACED) return res; } return recordChanged ? 
ORecordHook.RESULT.RECORD_CHANGED : ORecordHook.RESULT.RECORD_NOT_CHANGED; } finally { popInHook(identity); } } /** * {@inheritDoc} */ public boolean isValidationEnabled() { return (Boolean) get(ATTRIBUTES.VALIDATION); } /** * {@inheritDoc} */ public <DB extends ODatabaseDocument> DB setValidationEnabled(final boolean iEnabled) { set(ATTRIBUTES.VALIDATION, iEnabled); return (DB) this; } public ORecordConflictStrategy getConflictStrategy() { checkIfActive(); return getStorage().getConflictStrategy(); } public ODatabaseDocumentAbstract setConflictStrategy(final String iStrategyName) { checkIfActive(); getStorage().setConflictStrategy(Orient.instance().getRecordConflictStrategy().getStrategy(iStrategyName)); return this; } public ODatabaseDocumentAbstract setConflictStrategy(final ORecordConflictStrategy iResolver) { checkIfActive(); getStorage().setConflictStrategy(iResolver); return this; } @Override public OContextConfiguration getConfiguration() { checkIfActive(); if (getStorage() != null) return getStorage().getConfiguration().getContextConfiguration(); return null; } @Override public boolean declareIntent(final OIntent iIntent) { checkIfActive(); if (currentIntent != null) { if (iIntent != null && iIntent.getClass().equals(currentIntent.getClass())) // SAME INTENT: JUMP IT return false; // END CURRENT INTENT currentIntent.end(this); } currentIntent = iIntent; if (iIntent != null) iIntent.begin(this); return true; } @Override public OIntent getActiveIntent() { return currentIntent; } @Override public void close() { checkIfActive(); try { closeActiveQueries(); localCache.shutdown(); if (isClosed()) { status = STATUS.CLOSED; return; } try { rollback(true); } catch (Exception e) { OLogManager.instance().error(this, "Exception during commit of active transaction", e); } if (status != STATUS.OPEN) return; callOnCloseListeners(); if (currentIntent != null) { currentIntent.end(this); currentIntent = null; } sharedContext = null; status = STATUS.CLOSED; localCache.clear(); if (getStorage() != null) getStorage().close(); } finally { // ALWAYS RESET TL ODatabaseRecordThreadLocal.instance().remove(); } } @Override public STATUS getStatus() { return status; } @Override public long getSize() { checkIfActive(); return getStorage().getSize(); } @Override public String getName() { return getStorage() != null ? getStorage().getName() : url; } @Override public String getURL() { return url != null ? 
url : getStorage().getURL(); } @Override public int getDefaultClusterId() { checkIfActive(); return getStorage().getDefaultClusterId(); } @Override public int getClusters() { checkIfActive(); return getStorage().getClusters(); } @Override public boolean existsCluster(final String iClusterName) { checkIfActive(); return getStorage().getClusterNames().contains(iClusterName.toLowerCase(Locale.ENGLISH)); } @Override public Collection<String> getClusterNames() { checkIfActive(); return getStorage().getClusterNames(); } @Override public int getClusterIdByName(final String iClusterName) { if (iClusterName == null) return -1; checkIfActive(); return getStorage().getClusterIdByName(iClusterName.toLowerCase(Locale.ENGLISH)); } @Override public String getClusterNameById(final int iClusterId) { if (iClusterId < 0) return null; checkIfActive(); return getStorage().getPhysicalClusterNameById(iClusterId); } @Override public long getClusterRecordSizeByName(final String clusterName) { checkIfActive(); try { return getStorage().getClusterById(getClusterIdByName(clusterName)).getRecordsSize(); } catch (Exception e) { throw OException.wrapException(new ODatabaseException("Error on reading records size for cluster '" + clusterName + "'"), e); } } @Override public long getClusterRecordSizeById(final int clusterId) { checkIfActive(); try { return getStorage().getClusterById(clusterId).getRecordsSize(); } catch (Exception e) { throw OException .wrapException(new ODatabaseException("Error on reading records size for cluster with id '" + clusterId + "'"), e); } } @Override public boolean isClosed() { return status == STATUS.CLOSED || getStorage().isClosed(); } @Override public int addCluster(final String iClusterName, final Object... iParameters) { checkIfActive(); return getStorage().addCluster(iClusterName, iParameters); } @Override public int addCluster(final String iClusterName, final int iRequestedId, final Object... 
iParameters) { checkIfActive(); return getStorage().addCluster(iClusterName, iRequestedId, iParameters); } @Override public boolean dropCluster(final String iClusterName, final boolean iTruncate) { checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); OSchemaProxy schema = metadata.getSchema(); OClass clazz = schema.getClassByClusterId(clusterId); if (clazz != null) clazz.removeClusterId(clusterId); if (schema.getBlobClusters().contains(clusterId)) schema.removeBlobCluster(iClusterName); getLocalCache().freeCluster(clusterId); checkForClusterPermissions(iClusterName); return getStorage().dropCluster(iClusterName, iTruncate); } @Override public boolean dropCluster(final int iClusterId, final boolean iTruncate) { checkIfActive(); checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_DELETE, getClusterNameById(iClusterId)); OSchemaProxy schema = metadata.getSchema(); final OClass clazz = schema.getClassByClusterId(iClusterId); if (clazz != null) clazz.removeClusterId(iClusterId); getLocalCache().freeCluster(iClusterId); if (schema.getBlobClusters().contains(iClusterId)) schema.removeBlobCluster(getClusterNameById(iClusterId)); checkForClusterPermissions(getClusterNameById(iClusterId)); return getStorage().dropCluster(iClusterId, iTruncate); } public void checkForClusterPermissions(final String iClusterName) { // CHECK FOR ORESTRICTED final Set<OClass> classes = getMetadata().getImmutableSchemaSnapshot().getClassesRelyOnCluster(iClusterName); for (OClass c : classes) { if (c.isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME)) throw new OSecurityException( "Class '" + c.getName() + "' cannot be truncated because has record level security enabled (extends '" + OSecurityShared.RESTRICTED_CLASSNAME + "')"); } } @Override public Object setProperty(final String iName, final Object iValue) { if (iValue == null) return properties.remove(iName.toLowerCase(Locale.ENGLISH)); else return properties.put(iName.toLowerCase(Locale.ENGLISH), iValue); } @Override public Object getProperty(final String iName) { return properties.get(iName.toLowerCase(Locale.ENGLISH)); } @Override public Iterator<Map.Entry<String, Object>> getProperties() { return properties.entrySet().iterator(); } @Override public Object get(final ATTRIBUTES iAttribute) { checkIfActive(); if (iAttribute == null) throw new IllegalArgumentException("attribute is null"); final OStorage storage = getStorage(); switch (iAttribute) { case STATUS: return getStatus(); case DEFAULTCLUSTERID: return getDefaultClusterId(); case TYPE: return getMetadata().getImmutableSchemaSnapshot().existsClass("V") ? 
"graph" : "document"; case DATEFORMAT: return storage.getConfiguration().getDateFormat(); case DATETIMEFORMAT: return storage.getConfiguration().getDateTimeFormat(); case TIMEZONE: return storage.getConfiguration().getTimeZone().getID(); case LOCALECOUNTRY: return storage.getConfiguration().getLocaleCountry(); case LOCALELANGUAGE: return storage.getConfiguration().getLocaleLanguage(); case CHARSET: return storage.getConfiguration().getCharset(); case CUSTOM: return storage.getConfiguration().getProperties(); case CLUSTERSELECTION: return storage.getConfiguration().getClusterSelection(); case MINIMUMCLUSTERS: return storage.getConfiguration().getMinimumClusters(); case CONFLICTSTRATEGY: return storage.getConfiguration().getConflictStrategy(); case VALIDATION: return storage.getConfiguration().isValidationEnabled(); } return null; } @Override public ORecordMetadata getRecordMetadata(final ORID rid) { checkIfActive(); return getStorage().getRecordMetadata(rid); } public OTransaction getTransaction() { checkIfActive(); return currentTx; } @Override public OBasicTransaction getMicroOrRegularTransaction() { return microTransaction != null && microTransaction.isActive() ? microTransaction : getTransaction(); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET load(final ORecord iRecord, final String iFetchPlan) { checkIfActive(); return (RET) currentTx.loadRecord(iRecord.getIdentity(), iRecord, iFetchPlan, false, false, OStorage.LOCKING_STRATEGY.DEFAULT); } @SuppressWarnings("unchecked") @Override @Deprecated public <RET extends ORecord> RET load(ORecord iRecord, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone, OStorage.LOCKING_STRATEGY iLockingStrategy) { checkIfActive(); return (RET) currentTx .loadRecord(iRecord.getIdentity(), iRecord, iFetchPlan, iIgnoreCache, !iIgnoreCache, loadTombstone, iLockingStrategy); } @SuppressWarnings("unchecked") @Override @Deprecated public <RET extends ORecord> RET load(final ORecord iRecord, final String iFetchPlan, final boolean iIgnoreCache, final boolean iUpdateCache, final boolean loadTombstone, final OStorage.LOCKING_STRATEGY iLockingStrategy) { checkIfActive(); return (RET) currentTx .loadRecord(iRecord.getIdentity(), iRecord, iFetchPlan, iIgnoreCache, iUpdateCache, loadTombstone, iLockingStrategy); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET load(final ORecord iRecord) { checkIfActive(); return (RET) currentTx.loadRecord(iRecord.getIdentity(), iRecord, null, false); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET load(final ORID recordId) { return (RET) currentTx.loadRecord(recordId, null, null, false); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET load(final ORID iRecordId, final String iFetchPlan) { checkIfActive(); return (RET) currentTx.loadRecord(iRecordId, null, iFetchPlan, false); } @SuppressWarnings("unchecked") public <RET extends ORecord> RET loadIfVersionIsNotLatest(final ORID rid, final int recordVersion, String fetchPlan, boolean ignoreCache) throws ORecordNotFoundException { checkIfActive(); return (RET) currentTx.loadRecordIfVersionIsNotLatest(rid, recordVersion, fetchPlan, ignoreCache); } @SuppressWarnings("unchecked") @Override @Deprecated public <RET extends ORecord> RET load(final ORID iRecordId, String iFetchPlan, final boolean iIgnoreCache, final boolean loadTombstone, OStorage.LOCKING_STRATEGY iLockingStrategy) { checkIfActive(); return (RET) currentTx.loadRecord(iRecordId, null, iFetchPlan, iIgnoreCache, 
loadTombstone, iLockingStrategy); } @SuppressWarnings("unchecked") @Override @Deprecated public <RET extends ORecord> RET load(final ORID iRecordId, String iFetchPlan, final boolean iIgnoreCache, final boolean iUpdateCache, final boolean loadTombstone, OStorage.LOCKING_STRATEGY iLockingStrategy) { checkIfActive(); return (RET) currentTx.loadRecord(iRecordId, null, iFetchPlan, iIgnoreCache, iUpdateCache, loadTombstone, iLockingStrategy); } @SuppressWarnings("unchecked") public <RET extends ORecord> RET reload(final ORecord iRecord) { return reload(iRecord, null, false); } @SuppressWarnings("unchecked") public <RET extends ORecord> RET reload(final ORecord iRecord, final String iFetchPlan) { return reload(iRecord, iFetchPlan, false); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET reload(final ORecord iRecord, final String iFetchPlan, final boolean iIgnoreCache) { return reload(iRecord, iFetchPlan, iIgnoreCache, true); } @Override public <RET extends ORecord> RET reload(ORecord record, String fetchPlan, boolean ignoreCache, boolean force) { checkIfActive(); final ORecord loadedRecord = currentTx.reloadRecord(record.getIdentity(), record, fetchPlan, ignoreCache, force); if (loadedRecord != null && record != loadedRecord) { record.fromStream(loadedRecord.toStream()); ORecordInternal.setVersion(record, loadedRecord.getVersion()); } else if (loadedRecord == null) { throw new ORecordNotFoundException(record.getIdentity()); } return (RET) record; } /** * Deletes the record without checking the version. */ public ODatabaseDocument delete(final ORID iRecord) { checkOpenness(); checkIfActive(); final ORecord rec = load(iRecord); if (rec != null) delete(rec); return this; } @Override public boolean hide(ORID rid) { checkOpenness(); checkIfActive(); if (currentTx.isActive()) throw new ODatabaseException("This operation can be executed only in non transaction mode"); return executeHideRecord(rid, OPERATION_MODE.SYNCHRONOUS); } @Override public OBinarySerializerFactory getSerializerFactory() { return componentsFactory.binarySerializerFactory; } @Deprecated public ODatabaseDocument begin(final OTransaction iTx) { begin(); return this; } public void rawBegin(final OTransaction iTx) { checkOpenness(); checkIfActive(); if (currentTx.isActive() && iTx.equals(currentTx)) { currentTx.begin(); } currentTx.rollback(true, 0); // WAKE UP LISTENERS for (ODatabaseListener listener : browseListeners()) try { listener.onBeforeTxBegin(this); } catch (Exception e) { final String message = "Error before the transaction begin"; OLogManager.instance().error(this, message, e); throw OException.wrapException(new OTransactionBlockedException(message), e); } currentTx = iTx; currentTx.begin(); } /** * {@inheritDoc} */ public <RET extends ORecord> RET load(final ORecord iRecord, final String iFetchPlan, final boolean iIgnoreCache) { return (RET) executeReadRecord((ORecordId) iRecord.getIdentity(), iRecord, -1, iFetchPlan, iIgnoreCache, !iIgnoreCache, false, OStorage.LOCKING_STRATEGY.NONE, new SimpleRecordReader(prefetchRecords)); } @Override public void setPrefetchRecords(boolean prefetchRecords) { this.prefetchRecords = prefetchRecords; } @Override public boolean isPrefetchRecords() { return prefetchRecords; } /** * This method is internal, it can be subject to signature change or be removed, do not use. 
* * @Internal */ public <RET extends ORecord> RET executeReadRecord(final ORecordId rid, ORecord iRecord, final int recordVersion, final String fetchPlan, final boolean ignoreCache, final boolean iUpdateCache, final boolean loadTombstones, final OStorage.LOCKING_STRATEGY lockingStrategy, RecordReader recordReader) { checkOpenness(); checkIfActive(); getMetadata().makeThreadLocalSchemaSnapshot(); ORecordSerializationContext.pushContext(); try { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, getClusterNameById(rid.getClusterId())); // either regular or micro tx must be active or both inactive assert !(getTransaction().isActive() && (microTransaction != null && microTransaction.isActive())); // SEARCH IN LOCAL TX ORecord record = getTransaction().getRecord(rid); if (record == OBasicTransaction.DELETED_RECORD) // DELETED IN TX return null; if (record == null) { if (microTransaction != null && microTransaction.isActive()) { record = microTransaction.getRecord(rid); if (record == OBasicTransaction.DELETED_RECORD) return null; } } if (record == null && !ignoreCache) // SEARCH INTO THE CACHE record = getLocalCache().findRecord(rid); if (record != null) { if (iRecord != null) { iRecord.fromStream(record.toStream()); ORecordInternal.setVersion(iRecord, record.getVersion()); record = iRecord; } OFetchHelper.checkFetchPlanValid(fetchPlan); if (callbackHooks(ORecordHook.TYPE.BEFORE_READ, record) == ORecordHook.RESULT.SKIP) return null; if (record.getInternalStatus() == ORecordElement.STATUS.NOT_LOADED) record.reload(); if (lockingStrategy == OStorage.LOCKING_STRATEGY.KEEP_SHARED_LOCK) { OLogManager.instance() .warn(this, "You use deprecated record locking strategy: %s it may lead to deadlocks " + lockingStrategy); record.lock(false); } else if (lockingStrategy == OStorage.LOCKING_STRATEGY.KEEP_EXCLUSIVE_LOCK) { OLogManager.instance() .warn(this, "You use deprecated record locking strategy: %s it may lead to deadlocks " + lockingStrategy); record.lock(true); } callbackHooks(ORecordHook.TYPE.AFTER_READ, record); if (record instanceof ODocument) ODocumentInternal.checkClass((ODocument) record, this); return (RET) record; } final ORawBuffer recordBuffer; if (!rid.isValid()) recordBuffer = null; else { OFetchHelper.checkFetchPlanValid(fetchPlan); int version; if (iRecord != null) version = iRecord.getVersion(); else version = recordVersion; recordBuffer = recordReader.readRecord(getStorage(), rid, fetchPlan, ignoreCache, version); } if (recordBuffer == null) return null; if (iRecord == null || ORecordInternal.getRecordType(iRecord) != recordBuffer.recordType) // NO SAME RECORD TYPE: CAN'T REUSE OLD ONE BUT CREATE A NEW ONE FOR IT iRecord = Orient.instance().getRecordFactoryManager().newInstance(recordBuffer.recordType, rid.getClusterId(), this); ORecordInternal.fill(iRecord, rid, recordBuffer.version, recordBuffer.buffer, false, this); if (iRecord instanceof ODocument) ODocumentInternal.checkClass((ODocument) iRecord, this); if (ORecordVersionHelper.isTombstone(iRecord.getVersion())) return (RET) iRecord; if (callbackHooks(ORecordHook.TYPE.BEFORE_READ, iRecord) == ORecordHook.RESULT.SKIP) return null; iRecord.fromStream(recordBuffer.buffer); callbackHooks(ORecordHook.TYPE.AFTER_READ, iRecord); if (iUpdateCache) getLocalCache().updateRecord(iRecord); return (RET) iRecord; } catch (OOfflineClusterException t) { throw t; } catch (ORecordNotFoundException t) { throw t; } catch (Exception t) { if (rid.isTemporary()) throw OException.wrapException(new ODatabaseException("Error on retrieving 
record using temporary RID: " + rid), t); else throw OException.wrapException(new ODatabaseException( "Error on retrieving record " + rid + " (cluster: " + getStorage().getPhysicalClusterNameById(rid.getClusterId()) + ")"), t); } finally { ORecordSerializationContext.pullContext(); getMetadata().clearThreadLocalSchemaSnapshot(); } } public int assignAndCheckCluster(ORecord record, String iClusterName) { ORecordId rid = (ORecordId) record.getIdentity(); // if provided a cluster name use it. if (rid.getClusterId() <= ORID.CLUSTER_POS_INVALID && iClusterName != null) { rid.setClusterId(getClusterIdByName(iClusterName)); if (rid.getClusterId() == -1) throw new IllegalArgumentException("Cluster name '" + iClusterName + "' is not configured"); } OClass schemaClass = null; // if cluster id is not set yet try to find it out if (rid.getClusterId() <= ORID.CLUSTER_ID_INVALID && getStorage().isAssigningClusterIds()) { if (record instanceof ODocument) { schemaClass = ODocumentInternal.getImmutableSchemaClass(((ODocument) record)); if (schemaClass != null) { if (schemaClass.isAbstract()) throw new OSchemaException("Document belongs to abstract class " + schemaClass.getName() + " and cannot be saved"); rid.setClusterId(schemaClass.getClusterForNewInstance((ODocument) record)); } else throw new ODatabaseException("Cannot save (1) document " + record + ": no class or cluster defined"); } else { if (record instanceof ORecordBytes) { Set<Integer> blobs = getBlobClusterIds(); if (blobs.size() == 0) { rid.setClusterId(getDefaultClusterId()); } else { rid.setClusterId(blobs.iterator().next()); } } else { throw new ODatabaseException("Cannot save (3) document " + record + ": no class or cluster defined"); } } } else if (record instanceof ODocument) schemaClass = ODocumentInternal.getImmutableSchemaClass(((ODocument) record)); // If the cluster id was set check is validity if (rid.getClusterId() > ORID.CLUSTER_ID_INVALID) { if (schemaClass != null) { String messageClusterName = getClusterNameById(rid.getClusterId()); checkRecordClass(schemaClass, messageClusterName, rid); if (!schemaClass.hasClusterId(rid.getClusterId())) { throw new IllegalArgumentException( "Cluster name '" + messageClusterName + "' (id=" + rid.getClusterId() + ") is not configured to store the class '" + schemaClass.getName() + "', valid are " + Arrays.toString(schemaClass.getClusterIds())); } } } return rid.getClusterId(); } public <RET extends ORecord> RET executeSaveEmptyRecord(ORecord record, String clusterName) { ORecordId rid = (ORecordId) record.getIdentity(); assert rid.isNew(); ORecordInternal.onBeforeIdentityChanged(record); int id = assignAndCheckCluster(record, clusterName); clusterName = getClusterNameById(id); checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_CREATE, clusterName); byte[] content = getSerializer().writeClassOnly(record); final OStorageOperationResult<OPhysicalPosition> ppos = getStorage() .createRecord(rid, content, record.getVersion(), recordType, OPERATION_MODE.SYNCHRONOUS.ordinal(), null); ORecordInternal.setVersion(record, ppos.getResult().recordVersion); ((ORecordId) record.getIdentity()).copyFrom(rid); ORecordInternal.onAfterIdentityChanged(record); return (RET) record; } public abstract <RET extends ORecord> RET executeSaveRecord(final ORecord record, String clusterName, final int ver, final OPERATION_MODE mode, boolean forceCreate, final ORecordCallback<? 
extends Number> recordCreatedCallback, ORecordCallback<Integer> recordUpdatedCallback); public abstract void executeDeleteRecord(OIdentifiable record, final int iVersion, final boolean iRequired, final OPERATION_MODE iMode, boolean prohibitTombstones); /** * This method is internal, it can be subject to signature change or be removed, do not use. * * @Internal */ public boolean executeHideRecord(OIdentifiable record, final OPERATION_MODE iMode) { checkOpenness(); checkIfActive(); final ORecordId rid = (ORecordId) record.getIdentity(); if (rid == null) throw new ODatabaseException( "Cannot hide record because it has no identity. Probably was created from scratch or contains projections of fields rather than a full record"); if (!rid.isValid()) return false; checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_DELETE, getClusterNameById(rid.getClusterId())); getMetadata().makeThreadLocalSchemaSnapshot(); if (record instanceof ODocument) ODocumentInternal.checkClass((ODocument) record, this); ORecordSerializationContext.pushContext(); try { final OStorageOperationResult<Boolean> operationResult; operationResult = getStorage().hideRecord(rid, iMode.ordinal(), null); // REMOVE THE RECORD FROM 1 AND 2 LEVEL CACHES if (!operationResult.isMoved()) getLocalCache().deleteRecord(rid); return operationResult.getResult(); } finally { ORecordSerializationContext.pullContext(); getMetadata().clearThreadLocalSchemaSnapshot(); } }
public ODatabaseDocumentAbstract begin() { return begin(OTransaction.TXTYPE.OPTIMISTIC); } public ODatabaseDocumentAbstract begin(final OTransaction.TXTYPE iType) { checkOpenness(); checkIfActive(); if (currentTx.isActive()) { if (iType == OTransaction.TXTYPE.OPTIMISTIC && currentTx instanceof OTransactionOptimistic) { currentTx.begin(); return this; } currentTx.rollback(true, 0); } // CHECK IT'S NOT INSIDE A HOOK if (!inHook.isEmpty()) throw new IllegalStateException("Cannot begin a transaction while a hook is executing"); // WAKE UP LISTENERS for (ODatabaseListener listener : browseListeners()) try { listener.onBeforeTxBegin(this); } catch (Exception e) { OLogManager.instance().error(this, "Error before tx begin", e); } switch (iType) { case NOTX: setDefaultTransactionMode(); break; case OPTIMISTIC: currentTx = new OTransactionOptimistic(this); break; case PESSIMISTIC: throw new UnsupportedOperationException("Pessimistic transaction"); } currentTx.begin(); return this; } public void setDefaultTransactionMode() { if (!(currentTx instanceof OTransactionNoTx)) currentTx = new OTransactionNoTx(this); }
/** * {@inheritDoc} */ @Override public void freeze(final boolean throwException) { checkOpenness(); if (!(getStorage() instanceof OFreezableStorageComponent)) { OLogManager.instance().error(this, "Only local paginated storage supports freeze. If you are using a remote client please use OServerAdmin instead", null); return; } final long startTime = Orient.instance().getProfiler().startChrono(); final OFreezableStorageComponent storage = getFreezableStorage(); if (storage != null) { storage.freeze(throwException); } Orient.instance().getProfiler() .stopChrono("db." + getName() + ".freeze", "Time to freeze the database", startTime, "db.*.freeze"); } /** * {@inheritDoc} */ @Override public void freeze() { checkOpenness(); if (!(getStorage() instanceof OFreezableStorageComponent)) { OLogManager.instance().error(this, "Only local paginated storage supports freeze. " + "If you are using a remote client please use OServerAdmin instead", null); return; } final long startTime = Orient.instance().getProfiler().startChrono(); final OFreezableStorageComponent storage = getFreezableStorage(); if (storage != null) { storage.freeze(false); } Orient.instance().getProfiler() .stopChrono("db." + getName() + ".freeze", "Time to freeze the database", startTime, "db.*.freeze"); }
@Override public boolean isFrozen() { if (!(getStorage() instanceof OFreezableStorageComponent)) return false; final OFreezableStorageComponent storage = getFreezableStorage(); if (storage != null) return storage.isFrozen(); return false; } /** * {@inheritDoc} */ @Override public void release() { checkOpenness(); if (!(getStorage() instanceof OFreezableStorageComponent)) { OLogManager.instance().error(this, "Only local paginated storage supports release. If you are using a remote client please use OServerAdmin instead", null); return; } final long startTime = Orient.instance().getProfiler().startChrono(); final OFreezableStorageComponent storage = getFreezableStorage(); if (storage != null) { storage.release(); } Orient.instance().getProfiler() .stopChrono("db." + getName() + ".release", "Time to release the database", startTime, "db.*.release"); }
/** * Creates a new ODocument. */ public ODocument newInstance() { return new ODocument(); } @Override public OBlob newBlob(byte[] bytes) { return new ORecordBytes(bytes); } @Override public OBlob newBlob() { return new ORecordBytes(); } /** * Creates a document with a specific class. * * @param iClassName the name of the class that should be used as the class of the created document. * * @return new instance of document. */ @Override public ODocument newInstance(final String iClassName) { return new ODocument(iClassName); } @Override public OElement newElement() { return newInstance(); } @Override public OElement newElement(String className) { return newInstance(className); } public OElement newElement(OClass clazz) { return newInstance(clazz.getName()); }
public OVertex newVertex(final String iClassName) { OClass cl = getClass(iClassName); if (cl == null || !cl.isVertexType()) { throw new IllegalArgumentException("" + iClassName + " is not a vertex class"); } OVertex doc = new OVertexDocument(cl); return doc; } @Override public OVertex newVertex(OClass type) { if (type == null) { return newVertex("V"); } return newVertex(type.getName()); } @Override public OEdge newEdge(OVertex from, OVertex to, String type) { OClass cl = getClass(type); if (cl == null || !cl.isEdgeType()) { throw new IllegalArgumentException("" + type + " is not an edge class"); } ODocument doc = new OEdgeDocument(cl); return addEdgeInternal(from, to, type); } @Override public OEdge newEdge(OVertex from, OVertex to, OClass type) { if (type == null) { return newEdge(from, to, "E"); } return newEdge(from, to, type.getName()); } private OEdge addEdgeInternal(final OVertex currentVertex, final OVertex inVertex, String iClassName, final Object...
fields) { if (currentVertex == null) throw new IllegalArgumentException("From vertex is null"); if (inVertex == null) throw new IllegalArgumentException("To vertex is null"); OEdge edge = null; ODocument outDocument = null; ODocument inDocument = null; boolean outDocumentModified = false; if (checkDeletedInTx(currentVertex)) throw new ORecordNotFoundException(currentVertex.getIdentity(), "The vertex " + currentVertex.getIdentity() + " has been deleted"); if (checkDeletedInTx(inVertex)) throw new ORecordNotFoundException(inVertex.getIdentity(), "The vertex " + inVertex.getIdentity() + " has been deleted"); final int maxRetries = 1;//TODO for (int retry = 0; retry < maxRetries; ++retry) { try { // TEMPORARY STATIC LOCK TO AVOID MT PROBLEMS AGAINST OMVRBTreeRID if (outDocument == null) { outDocument = currentVertex.getRecord(); if (outDocument == null) throw new IllegalArgumentException("source vertex is invalid (rid=" + currentVertex.getIdentity() + ")"); } if (inDocument == null) { inDocument = inVertex.getRecord(); if (inDocument == null) throw new IllegalArgumentException("destination vertex is invalid (rid=" + inVertex.getIdentity() + ")"); } if (!ODocumentInternal.getImmutableSchemaClass(outDocument).isVertexType()) throw new IllegalArgumentException("source record is not a vertex"); if (!ODocumentInternal.getImmutableSchemaClass(inDocument).isVertexType()) throw new IllegalArgumentException("destination record is not a vertex");
OVertex to = inVertex; OVertex from = currentVertex; OSchema schema = getMetadata().getSchema(); final OClass edgeType = schema.getClass(iClassName); if (edgeType == null) // AUTO CREATE CLASS schema.createClass(iClassName); else // OVERWRITE CLASS NAME BECAUSE ATTRIBUTES ARE CASE SENSITIVE iClassName = edgeType.getName(); final String outFieldName = getConnectionFieldName(ODirection.OUT, iClassName); final String inFieldName = getConnectionFieldName(ODirection.IN, iClassName); // since the label for the edge can potentially get re-assigned // before being pushed into the OrientEdge, the // null check has to go here. if (iClassName == null) throw new IllegalArgumentException("Class " + iClassName + " cannot be found"); // CREATE THE EDGE DOCUMENT TO STORE FIELDS TOO if (isUseLightweightEdges() && (fields == null || fields.length == 0)) { edge = newLightweightEdge(iClassName, from, to); OVertexDelegate.createLink(from.getRecord(), to.getRecord(), outFieldName); OVertexDelegate.createLink(to.getRecord(), from.getRecord(), inFieldName); } else { edge = newInstance(iClassName).asEdge().get(); edge.setProperty("out", currentVertex.getRecord()); edge.setProperty("in", inDocument.getRecord()); if (fields != null) { for (int i = 0; i < fields.length; i += 2) { String fieldName = "" + fields[i]; if (fields.length <= i + 1) { break; } Object fieldValue = fields[i + 1]; edge.setProperty(fieldName, fieldValue); } } if (!outDocumentModified) { // OUT-VERTEX ---> IN-VERTEX/EDGE OVertexDelegate.createLink(outDocument, edge.getRecord(), outFieldName); } // IN-VERTEX ---> OUT-VERTEX/EDGE OVertexDelegate.createLink(inDocument, edge.getRecord(), inFieldName); } // OK break; } catch (ONeedRetryException ignore) { // RETRY if (!outDocumentModified) outDocument.reload(); else if (inDocument != null) inDocument.reload(); } catch (RuntimeException e) { // REVERT CHANGES. 
EDGE.REMOVE() TAKES CARE TO UPDATE ALSO BOTH VERTICES IN CASE try { edge.delete(); } catch (Exception ex) { OLogManager.instance().error(this, "Error during edge deletion", ex); } throw e; } catch (Exception e) { // REVERT CHANGES. EDGE.REMOVE() TAKES CARE TO UPDATE ALSO BOTH VERTICES IN CASE try { edge.delete(); } catch (Exception ex) { OLogManager.instance().error(this, "Error during edge deletion", ex); } throw new IllegalStateException("Error on addEdge in non tx environment", e); } } return edge; } private boolean checkDeletedInTx(OVertex currentVertex) { ORID id; if (currentVertex.getRecord() != null) id = currentVertex.getRecord().getIdentity(); else return false; final ORecordOperation oper = getTransaction().getRecordEntry(id); if (oper == null) return id.isTemporary(); else return oper.type == ORecordOperation.DELETED; } private static String getConnectionFieldName(final ODirection iDirection, final String iClassName) { if (iDirection == null || iDirection == ODirection.BOTH) throw new IllegalArgumentException("Direction not valid"); // PREFIX "out_" or "in_" TO THE FIELD NAME final String prefix = iDirection == ODirection.OUT ? "out_" : "in_"; if (iClassName == null || iClassName.isEmpty() || iClassName.equals("E")) return prefix; return prefix + iClassName; } /** * {@inheritDoc} */ public ORecordIteratorClass<ODocument> browseClass(final String iClassName) { return browseClass(iClassName, true); } /** * {@inheritDoc} */ public ORecordIteratorClass<ODocument> browseClass(final String iClassName, final boolean iPolymorphic) { if (getMetadata().getImmutableSchemaSnapshot().getClass(iClassName) == null) throw new IllegalArgumentException("Class '" + iClassName + "' not found in current database"); checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_READ, iClassName); return new ORecordIteratorClass<ODocument>(this, this, iClassName, iPolymorphic, false); } /** * {@inheritDoc} */ @Override public ORecordIteratorCluster<ODocument> browseCluster(final String iClusterName) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); return new ORecordIteratorCluster<ODocument>(this, this, getClusterIdByName(iClusterName)); } /** * {@inheritDoc} */ @Override public Iterable<ODatabaseListener> getListeners() { return getListenersCopy(); } /** * {@inheritDoc} */ @Override @Deprecated public ORecordIteratorCluster<ODocument> browseCluster(String iClusterName, long startClusterPosition, long endClusterPosition, boolean loadTombstones) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); return new ORecordIteratorCluster<ODocument>(this, this, getClusterIdByName(iClusterName), startClusterPosition, endClusterPosition, loadTombstones, OStorage.LOCKING_STRATEGY.DEFAULT); } /** * Saves a document to the database. Behavior depends by the current running transaction if any. If no transaction is running then * changes apply immediately. If an Optimistic transaction is running then the record will be changed at commit time. The current * transaction will continue to see the record as modified, while others not. If a Pessimistic transaction is running, then an * exclusive lock is acquired against the record. Current transaction will continue to see the record as modified, while others * cannot access to it since it's locked. 
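 * <p>
 * A minimal usage sketch, assuming {@code db} is an open database instance; the "Person" class
 * name is illustrative only and not part of this API:
 * <pre>
 *   ODocument doc = db.newInstance("Person");
 *   doc.field("name", "Jay");
 *   db.save(doc);
 * </pre>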
* <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown.Before to save the document it must be valid following the constraints * declared in the schema if any (can work also in schema-less mode). To validate the document the {@link ODocument#validate()} is * called. * * @param iRecord Record to save. * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. * * @throws OConcurrentModificationException if the version of the document is different by the version contained in the database. * @throws OValidationException if the document breaks some validation constraints defined in the schema * @see #setMVCC(boolean), {@link #isMVCC()} */ @Override public <RET extends ORecord> RET save(final ORecord iRecord) { return (RET) save(iRecord, null, OPERATION_MODE.SYNCHRONOUS, false, null, null); } /** * Saves a document to the database. Behavior depends by the current running transaction if any. If no transaction is running then * changes apply immediately. If an Optimistic transaction is running then the record will be changed at commit time. The current * transaction will continue to see the record as modified, while others not. If a Pessimistic transaction is running, then an * exclusive lock is acquired against the record. Current transaction will continue to see the record as modified, while others * cannot access to it since it's locked. * <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown.Before to save the document it must be valid following the constraints * declared in the schema if any (can work also in schema-less mode). To validate the document the {@link ODocument#validate()} is * called. * * @param iRecord Record to save. * @param iForceCreate Flag that indicates that record should be created. If record with current rid already exists, * exception is thrown * @param iRecordCreatedCallback callback that is called after creation of new record * @param iRecordUpdatedCallback callback that is called after record update * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. * * @throws OConcurrentModificationException if the version of the document is different by the version contained in the database. * @throws OValidationException if the document breaks some validation constraints defined in the schema * @see #setMVCC(boolean), {@link #isMVCC()} */ @Override public <RET extends ORecord> RET save(final ORecord iRecord, final OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { return save(iRecord, null, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } /** * Saves a document specifying a cluster where to store the record. Behavior depends by the current running transaction if any. If * no transaction is running then changes apply immediately. If an Optimistic transaction is running then the record will be * changed at commit time. The current transaction will continue to see the record as modified, while others not. If a Pessimistic * transaction is running, then an exclusive lock is acquired against the record. 
Current transaction will continue to see the * record as modified, while others cannot access to it since it's locked. * <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown. Before to save the document it must be valid following the constraints * declared in the schema if any (can work also in schema-less mode). To validate the document the {@link ODocument#validate()} is * called. * * @param iRecord Record to save * @param iClusterName Cluster name where to save the record * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. * * @throws OConcurrentModificationException if the version of the document is different by the version contained in the database. * @throws OValidationException if the document breaks some validation constraints defined in the schema * @see #setMVCC(boolean), {@link #isMVCC()}, ODocument#validate() */ @Override public <RET extends ORecord> RET save(final ORecord iRecord, final String iClusterName) { return (RET) save(iRecord, iClusterName, OPERATION_MODE.SYNCHRONOUS, false, null, null); } /** * Saves a document specifying a cluster where to store the record. Behavior depends by the current running transaction if any. If * no transaction is running then changes apply immediately. If an Optimistic transaction is running then the record will be * changed at commit time. The current transaction will continue to see the record as modified, while others not. If a Pessimistic * transaction is running, then an exclusive lock is acquired against the record. Current transaction will continue to see the * record as modified, while others cannot access to it since it's locked. * <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown. Before to save the document it must be valid following the constraints * declared in the schema if any (can work also in schema-less mode). To validate the document the {@link ODocument#validate()} is * called. * * @param iRecord Record to save * @param iClusterName Cluster name where to save the record * @param iMode Mode of save: synchronous (default) or asynchronous * @param iForceCreate Flag that indicates that record should be created. If record with current rid already exists, * exception is thrown * @param iRecordCreatedCallback callback that is called after creation of new record * @param iRecordUpdatedCallback callback that is called after record update * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. * * @throws OConcurrentModificationException if the version of the document is different by the version contained in the database. * @throws OValidationException if the document breaks some validation constraints defined in the schema * @see #setMVCC(boolean), {@link #isMVCC()}, ODocument#validate() */ @Override public <RET extends ORecord> RET save(ORecord iRecord, String iClusterName, final OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<? 
extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { checkOpenness(); ODirtyManager dirtyManager = ORecordInternal.getDirtyManager(iRecord); if (iRecord instanceof OElement && dirtyManager != null && dirtyManager.getReferences() != null && !dirtyManager.getReferences() .isEmpty()) { if ((((OElement) iRecord).isVertex() || ((OElement) iRecord).isEdge()) && !getTransaction().isActive() && inHook.isEmpty()) { return saveGraph(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } } return saveInternal(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } private <RET extends ORecord> RET saveInternal(ORecord iRecord, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate, ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { if (iRecord instanceof OVertex) { iRecord = iRecord.getRecord(); } if (iRecord instanceof OEdge) { iRecord = iRecord.getRecord(); } if (!(iRecord instanceof ODocument)) { assignAndCheckCluster(iRecord, iClusterName); return (RET) currentTx.saveRecord(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } ODocument doc = (ODocument) iRecord; ODocumentInternal.checkClass(doc, this); // IN TX THE VALIDATION MAY BE RUN TWICE BUT IS CORRECT BECAUSE OF DIFFERENT RECORD STATUS try { doc.validate(); } catch (OValidationException e) { doc.undo(); throw e; } ODocumentInternal.convertAllMultiValuesToTrackedVersions(doc); if (iForceCreate || !doc.getIdentity().isValid()) { if (doc.getClassName() != null) checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_CREATE, doc.getClassName()); assignAndCheckCluster(doc, iClusterName); } else { // UPDATE: CHECK ACCESS ON SCHEMA CLASS NAME (IF ANY) if (doc.getClassName() != null) checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_UPDATE, doc.getClassName()); } doc = (ODocument) currentTx .saveRecord(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); return (RET) doc; } private <RET extends ORecord> RET saveGraph(ORecord iRecord, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate, ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { begin(); try { return saveInternal(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } finally { commit(); } } /** * Deletes a document. Behavior depends by the current running transaction if any. If no transaction is running then the record is * deleted immediately. If an Optimistic transaction is running then the record will be deleted at commit time. The current * transaction will continue to see the record as deleted, while others not. If a Pessimistic transaction is running, then an * exclusive lock is acquired against the record. Current transaction will continue to see the record as deleted, while others * cannot access to it since it's locked. * <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown. * * @param record record to delete * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. 
* * @see #setMVCC(boolean), {@link #isMVCC()} */ public ODatabaseDocumentAbstract delete(final ORecord record) { checkOpenness(); if (record == null) throw new ODatabaseException("Cannot delete null document"); if (record instanceof OVertex) { OVertexDelegate.deleteLinks((OVertex) record); } else if (record instanceof OEdge) { OEdgeDelegate.deleteLinks((OEdge) record); } // CHECK ACCESS ON SCHEMA CLASS NAME (IF ANY) if (record instanceof ODocument && ((ODocument) record).getClassName() != null) checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_DELETE, ((ODocument) record).getClassName()); try { currentTx.deleteRecord(record, OPERATION_MODE.SYNCHRONOUS); } catch (OException e) { throw e; } catch (Exception e) { if (record instanceof ODocument) throw OException.wrapException(new ODatabaseException( "Error on deleting record " + record.getIdentity() + " of class '" + ((ODocument) record).getClassName() + "'"), e); else throw OException.wrapException(new ODatabaseException("Error on deleting record " + record.getIdentity()), e); } return this; } /** * Returns the number of the records of the class iClassName. */ public long countClass(final String iClassName) { return countClass(iClassName, true); } /** * Returns the number of the records of the class iClassName considering also sub classes if polymorphic is true. */ public long countClass(final String iClassName, final boolean iPolymorphic) { final OClass cls = getMetadata().getImmutableSchemaSnapshot().getClass(iClassName); if (cls == null) throw new IllegalArgumentException("Class '" + iClassName + "' not found in database"); long totalOnDb = cls.count(iPolymorphic); long deletedInTx = 0; long addedInTx = 0; if (getTransaction().isActive()) for (ORecordOperation op : getTransaction().getRecordOperations()) { if (op.type == ORecordOperation.DELETED) { final ORecord rec = op.getRecord(); if (rec != null && rec instanceof ODocument) { OClass schemaClass = ((ODocument) rec).getSchemaClass(); if (iPolymorphic) { if (schemaClass.isSubClassOf(iClassName)) deletedInTx++; } else { if (iClassName.equals(schemaClass.getName()) || iClassName.equals(schemaClass.getShortName())) deletedInTx++; } } } if (op.type == ORecordOperation.CREATED) { final ORecord rec = op.getRecord(); if (rec != null && rec instanceof ODocument) { OClass schemaClass = ((ODocument) rec).getSchemaClass(); if (schemaClass != null) { if (iPolymorphic) { if (schemaClass.isSubClassOf(iClassName)) addedInTx++; } else { if (iClassName.equals(schemaClass.getName()) || iClassName.equals(schemaClass.getShortName())) addedInTx++; } } } } } return (totalOnDb + addedInTx) - deletedInTx; } /** * {@inheritDoc} */ @Override public ODatabase<ORecord> commit() { return commit(false); } @Override public ODatabaseDocument commit(boolean force) throws OTransactionException { checkOpenness(); checkIfActive(); if (!currentTx.isActive()) return this; if (!force && currentTx.amountOfNestedTxs() > 1) { //This just do count down no real commit here currentTx.commit(); return this; } // WAKE UP LISTENERS try { beforeCommitOperations(); } catch (OException e) { try { rollback(force); } catch (Exception re) { OLogManager.instance().error(this, "Exception during rollback `%08X`", re, System.identityHashCode(re)); } throw e; } try { currentTx.commit(force); } catch (RuntimeException e) { if ((e instanceof OHighLevelException) || (e instanceof ONeedRetryException)) OLogManager.instance().debug(this, "Error on transaction commit `%08X`", e, System.identityHashCode(e)); else 
OLogManager.instance().error(this, "Error on transaction commit `%08X`", e, System.identityHashCode(e)); // WAKE UP ROLLBACK LISTENERS beforeRollbackOperations(); try { // ROLLBACK TX AT DB LEVEL ((OTransactionAbstract) currentTx).internalRollback(); } catch (Exception re) { OLogManager.instance().error(this, "Error during transaction rollback `%08X`", re, System.identityHashCode(re)); } getLocalCache().clear(); // WAKE UP ROLLBACK LISTENERS afterRollbackOperations(); throw e; } // WAKE UP LISTENERS afterCommitOperations(); return this; } protected void beforeCommitOperations() { for (ODatabaseListener listener : browseListeners()) try { listener.onBeforeTxCommit(this); } catch (Exception e) { OLogManager.instance() .error(this, "Cannot commit the transaction: caught exception on execution of %s.onBeforeTxCommit() `%08X`", e, listener.getClass().getName(), System.identityHashCode(e)); throw OException.wrapException(new OTransactionException( "Cannot commit the transaction: caught exception on execution of " + listener.getClass().getName() + "#onBeforeTxCommit()"), e); } } protected void afterCommitOperations() { for (ODatabaseListener listener : browseListeners()) try { listener.onAfterTxCommit(this); } catch (Exception e) { final String message = "Error after the transaction has been committed. The transaction remains valid. The exception caught was on execution of " + listener.getClass() + ".onAfterTxCommit() `%08X`"; OLogManager.instance().error(this, message, e, System.identityHashCode(e)); throw OException.wrapException(new OTransactionBlockedException(message), e); } } protected void beforeRollbackOperations() { for (ODatabaseListener listener : browseListeners()) try { listener.onBeforeTxRollback(this); } catch (Exception t) { OLogManager.instance().error(this, "Error before transaction rollback `%08X`", t, System.identityHashCode(t)); } } protected void afterRollbackOperations() { for (ODatabaseListener listener : browseListeners()) try { listener.onAfterTxRollback(this); } catch (Exception t) { OLogManager.instance().error(this, "Error after transaction rollback `%08X`", t, System.identityHashCode(t)); } } /** * {@inheritDoc} */ @Override public ODatabase<ORecord> rollback() { return rollback(false); } @Override public ODatabaseDocument rollback(boolean force) throws OTransactionException { checkOpenness(); if (currentTx.isActive()) { if (!force && currentTx.amountOfNestedTxs() > 1) { //This just decrement the counter no real rollback here currentTx.rollback(); return this; } // WAKE UP LISTENERS beforeRollbackOperations(); currentTx.rollback(force, -1); // WAKE UP LISTENERS afterRollbackOperations(); } getLocalCache().clear(); return this; } /** * This method is internal, it can be subject to signature change or be removed, do not use. 
* * @Internal */ @Override public <DB extends ODatabase> DB getUnderlying() { throw new UnsupportedOperationException(); } @Override public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) { return getStorage().callInLock(iCallable, iExclusiveLock); } @Override public List<String> backup(final OutputStream out, final Map<String, Object> options, final Callable<Object> callable, final OCommandOutputListener iListener, final int compressionLevel, final int bufferSize) throws IOException { checkOpenness(); return getStorage().backup(out, options, callable, iListener, compressionLevel, bufferSize); } @Override public void restore(final InputStream in, final Map<String, Object> options, final Callable<Object> callable, final OCommandOutputListener iListener) throws IOException { checkOpenness(); getStorage().restore(in, options, callable, iListener); if (!isClosed()) { loadMetadata(); sharedContext = null; } } /** * {@inheritDoc} */ public OSBTreeCollectionManager getSbTreeCollectionManager() { return getStorage().getSBtreeCollectionManager(); } @Override public OCurrentStorageComponentsFactory getStorageVersions() { return componentsFactory; } public ORecordSerializer getSerializer() { return serializer; } /** * Sets serializer for the database which will be used for document serialization. * * @param serializer the serializer to set. */ public void setSerializer(ORecordSerializer serializer) { this.serializer = serializer; } @Override public void resetInitialization() { for (ORecordHook h : hooks.keySet()) h.onUnregister(); hooks.clear(); compileHooks(); close(); initialized = false; } @Override public String incrementalBackup(final String path) { checkOpenness(); checkIfActive(); return getStorage().incrementalBackup(path); } @Override @Deprecated public <DB extends ODatabaseDocument> DB checkSecurity(final String iResource, final int iOperation) { final String resourceSpecific = ORule.mapLegacyResourceToSpecificResource(iResource); final ORule.ResourceGeneric resourceGeneric = ORule.mapLegacyResourceToGenericResource(iResource); if (resourceSpecific == null || resourceSpecific.equals("*")) checkSecurity(resourceGeneric, null, iOperation); return checkSecurity(resourceGeneric, resourceSpecific, iOperation); } @Override @Deprecated public <DB extends ODatabaseDocument> DB checkSecurity(final String iResourceGeneric, final int iOperation, final Object iResourceSpecific) { final ORule.ResourceGeneric resourceGeneric = ORule.mapLegacyResourceToGenericResource(iResourceGeneric); if (iResourceSpecific == null || iResourceSpecific.equals("*")) return checkSecurity(resourceGeneric, iOperation, (Object) null); return checkSecurity(resourceGeneric, iOperation, iResourceSpecific); } @Override @Deprecated public <DB extends ODatabaseDocument> DB checkSecurity(final String iResourceGeneric, final int iOperation, final Object... iResourcesSpecific) { final ORule.ResourceGeneric resourceGeneric = ORule.mapLegacyResourceToGenericResource(iResourceGeneric); return checkSecurity(resourceGeneric, iOperation, iResourcesSpecific); } /** * @return <code>true</code> if database is obtained from the pool and <code>false</code> otherwise. */ @Override public boolean isPooled() { return false; } /** * Use #activateOnCurrentThread instead. */ @Deprecated public void setCurrentDatabaseInThreadLocal() { activateOnCurrentThread(); } /** * Activates current database instance on current thread. 
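 * <p>
 * A minimal sketch of the intended call pattern, assuming {@code db} was opened on another thread
 * and handed over to a worker (the record id shown is illustrative):
 * <pre>
 *   db.activateOnCurrentThread();                     // first call on this thread
 *   ORecord loaded = db.load(new ORecordId("#9:1"));  // regular operations can follow
 * </pre>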
*/ @Override public ODatabaseDocumentAbstract activateOnCurrentThread() { final ODatabaseRecordThreadLocal tl = ODatabaseRecordThreadLocal.instance(); if (tl != null) tl.set(this); return this; } @Override public boolean isActiveOnCurrentThread() { final ODatabaseRecordThreadLocal tl = ODatabaseRecordThreadLocal.instance(); final ODatabaseDocumentInternal db = tl != null ? tl.getIfDefined() : null; return db == this; } protected void checkOpenness() { if (status == STATUS.CLOSED) throw new ODatabaseException("Database '" + getURL() + "' is closed"); } private void popInHook(OIdentifiable id) { inHook.remove(id); } private boolean pushInHook(OIdentifiable id) { return inHook.add(id); } protected void callbackHookFailure(ORecord record, boolean wasNew, byte[] stream) { if (stream != null && stream.length > 0) callbackHooks(wasNew ? ORecordHook.TYPE.CREATE_FAILED : ORecordHook.TYPE.UPDATE_FAILED, record); } protected void callbackHookSuccess(final ORecord record, final boolean wasNew, final byte[] stream, final OStorageOperationResult<Integer> operationResult) { if (stream != null && stream.length > 0) { final ORecordHook.TYPE hookType; if (!operationResult.isMoved()) { hookType = wasNew ? ORecordHook.TYPE.AFTER_CREATE : ORecordHook.TYPE.AFTER_UPDATE; } else { hookType = wasNew ? ORecordHook.TYPE.CREATE_REPLICATED : ORecordHook.TYPE.UPDATE_REPLICATED; } callbackHooks(hookType, record); } } protected void callbackHookFinalize(final ORecord record, final boolean wasNew, final byte[] stream) { if (stream != null && stream.length > 0) { final ORecordHook.TYPE hookType; hookType = wasNew ? ORecordHook.TYPE.FINALIZE_CREATION : ORecordHook.TYPE.FINALIZE_UPDATE; callbackHooks(hookType, record); clearDocumentTracking(record); } } protected void clearDocumentTracking(final ORecord record) { if (record instanceof ODocument && ((ODocument) record).isTrackingChanges()) { ODocumentInternal.clearTrackData((ODocument) record); } } protected void checkRecordClass(final OClass recordClass, final String iClusterName, final ORecordId rid) { final OClass clusterIdClass = metadata.getImmutableSchemaSnapshot().getClassByClusterId(rid.getClusterId()); if (recordClass == null && clusterIdClass != null || clusterIdClass == null && recordClass != null || (recordClass != null && !recordClass.equals(clusterIdClass))) throw new IllegalArgumentException( "Record saved into cluster '" + iClusterName + "' should be saved with class '" + clusterIdClass + "' but has been created with class '" + recordClass + "'"); } protected void init() { currentTx = new OTransactionNoTx(this); } private OFreezableStorageComponent getFreezableStorage() { OStorage s = getStorage(); if (s instanceof OFreezableStorageComponent) return (OFreezableStorageComponent) s; else { OLogManager.instance().error(this, "Storage of type " + s.getType() + " does not support freeze operation", null); return null; } } public void checkIfActive() { final ODatabaseRecordThreadLocal tl = ODatabaseRecordThreadLocal.instance(); ODatabaseDocumentInternal currentDatabase = tl != null ? tl.get() : null; if (currentDatabase instanceof ODatabaseDocumentTx) { currentDatabase = ((ODatabaseDocumentTx) currentDatabase).internal; } if (currentDatabase != this) throw new IllegalStateException( "The current database instance (" + toString() + ") is not active on the current thread (" + Thread.currentThread() + "). 
Current active database is: " + currentDatabase); } public Set<Integer> getBlobClusterIds() { return getMetadata().getSchema().getBlobClusters(); } private void compileHooks() { final List<ORecordHook>[] intermediateHooksByScope = new List[ORecordHook.SCOPE.values().length]; for (ORecordHook.SCOPE scope : ORecordHook.SCOPE.values()) intermediateHooksByScope[scope.ordinal()] = new ArrayList<>(); for (ORecordHook hook : hooks.keySet()) for (ORecordHook.SCOPE scope : hook.getScopes()) intermediateHooksByScope[scope.ordinal()].add(hook); for (ORecordHook.SCOPE scope : ORecordHook.SCOPE.values()) { final int ordinal = scope.ordinal(); final List<ORecordHook> scopeHooks = intermediateHooksByScope[ordinal]; hooksByScope[ordinal] = scopeHooks.toArray(new ORecordHook[scopeHooks.size()]); } } @Override public OSharedContext getSharedContext() { // NOW NEED TO GET THE CONTEXT FROM RESOURCES IN FUTURE WILL BE NOT NEEDED if (sharedContext == null) { sharedContext = getStorage().getResource(OSharedContext.class.getName(), new Callable<OSharedContext>() { @Override public OSharedContext call() throws Exception { throw new ODatabaseException("Accessing to the database context before the database has bean initialized"); } }); } return sharedContext; } public static Object executeWithRetries(final OCallable<Object, Integer> callback, final int maxRetry) { return executeWithRetries(callback, maxRetry, 0, null); } public static Object executeWithRetries(final OCallable<Object, Integer> callback, final int maxRetry, final int waitBetweenRetry) { return executeWithRetries(callback, maxRetry, waitBetweenRetry, null); } public static Object executeWithRetries(final OCallable<Object, Integer> callback, final int maxRetry, final int waitBetweenRetry, final ORecord[] recordToReloadOnRetry) { ONeedRetryException lastException = null; for (int retry = 0; retry < maxRetry; ++retry) { try { return callback.call(retry); } catch (ONeedRetryException e) { // SAVE LAST EXCEPTION AND RETRY lastException = e; if (recordToReloadOnRetry != null) { // RELOAD THE RECORDS for (ORecord r : recordToReloadOnRetry) r.reload(); } if (waitBetweenRetry > 0) try { Thread.sleep(waitBetweenRetry); } catch (InterruptedException ignore) { Thread.currentThread().interrupt(); break; } } } throw lastException; } private void bindPropertiesToContext(OContextConfiguration configuration, final Map<String, Object> iProperties) { final String connectionStrategy = iProperties != null ? (String) iProperties.get("connectionStrategy") : null; if (connectionStrategy != null) configuration.setValue(OGlobalConfiguration.CLIENT_CONNECTION_STRATEGY, connectionStrategy); final String compressionMethod = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD.getKey().toLowerCase(Locale.ENGLISH)) : null; if (compressionMethod != null) // SAVE COMPRESSION METHOD IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD, compressionMethod); final String encryptionMethod = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD.getKey().toLowerCase(Locale.ENGLISH)) : null; if (encryptionMethod != null) // SAVE ENCRYPTION METHOD IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD, encryptionMethod); final String encryptionKey = iProperties != null ? 
(String) iProperties.get(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY.getKey().toLowerCase(Locale.ENGLISH)) : null; if (encryptionKey != null) // SAVE ENCRYPTION KEY IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY, encryptionKey); } private void bindPropertiesToContextGlobal(OContextConfiguration configuration, final Map<OGlobalConfiguration, Object> iProperties) { final String connectionStrategy = iProperties != null ? (String) iProperties.get("connectionStrategy") : null; if (connectionStrategy != null) configuration.setValue(OGlobalConfiguration.CLIENT_CONNECTION_STRATEGY, connectionStrategy); final String compressionMethod = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD) : null; if (compressionMethod != null) // SAVE COMPRESSION METHOD IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD, compressionMethod); final String encryptionMethod = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD) : null; if (encryptionMethod != null) // SAVE ENCRYPTION METHOD IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD, encryptionMethod); final String encryptionKey = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY) : null; if (encryptionKey != null) // SAVE ENCRYPTION KEY IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY, encryptionKey); } public boolean isUseLightweightEdges() { final List<OStorageEntryConfiguration> custom = (List<OStorageEntryConfiguration>) this.get(ATTRIBUTES.CUSTOM); for (OStorageEntryConfiguration c : custom) { if (c.name.equals("useLightweightEdges")) return Boolean.parseBoolean(c.value); } return false; } public void setUseLightweightEdges(boolean b) { this.setCustom("useLightweightEdges", b); } public OEdge newLightweightEdge(String iClassName, OVertex from, OVertex to) { OClass clazz = getMetadata().getSchema().getClass(iClassName); OEdgeDelegate result = new OEdgeDelegate(from, to, clazz, iClassName); return result; } public synchronized void queryStarted(String id, OResultSet rs) { if (this.activeQueries.size() > 1 && this.activeQueries.size() % 10 == 0) { StringBuilder msg = new StringBuilder(); msg.append("This database instance has "); msg.append(activeQueries.size()); msg.append(" open command/query result sets, please make sure you close them with OResultSet.close()"); OLogManager.instance().warn(this, msg.toString(), null); if (OLogManager.instance().isDebugEnabled()) { activeQueries.values().stream().map(pendingQuery -> pendingQuery.getExecutionPlan()).filter(plan -> plan != null) .forEach(plan -> OLogManager.instance().debug(this, plan.toString())); } } this.activeQueries.put(id, rs); } public synchronized void queryClosed(String id) { this.activeQueries.remove(id); } protected synchronized void closeActiveQueries() { while (activeQueries.size() > 0) { this.activeQueries.values().iterator().next().close();//the query automatically unregisters itself } } public OResultSet getActiveQuery(String id) { return activeQueries.get(id); } @Override public void internalCommit(OTransactionInternal transaction) { this.getStorage().commit(transaction); } @Override public boolean isClusterEdge(int cluster) { OClass clazz = getMetadata().getSchema().getClassByClusterId(cluster); if (clazz != null && clazz.isEdgeType()) return true; return false; } @Override public boolean isClusterVertex(int cluster) { OClass 
clazz = getMetadata().getSchema().getClassByClusterId(cluster); return clazz != null && clazz.isVertexType(); } }
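/*
 * A minimal usage sketch of the graph helpers defined above, assuming db is an open instance of
 * this class and that "Person" and "FriendOf" already exist in the schema as vertex and edge
 * classes respectively (all three names are illustrative):
 *
 *   OVertex alice = db.newVertex("Person");
 *   OVertex bob = db.newVertex("Person");
 *   db.save(alice);
 *   db.save(bob);
 *   OEdge knows = db.newEdge(alice, bob, "FriendOf");
 *   db.save(knows);
 */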
core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentAbstract.java
/* * * * Copyright 2010-2016 OrientDB LTD (http://orientdb.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://orientdb.com * */ package com.orientechnologies.orient.core.db.document; import com.orientechnologies.common.concur.ONeedRetryException; import com.orientechnologies.common.exception.OException; import com.orientechnologies.common.exception.OHighLevelException; import com.orientechnologies.common.listener.OListenerManger; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.util.OCallable; import com.orientechnologies.orient.core.Orient; import com.orientechnologies.orient.core.cache.OLocalRecordCache; import com.orientechnologies.orient.core.command.OCommandOutputListener; import com.orientechnologies.orient.core.command.OCommandRequest; import com.orientechnologies.orient.core.command.OCommandRequestInternal; import com.orientechnologies.orient.core.config.OContextConfiguration; import com.orientechnologies.orient.core.config.OGlobalConfiguration; import com.orientechnologies.orient.core.config.OStorageEntryConfiguration; import com.orientechnologies.orient.core.conflict.ORecordConflictStrategy; import com.orientechnologies.orient.core.db.*; import com.orientechnologies.orient.core.db.record.OCurrentStorageComponentsFactory; import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.db.record.ORecordElement; import com.orientechnologies.orient.core.db.record.ORecordOperation; import com.orientechnologies.orient.core.dictionary.ODictionary; import com.orientechnologies.orient.core.exception.*; import com.orientechnologies.orient.core.fetch.OFetchHelper; import com.orientechnologies.orient.core.hook.ORecordHook; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.intent.OIntent; import com.orientechnologies.orient.core.iterator.ORecordIteratorClass; import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster; import com.orientechnologies.orient.core.metadata.OMetadata; import com.orientechnologies.orient.core.metadata.OMetadataDefault; import com.orientechnologies.orient.core.metadata.schema.OClass; import com.orientechnologies.orient.core.metadata.schema.OSchema; import com.orientechnologies.orient.core.metadata.schema.OSchemaProxy; import com.orientechnologies.orient.core.metadata.security.*; import com.orientechnologies.orient.core.query.OQuery; import com.orientechnologies.orient.core.record.*; import com.orientechnologies.orient.core.record.impl.*; import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory; import com.orientechnologies.orient.core.serialization.serializer.record.ORecordSerializer; import com.orientechnologies.orient.core.serialization.serializer.record.ORecordSerializerFactory; import com.orientechnologies.orient.core.sql.executor.OResultSet; import 
com.orientechnologies.orient.core.storage.*; import com.orientechnologies.orient.core.storage.impl.local.OFreezableStorageComponent; import com.orientechnologies.orient.core.storage.impl.local.OMicroTransaction; import com.orientechnologies.orient.core.storage.impl.local.paginated.OOfflineClusterException; import com.orientechnologies.orient.core.storage.impl.local.paginated.ORecordSerializationContext; import com.orientechnologies.orient.core.storage.ridbag.sbtree.OSBTreeCollectionManager; import com.orientechnologies.orient.core.tx.*; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.*; import java.util.concurrent.Callable; /** * Document API entrypoint. * * @author Luca Garulli (l.garulli--(at)--orientdb.com) */ @SuppressWarnings("unchecked") public abstract class ODatabaseDocumentAbstract extends OListenerManger<ODatabaseListener> implements ODatabaseDocumentInternal { protected final Map<String, Object> properties = new HashMap<String, Object>(); protected Map<ORecordHook, ORecordHook.HOOK_POSITION> unmodifiableHooks; protected final Set<OIdentifiable> inHook = new HashSet<OIdentifiable>(); protected ORecordSerializer serializer; protected String url; protected STATUS status; protected OIntent currentIntent; protected ODatabaseInternal<?> databaseOwner; protected OMetadataDefault metadata; protected OImmutableUser user; protected final byte recordType = ODocument.RECORD_TYPE; protected final Map<ORecordHook, ORecordHook.HOOK_POSITION> hooks = new LinkedHashMap<ORecordHook, ORecordHook.HOOK_POSITION>(); protected boolean retainRecords = true; protected OLocalRecordCache localCache; protected OCurrentStorageComponentsFactory componentsFactory; protected boolean initialized = false; protected OTransaction currentTx; protected final ORecordHook[][] hooksByScope = new ORecordHook[ORecordHook.SCOPE.values().length][]; protected OSharedContext sharedContext; private boolean prefetchRecords; protected OMicroTransaction microTransaction = null; protected Map<String, OResultSet> activeQueries = new HashMap<>(); protected ODatabaseDocumentAbstract() { // DO NOTHING IS FOR EXTENDED OBJECTS super(false); } /** * @return default serializer which is used to serialize documents. Default serializer is common for all database instances. */ public static ORecordSerializer getDefaultSerializer() { return ORecordSerializerFactory.instance().getDefaultRecordSerializer(); } /** * Sets default serializer. The default serializer is common for all database instances. 
* * @param iDefaultSerializer new default serializer value */ public static void setDefaultSerializer(ORecordSerializer iDefaultSerializer) { ORecordSerializerFactory.instance().setDefaultRecordSerializer(iDefaultSerializer); } public void callOnOpenListeners() { // WAKE UP DB LIFECYCLE LISTENER for (Iterator<ODatabaseLifecycleListener> it = Orient.instance().getDbLifecycleListeners(); it.hasNext(); ) it.next().onOpen(getDatabaseOwner()); // WAKE UP LISTENERS for (ODatabaseListener listener : getListenersCopy()) try { listener.onOpen(getDatabaseOwner()); } catch (Exception e) { OLogManager.instance().error(this, "Error during call of database listener", e); } } protected abstract void loadMetadata(); public void callOnCloseListeners() { // WAKE UP DB LIFECYCLE LISTENER for (Iterator<ODatabaseLifecycleListener> it = Orient.instance().getDbLifecycleListeners(); it.hasNext(); ) it.next().onClose(getDatabaseOwner()); // WAKE UP LISTENERS for (ODatabaseListener listener : getListenersCopy()) try { listener.onClose(getDatabaseOwner()); } catch (Exception e) { OLogManager.instance().error(this, "Error during call of database listener", e); } } public void callOnDropListeners() { // WAKE UP LISTENERS for (ODatabaseListener listener : getListenersCopy()) try { activateOnCurrentThread(); listener.onDelete(getDatabaseOwner()); } catch (Exception e) { OLogManager.instance().error(this, "Error during call of database listener", e); } } /** * {@inheritDoc} */ public <RET extends ORecord> RET getRecord(final OIdentifiable iIdentifiable) { if (iIdentifiable instanceof ORecord) return (RET) iIdentifiable; return (RET) load(iIdentifiable.getIdentity()); } @Override public void reload() { checkIfActive(); if (this.isClosed()) throw new ODatabaseException("Cannot reload a closed db"); metadata.reload(); getStorage().reload(); } /** * {@inheritDoc} */ public <RET extends ORecord> RET load(final ORID iRecordId, final String iFetchPlan, final boolean iIgnoreCache) { return (RET) executeReadRecord((ORecordId) iRecordId, null, -1, iFetchPlan, iIgnoreCache, !iIgnoreCache, false, OStorage.LOCKING_STRATEGY.DEFAULT, new SimpleRecordReader(prefetchRecords)); } /** * Deletes the record checking the version. */ public ODatabase<ORecord> delete(final ORID iRecord, final int iVersion) { ORecord record = load(iRecord); ORecordInternal.setVersion(record, iVersion); delete(record); return this; } public ODatabaseDocumentInternal cleanOutRecord(final ORID iRecord, final int iVersion) { executeDeleteRecord(iRecord, iVersion, true, OPERATION_MODE.SYNCHRONOUS, true); return this; } public String getType() { return TYPE; } /** * Deletes the record without checking the version. 
*/ public ODatabaseDocument delete(final ORID iRecord, final OPERATION_MODE iMode) { ORecord record = load(iRecord); if (record == null) return this; delete(record, iMode); return this; } public ODatabaseDocument delete(final ORecord iRecord, final OPERATION_MODE iMode) { checkIfActive(); ODirtyManager dirtyManager = ORecordInternal.getDirtyManager(iRecord); if (iRecord instanceof OElement && dirtyManager != null && dirtyManager.getReferences() != null && !dirtyManager.getReferences() .isEmpty()) { if (((OElement) iRecord).isEdge() || ((OElement) iRecord).isVertex() && !getTransaction().isActive()) { begin(); try { currentTx.deleteRecord(iRecord, iMode); return this; } finally { commit(); } } } currentTx.deleteRecord(iRecord, iMode); return this; } public <REC extends ORecord> ORecordIteratorCluster<REC> browseCluster(final String iClusterName, final Class<REC> iClass) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); return new ORecordIteratorCluster<REC>(this, this, clusterId); } /** * {@inheritDoc} */ @Override @Deprecated public <REC extends ORecord> ORecordIteratorCluster<REC> browseCluster(final String iClusterName, final Class<REC> iRecordClass, final long startClusterPosition, final long endClusterPosition, final boolean loadTombstones) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); return new ORecordIteratorCluster<REC>(this, this, clusterId, startClusterPosition, endClusterPosition, loadTombstones, OStorage.LOCKING_STRATEGY.DEFAULT); } @Override public <REC extends ORecord> ORecordIteratorCluster<REC> browseCluster(String iClusterName, Class<REC> iRecordClass, long startClusterPosition, long endClusterPosition) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); return new ORecordIteratorCluster<REC>(this, this, clusterId, startClusterPosition, endClusterPosition); } /** * {@inheritDoc} */ public OCommandRequest command(final OCommandRequest iCommand) { checkSecurity(ORule.ResourceGeneric.COMMAND, ORole.PERMISSION_READ); checkIfActive(); final OCommandRequestInternal command = (OCommandRequestInternal) iCommand; try { command.reset(); return command; } catch (Exception e) { throw OException.wrapException(new ODatabaseException("Error on command execution"), e); } } /** * {@inheritDoc} */ public <RET extends List<?>> RET query(final OQuery<?> iCommand, final Object... 
iArgs) { checkIfActive(); iCommand.reset(); return (RET) iCommand.execute(iArgs); } /** * {@inheritDoc} */ public byte getRecordType() { return recordType; } /** * {@inheritDoc} */ @Override public long countClusterElements(final int[] iClusterIds) { return countClusterElements(iClusterIds, false); } /** * {@inheritDoc} */ @Override public long countClusterElements(final int iClusterId) { return countClusterElements(iClusterId, false); } /** * {@inheritDoc} */ @Override public void truncateCluster(String clusterName) { command("truncate cluster " + clusterName); } /** * {@inheritDoc} */ @Override public long countClusterElements(int iClusterId, boolean countTombstones) { final String name = getClusterNameById(iClusterId); if (name == null) return 0; checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, name); checkIfActive(); return getStorage().count(iClusterId, countTombstones); } /** * {@inheritDoc} */ @Override public long countClusterElements(int[] iClusterIds, boolean countTombstones) { checkIfActive(); String name; for (int iClusterId : iClusterIds) { name = getClusterNameById(iClusterId); checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, name); } return getStorage().count(iClusterIds, countTombstones); } /** * {@inheritDoc} */ @Override public long countClusterElements(final String iClusterName) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); if (clusterId < 0) throw new IllegalArgumentException("Cluster '" + iClusterName + "' was not found"); return getStorage().count(clusterId); } /** * {@inheritDoc} */ public OMetadataDefault getMetadata() { checkOpenness(); return metadata; } /** * {@inheritDoc} */ public <DB extends ODatabaseDocument> DB checkSecurity(final ORule.ResourceGeneric resourceGeneric, final String resourceSpecific, final int iOperation) { if (user != null) { try { user.allow(resourceGeneric, resourceSpecific, iOperation); } catch (OSecurityAccessException e) { if (OLogManager.instance().isDebugEnabled()) OLogManager.instance() .debug(this, "User '%s' tried to access the reserved resource '%s.%s', operation '%s'", getUser(), resourceGeneric, resourceSpecific, iOperation); throw e; } } return (DB) this; } /** * {@inheritDoc} */ public <DB extends ODatabaseDocument> DB checkSecurity(final ORule.ResourceGeneric iResourceGeneric, final int iOperation, final Object... 
iResourcesSpecific) { if (user != null) { try { if (iResourcesSpecific.length != 0) { for (Object target : iResourcesSpecific) { if (target != null) { user.allow(iResourceGeneric, target.toString(), iOperation); } else user.allow(iResourceGeneric, null, iOperation); } } else user.allow(iResourceGeneric, null, iOperation); } catch (OSecurityAccessException e) { if (OLogManager.instance().isDebugEnabled()) OLogManager.instance() .debug(this, "[checkSecurity] User '%s' tried to access the reserved resource '%s', target(s) '%s', operation '%s'", getUser(), iResourceGeneric, Arrays.toString(iResourcesSpecific), iOperation); throw e; } } return (DB) this; } /** * {@inheritDoc} */ public <DB extends ODatabaseDocument> DB checkSecurity(final ORule.ResourceGeneric iResourceGeneric, final int iOperation, final Object iResourceSpecific) { checkOpenness(); if (user != null) { try { if (iResourceSpecific != null) user.allow(iResourceGeneric, iResourceSpecific.toString(), iOperation); else user.allow(iResourceGeneric, null, iOperation); } catch (OSecurityAccessException e) { if (OLogManager.instance().isDebugEnabled()) OLogManager.instance() .debug(this, "[checkSecurity] User '%s' tried to access the reserved resource '%s', target '%s', operation '%s'", getUser(), iResourceGeneric, iResourceSpecific, iOperation); throw e; } } return (DB) this; } /** * {@inheritDoc} */ @Override public ODatabaseInternal<?> getDatabaseOwner() { ODatabaseInternal<?> current = databaseOwner; while (current != null && current != this && current.getDatabaseOwner() != current) current = current.getDatabaseOwner(); return current; } /** * {@inheritDoc} */ @Override public ODatabaseInternal<ORecord> setDatabaseOwner(ODatabaseInternal<?> iOwner) { databaseOwner = iOwner; return this; } /** * {@inheritDoc} */ public boolean isRetainRecords() { return retainRecords; } /** * {@inheritDoc} */ public ODatabaseDocument setRetainRecords(boolean retainRecords) { this.retainRecords = retainRecords; return this; } /** * {@inheritDoc} */ public <DB extends ODatabase> DB setStatus(final STATUS status) { checkIfActive(); setStatusInternal(status); return (DB) this; } public void setStatusInternal(final STATUS status) { this.status = status; } /** * Deprecated since v2.2 */ @Deprecated public void setDefaultClusterIdInternal(final int iDefClusterId) { checkIfActive(); getStorage().setDefaultClusterId(iDefClusterId); } /** * {@inheritDoc} */ public void setInternal(final ATTRIBUTES iAttribute, final Object iValue) { set(iAttribute, iValue); } /** * {@inheritDoc} */ public OSecurityUser getUser() { return user; } /** * {@inheritDoc} */ public void setUser(final OSecurityUser user) { checkIfActive(); if (user instanceof OUser) { OMetadata metadata = getMetadata(); if (metadata != null) { final OSecurity security = metadata.getSecurity(); this.user = new OImmutableUser(security.getVersion(), (OUser) user); } else this.user = new OImmutableUser(-1, (OUser) user); } else this.user = (OImmutableUser) user; } public void reloadUser() { if (user != null) { activateOnCurrentThread(); if (user.checkIfAllowed(ORule.ResourceGeneric.CLASS, OUser.CLASS_NAME, ORole.PERMISSION_READ) != null) { OMetadata metadata = getMetadata(); if (metadata != null) { final OSecurity security = metadata.getSecurity(); OUser secGetUser = security.getUser(user.getName()); if (secGetUser != null) user = new OImmutableUser(security.getVersion(), secGetUser); else user = new OImmutableUser(-1, new OUser()); } else user = new OImmutableUser(-1, new OUser()); } } } /** * 
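Always returns {@code true}: MVCC is always enabled for this database implementation and {@link #setMVCC(boolean)} is unsupported.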
{@inheritDoc} */ public boolean isMVCC() { return true; } /** * {@inheritDoc} */ public <DB extends ODatabase<?>> DB setMVCC(boolean mvcc) { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ public ODictionary<ORecord> getDictionary() { checkOpenness(); return metadata.getIndexManager().getDictionary(); } /** * {@inheritDoc} */ public <DB extends ODatabase<?>> DB registerHook(final ORecordHook iHookImpl, final ORecordHook.HOOK_POSITION iPosition) { checkOpenness(); checkIfActive(); final Map<ORecordHook, ORecordHook.HOOK_POSITION> tmp = new LinkedHashMap<ORecordHook, ORecordHook.HOOK_POSITION>(hooks); tmp.put(iHookImpl, iPosition); hooks.clear(); for (ORecordHook.HOOK_POSITION p : ORecordHook.HOOK_POSITION.values()) { for (Map.Entry<ORecordHook, ORecordHook.HOOK_POSITION> e : tmp.entrySet()) { if (e.getValue() == p) hooks.put(e.getKey(), e.getValue()); } } compileHooks(); return (DB) this; } /** * {@inheritDoc} */ public <DB extends ODatabase<?>> DB registerHook(final ORecordHook iHookImpl) { return (DB) registerHook(iHookImpl, ORecordHook.HOOK_POSITION.REGULAR); } /** * {@inheritDoc} */ public <DB extends ODatabase<?>> DB unregisterHook(final ORecordHook iHookImpl) { checkIfActive(); if (iHookImpl != null) { iHookImpl.onUnregister(); hooks.remove(iHookImpl); compileHooks(); } return (DB) this; } /** * {@inheritDoc} */ @Override public OLocalRecordCache getLocalCache() { return localCache; } /** * {@inheritDoc} */ public Map<ORecordHook, ORecordHook.HOOK_POSITION> getHooks() { return unmodifiableHooks; } /** * Callback the registered hooks if any. * * @param type Hook type. Define when hook is called. * @param id Record received in the callback * * @return True if the input record is changed, otherwise false */ public ORecordHook.RESULT callbackHooks(final ORecordHook.TYPE type, final OIdentifiable id) { if (id == null || hooks.isEmpty() || id.getIdentity().getClusterId() == 0) return ORecordHook.RESULT.RECORD_NOT_CHANGED; final ORecordHook.SCOPE scope = ORecordHook.SCOPE.typeToScope(type); final int scopeOrdinal = scope.ordinal(); final ORID identity = id.getIdentity().copy(); if (!pushInHook(identity)) return ORecordHook.RESULT.RECORD_NOT_CHANGED; try { final ORecord rec = id.getRecord(); if (rec == null) return ORecordHook.RESULT.RECORD_NOT_CHANGED; final OScenarioThreadLocal.RUN_MODE runMode = OScenarioThreadLocal.INSTANCE.getRunMode(); boolean recordChanged = false; for (ORecordHook hook : hooksByScope[scopeOrdinal]) { switch (runMode) { case DEFAULT: // NON_DISTRIBUTED OR PROXIED DB if (getStorage().isDistributed() && hook.getDistributedExecutionMode() == ORecordHook.DISTRIBUTED_EXECUTION_MODE.TARGET_NODE) // SKIP continue; break; // TARGET NODE case RUNNING_DISTRIBUTED: if (hook.getDistributedExecutionMode() == ORecordHook.DISTRIBUTED_EXECUTION_MODE.SOURCE_NODE) continue; } final ORecordHook.RESULT res = hook.onTrigger(type, rec); if (res == ORecordHook.RESULT.RECORD_CHANGED) recordChanged = true; else if (res == ORecordHook.RESULT.SKIP_IO) // SKIP IO OPERATION return res; else if (res == ORecordHook.RESULT.SKIP) // SKIP NEXT HOOKS AND RETURN IT return res; else if (res == ORecordHook.RESULT.RECORD_REPLACED) return res; } return recordChanged ? 
ORecordHook.RESULT.RECORD_CHANGED : ORecordHook.RESULT.RECORD_NOT_CHANGED; } finally { popInHook(identity); } } /** * {@inheritDoc} */ public boolean isValidationEnabled() { return (Boolean) get(ATTRIBUTES.VALIDATION); } /** * {@inheritDoc} */ public <DB extends ODatabaseDocument> DB setValidationEnabled(final boolean iEnabled) { set(ATTRIBUTES.VALIDATION, iEnabled); return (DB) this; } public ORecordConflictStrategy getConflictStrategy() { checkIfActive(); return getStorage().getConflictStrategy(); } public ODatabaseDocumentAbstract setConflictStrategy(final String iStrategyName) { checkIfActive(); getStorage().setConflictStrategy(Orient.instance().getRecordConflictStrategy().getStrategy(iStrategyName)); return this; } public ODatabaseDocumentAbstract setConflictStrategy(final ORecordConflictStrategy iResolver) { checkIfActive(); getStorage().setConflictStrategy(iResolver); return this; } @Override public OContextConfiguration getConfiguration() { checkIfActive(); if (getStorage() != null) return getStorage().getConfiguration().getContextConfiguration(); return null; } @Override public boolean declareIntent(final OIntent iIntent) { checkIfActive(); if (currentIntent != null) { if (iIntent != null && iIntent.getClass().equals(currentIntent.getClass())) // SAME INTENT: JUMP IT return false; // END CURRENT INTENT currentIntent.end(this); } currentIntent = iIntent; if (iIntent != null) iIntent.begin(this); return true; } @Override public OIntent getActiveIntent() { return currentIntent; } @Override public void close() { checkIfActive(); try { closeActiveQueries(); localCache.shutdown(); if (isClosed()) { status = STATUS.CLOSED; return; } try { rollback(true); } catch (Exception e) { OLogManager.instance().error(this, "Exception during rollback of active transaction", e); } if (status != STATUS.OPEN) return; callOnCloseListeners(); if (currentIntent != null) { currentIntent.end(this); currentIntent = null; } sharedContext = null; status = STATUS.CLOSED; localCache.clear(); if (getStorage() != null) getStorage().close(); } finally { // ALWAYS RESET TL ODatabaseRecordThreadLocal.instance().remove(); } } @Override public STATUS getStatus() { return status; } @Override public long getSize() { checkIfActive(); return getStorage().getSize(); } @Override public String getName() { return getStorage() != null ? getStorage().getName() : url; } @Override public String getURL() { return url != null ?
url : getStorage().getURL(); } @Override public int getDefaultClusterId() { checkIfActive(); return getStorage().getDefaultClusterId(); } @Override public int getClusters() { checkIfActive(); return getStorage().getClusters(); } @Override public boolean existsCluster(final String iClusterName) { checkIfActive(); return getStorage().getClusterNames().contains(iClusterName.toLowerCase(Locale.ENGLISH)); } @Override public Collection<String> getClusterNames() { checkIfActive(); return getStorage().getClusterNames(); } @Override public int getClusterIdByName(final String iClusterName) { if (iClusterName == null) return -1; checkIfActive(); return getStorage().getClusterIdByName(iClusterName.toLowerCase(Locale.ENGLISH)); } @Override public String getClusterNameById(final int iClusterId) { if (iClusterId < 0) return null; checkIfActive(); return getStorage().getPhysicalClusterNameById(iClusterId); } @Override public long getClusterRecordSizeByName(final String clusterName) { checkIfActive(); try { return getStorage().getClusterById(getClusterIdByName(clusterName)).getRecordsSize(); } catch (Exception e) { throw OException.wrapException(new ODatabaseException("Error on reading records size for cluster '" + clusterName + "'"), e); } } @Override public long getClusterRecordSizeById(final int clusterId) { checkIfActive(); try { return getStorage().getClusterById(clusterId).getRecordsSize(); } catch (Exception e) { throw OException .wrapException(new ODatabaseException("Error on reading records size for cluster with id '" + clusterId + "'"), e); } } @Override public boolean isClosed() { return status == STATUS.CLOSED || getStorage().isClosed(); } @Override public int addCluster(final String iClusterName, final Object... iParameters) { checkIfActive(); return getStorage().addCluster(iClusterName, iParameters); } @Override public int addCluster(final String iClusterName, final int iRequestedId, final Object... 
iParameters) { checkIfActive(); return getStorage().addCluster(iClusterName, iRequestedId, iParameters); } @Override public boolean dropCluster(final String iClusterName, final boolean iTruncate) { checkIfActive(); final int clusterId = getClusterIdByName(iClusterName); OSchemaProxy schema = metadata.getSchema(); OClass clazz = schema.getClassByClusterId(clusterId); if (clazz != null) clazz.removeClusterId(clusterId); if (schema.getBlobClusters().contains(clusterId)) schema.removeBlobCluster(iClusterName); getLocalCache().freeCluster(clusterId); checkForClusterPermissions(iClusterName); return getStorage().dropCluster(iClusterName, iTruncate); } @Override public boolean dropCluster(final int iClusterId, final boolean iTruncate) { checkIfActive(); checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_DELETE, getClusterNameById(iClusterId)); OSchemaProxy schema = metadata.getSchema(); final OClass clazz = schema.getClassByClusterId(iClusterId); if (clazz != null) clazz.removeClusterId(iClusterId); getLocalCache().freeCluster(iClusterId); if (schema.getBlobClusters().contains(iClusterId)) schema.removeBlobCluster(getClusterNameById(iClusterId)); checkForClusterPermissions(getClusterNameById(iClusterId)); return getStorage().dropCluster(iClusterId, iTruncate); } public void checkForClusterPermissions(final String iClusterName) { // CHECK FOR ORESTRICTED final Set<OClass> classes = getMetadata().getImmutableSchemaSnapshot().getClassesRelyOnCluster(iClusterName); for (OClass c : classes) { if (c.isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME)) throw new OSecurityException( "Class '" + c.getName() + "' cannot be truncated because has record level security enabled (extends '" + OSecurityShared.RESTRICTED_CLASSNAME + "')"); } } @Override public Object setProperty(final String iName, final Object iValue) { if (iValue == null) return properties.remove(iName.toLowerCase(Locale.ENGLISH)); else return properties.put(iName.toLowerCase(Locale.ENGLISH), iValue); } @Override public Object getProperty(final String iName) { return properties.get(iName.toLowerCase(Locale.ENGLISH)); } @Override public Iterator<Map.Entry<String, Object>> getProperties() { return properties.entrySet().iterator(); } @Override public Object get(final ATTRIBUTES iAttribute) { checkIfActive(); if (iAttribute == null) throw new IllegalArgumentException("attribute is null"); final OStorage storage = getStorage(); switch (iAttribute) { case STATUS: return getStatus(); case DEFAULTCLUSTERID: return getDefaultClusterId(); case TYPE: return getMetadata().getImmutableSchemaSnapshot().existsClass("V") ? 
"graph" : "document"; case DATEFORMAT: return storage.getConfiguration().getDateFormat(); case DATETIMEFORMAT: return storage.getConfiguration().getDateTimeFormat(); case TIMEZONE: return storage.getConfiguration().getTimeZone().getID(); case LOCALECOUNTRY: return storage.getConfiguration().getLocaleCountry(); case LOCALELANGUAGE: return storage.getConfiguration().getLocaleLanguage(); case CHARSET: return storage.getConfiguration().getCharset(); case CUSTOM: return storage.getConfiguration().getProperties(); case CLUSTERSELECTION: return storage.getConfiguration().getClusterSelection(); case MINIMUMCLUSTERS: return storage.getConfiguration().getMinimumClusters(); case CONFLICTSTRATEGY: return storage.getConfiguration().getConflictStrategy(); case VALIDATION: return storage.getConfiguration().isValidationEnabled(); } return null; } @Override public ORecordMetadata getRecordMetadata(final ORID rid) { checkIfActive(); return getStorage().getRecordMetadata(rid); } public OTransaction getTransaction() { checkIfActive(); return currentTx; } @Override public OBasicTransaction getMicroOrRegularTransaction() { return microTransaction != null && microTransaction.isActive() ? microTransaction : getTransaction(); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET load(final ORecord iRecord, final String iFetchPlan) { checkIfActive(); return (RET) currentTx.loadRecord(iRecord.getIdentity(), iRecord, iFetchPlan, false, false, OStorage.LOCKING_STRATEGY.DEFAULT); } @SuppressWarnings("unchecked") @Override @Deprecated public <RET extends ORecord> RET load(ORecord iRecord, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone, OStorage.LOCKING_STRATEGY iLockingStrategy) { checkIfActive(); return (RET) currentTx .loadRecord(iRecord.getIdentity(), iRecord, iFetchPlan, iIgnoreCache, !iIgnoreCache, loadTombstone, iLockingStrategy); } @SuppressWarnings("unchecked") @Override @Deprecated public <RET extends ORecord> RET load(final ORecord iRecord, final String iFetchPlan, final boolean iIgnoreCache, final boolean iUpdateCache, final boolean loadTombstone, final OStorage.LOCKING_STRATEGY iLockingStrategy) { checkIfActive(); return (RET) currentTx .loadRecord(iRecord.getIdentity(), iRecord, iFetchPlan, iIgnoreCache, iUpdateCache, loadTombstone, iLockingStrategy); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET load(final ORecord iRecord) { checkIfActive(); return (RET) currentTx.loadRecord(iRecord.getIdentity(), iRecord, null, false); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET load(final ORID recordId) { return (RET) currentTx.loadRecord(recordId, null, null, false); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET load(final ORID iRecordId, final String iFetchPlan) { checkIfActive(); return (RET) currentTx.loadRecord(iRecordId, null, iFetchPlan, false); } @SuppressWarnings("unchecked") public <RET extends ORecord> RET loadIfVersionIsNotLatest(final ORID rid, final int recordVersion, String fetchPlan, boolean ignoreCache) throws ORecordNotFoundException { checkIfActive(); return (RET) currentTx.loadRecordIfVersionIsNotLatest(rid, recordVersion, fetchPlan, ignoreCache); } @SuppressWarnings("unchecked") @Override @Deprecated public <RET extends ORecord> RET load(final ORID iRecordId, String iFetchPlan, final boolean iIgnoreCache, final boolean loadTombstone, OStorage.LOCKING_STRATEGY iLockingStrategy) { checkIfActive(); return (RET) currentTx.loadRecord(iRecordId, null, iFetchPlan, iIgnoreCache, 
loadTombstone, iLockingStrategy); } @SuppressWarnings("unchecked") @Override @Deprecated public <RET extends ORecord> RET load(final ORID iRecordId, String iFetchPlan, final boolean iIgnoreCache, final boolean iUpdateCache, final boolean loadTombstone, OStorage.LOCKING_STRATEGY iLockingStrategy) { checkIfActive(); return (RET) currentTx.loadRecord(iRecordId, null, iFetchPlan, iIgnoreCache, iUpdateCache, loadTombstone, iLockingStrategy); } @SuppressWarnings("unchecked") public <RET extends ORecord> RET reload(final ORecord iRecord) { return reload(iRecord, null, false); } @SuppressWarnings("unchecked") public <RET extends ORecord> RET reload(final ORecord iRecord, final String iFetchPlan) { return reload(iRecord, iFetchPlan, false); } @SuppressWarnings("unchecked") @Override public <RET extends ORecord> RET reload(final ORecord iRecord, final String iFetchPlan, final boolean iIgnoreCache) { return reload(iRecord, iFetchPlan, iIgnoreCache, true); } @Override public <RET extends ORecord> RET reload(ORecord record, String fetchPlan, boolean ignoreCache, boolean force) { checkIfActive(); final ORecord loadedRecord = currentTx.reloadRecord(record.getIdentity(), record, fetchPlan, ignoreCache, force); if (loadedRecord != null && record != loadedRecord) { record.fromStream(loadedRecord.toStream()); ORecordInternal.setVersion(record, loadedRecord.getVersion()); } else if (loadedRecord == null) { throw new ORecordNotFoundException(record.getIdentity()); } return (RET) record; } /** * Deletes the record without checking the version. */ public ODatabaseDocument delete(final ORID iRecord) { checkOpenness(); checkIfActive(); final ORecord rec = load(iRecord); if (rec != null) delete(rec); return this; } @Override public boolean hide(ORID rid) { checkOpenness(); checkIfActive(); if (currentTx.isActive()) throw new ODatabaseException("This operation can be executed only in non transaction mode"); return executeHideRecord(rid, OPERATION_MODE.SYNCHRONOUS); } @Override public OBinarySerializerFactory getSerializerFactory() { return componentsFactory.binarySerializerFactory; } @Deprecated public ODatabaseDocument begin(final OTransaction iTx) { begin(); return this; } public void rawBegin(final OTransaction iTx) { checkOpenness(); checkIfActive(); if (currentTx.isActive() && iTx.equals(currentTx)) { currentTx.begin(); } currentTx.rollback(true, 0); // WAKE UP LISTENERS for (ODatabaseListener listener : browseListeners()) try { listener.onBeforeTxBegin(this); } catch (Exception e) { final String message = "Error before the transaction begin"; OLogManager.instance().error(this, message, e); throw OException.wrapException(new OTransactionBlockedException(message), e); } currentTx = iTx; currentTx.begin(); } /** * {@inheritDoc} */ public <RET extends ORecord> RET load(final ORecord iRecord, final String iFetchPlan, final boolean iIgnoreCache) { return (RET) executeReadRecord((ORecordId) iRecord.getIdentity(), iRecord, -1, iFetchPlan, iIgnoreCache, !iIgnoreCache, false, OStorage.LOCKING_STRATEGY.NONE, new SimpleRecordReader(prefetchRecords)); } @Override public void setPrefetchRecords(boolean prefetchRecords) { this.prefetchRecords = prefetchRecords; } @Override public boolean isPrefetchRecords() { return prefetchRecords; } /** * This method is internal, it can be subject to signature change or be removed, do not use. 
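Loads a record by RID for internal callers: the active transaction (or micro-transaction) is checked first, then the local cache unless {@code ignoreCache} is set, and finally the underlying storage. BEFORE_READ and AFTER_READ hooks are fired around the load.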
* * @Internal */ public <RET extends ORecord> RET executeReadRecord(final ORecordId rid, ORecord iRecord, final int recordVersion, final String fetchPlan, final boolean ignoreCache, final boolean iUpdateCache, final boolean loadTombstones, final OStorage.LOCKING_STRATEGY lockingStrategy, RecordReader recordReader) { checkOpenness(); checkIfActive(); getMetadata().makeThreadLocalSchemaSnapshot(); ORecordSerializationContext.pushContext(); try { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, getClusterNameById(rid.getClusterId())); // either regular or micro tx must be active or both inactive assert !(getTransaction().isActive() && (microTransaction != null && microTransaction.isActive())); // SEARCH IN LOCAL TX ORecord record = getTransaction().getRecord(rid); if (record == OBasicTransaction.DELETED_RECORD) // DELETED IN TX return null; if (record == null) { if (microTransaction != null && microTransaction.isActive()) { record = microTransaction.getRecord(rid); if (record == OBasicTransaction.DELETED_RECORD) return null; } } if (record == null && !ignoreCache) // SEARCH INTO THE CACHE record = getLocalCache().findRecord(rid); if (record != null) { if (iRecord != null) { iRecord.fromStream(record.toStream()); ORecordInternal.setVersion(iRecord, record.getVersion()); record = iRecord; } OFetchHelper.checkFetchPlanValid(fetchPlan); if (callbackHooks(ORecordHook.TYPE.BEFORE_READ, record) == ORecordHook.RESULT.SKIP) return null; if (record.getInternalStatus() == ORecordElement.STATUS.NOT_LOADED) record.reload(); if (lockingStrategy == OStorage.LOCKING_STRATEGY.KEEP_SHARED_LOCK) { OLogManager.instance() .warn(this, "You use deprecated record locking strategy: %s it may lead to deadlocks " + lockingStrategy); record.lock(false); } else if (lockingStrategy == OStorage.LOCKING_STRATEGY.KEEP_EXCLUSIVE_LOCK) { OLogManager.instance() .warn(this, "You use deprecated record locking strategy: %s it may lead to deadlocks " + lockingStrategy); record.lock(true); } callbackHooks(ORecordHook.TYPE.AFTER_READ, record); if (record instanceof ODocument) ODocumentInternal.checkClass((ODocument) record, this); return (RET) record; } final ORawBuffer recordBuffer; if (!rid.isValid()) recordBuffer = null; else { OFetchHelper.checkFetchPlanValid(fetchPlan); int version; if (iRecord != null) version = iRecord.getVersion(); else version = recordVersion; recordBuffer = recordReader.readRecord(getStorage(), rid, fetchPlan, ignoreCache, version); } if (recordBuffer == null) return null; if (iRecord == null || ORecordInternal.getRecordType(iRecord) != recordBuffer.recordType) // NO SAME RECORD TYPE: CAN'T REUSE OLD ONE BUT CREATE A NEW ONE FOR IT iRecord = Orient.instance().getRecordFactoryManager().newInstance(recordBuffer.recordType, rid.getClusterId(), this); ORecordInternal.fill(iRecord, rid, recordBuffer.version, recordBuffer.buffer, false, this); if (iRecord instanceof ODocument) ODocumentInternal.checkClass((ODocument) iRecord, this); if (ORecordVersionHelper.isTombstone(iRecord.getVersion())) return (RET) iRecord; if (callbackHooks(ORecordHook.TYPE.BEFORE_READ, iRecord) == ORecordHook.RESULT.SKIP) return null; iRecord.fromStream(recordBuffer.buffer); callbackHooks(ORecordHook.TYPE.AFTER_READ, iRecord); if (iUpdateCache) getLocalCache().updateRecord(iRecord); return (RET) iRecord; } catch (OOfflineClusterException t) { throw t; } catch (ORecordNotFoundException t) { throw t; } catch (Exception t) { if (rid.isTemporary()) throw OException.wrapException(new ODatabaseException("Error on retrieving 
record using temporary RID: " + rid), t); else throw OException.wrapException(new ODatabaseException( "Error on retrieving record " + rid + " (cluster: " + getStorage().getPhysicalClusterNameById(rid.getClusterId()) + ")"), t); } finally { ORecordSerializationContext.pullContext(); getMetadata().clearThreadLocalSchemaSnapshot(); } } public int assignAndCheckCluster(ORecord record, String iClusterName) { ORecordId rid = (ORecordId) record.getIdentity(); // if provided a cluster name use it. if (rid.getClusterId() <= ORID.CLUSTER_POS_INVALID && iClusterName != null) { rid.setClusterId(getClusterIdByName(iClusterName)); if (rid.getClusterId() == -1) throw new IllegalArgumentException("Cluster name '" + iClusterName + "' is not configured"); } OClass schemaClass = null; // if cluster id is not set yet try to find it out if (rid.getClusterId() <= ORID.CLUSTER_ID_INVALID && getStorage().isAssigningClusterIds()) { if (record instanceof ODocument) { schemaClass = ODocumentInternal.getImmutableSchemaClass(((ODocument) record)); if (schemaClass != null) { if (schemaClass.isAbstract()) throw new OSchemaException("Document belongs to abstract class " + schemaClass.getName() + " and cannot be saved"); rid.setClusterId(schemaClass.getClusterForNewInstance((ODocument) record)); } else throw new ODatabaseException("Cannot save (1) document " + record + ": no class or cluster defined"); } else { if (record instanceof ORecordBytes) { Set<Integer> blobs = getBlobClusterIds(); if (blobs.size() == 0) { rid.setClusterId(getDefaultClusterId()); } else { rid.setClusterId(blobs.iterator().next()); } } else { throw new ODatabaseException("Cannot save (3) document " + record + ": no class or cluster defined"); } } } else if (record instanceof ODocument) schemaClass = ODocumentInternal.getImmutableSchemaClass(((ODocument) record)); // If the cluster id was set check is validity if (rid.getClusterId() > ORID.CLUSTER_ID_INVALID) { if (schemaClass != null) { String messageClusterName = getClusterNameById(rid.getClusterId()); checkRecordClass(schemaClass, messageClusterName, rid); if (!schemaClass.hasClusterId(rid.getClusterId())) { throw new IllegalArgumentException( "Cluster name '" + messageClusterName + "' (id=" + rid.getClusterId() + ") is not configured to store the class '" + schemaClass.getName() + "', valid are " + Arrays.toString(schemaClass.getClusterIds())); } } } return rid.getClusterId(); } public <RET extends ORecord> RET executeSaveEmptyRecord(ORecord record, String clusterName) { ORecordId rid = (ORecordId) record.getIdentity(); assert rid.isNew(); ORecordInternal.onBeforeIdentityChanged(record); int id = assignAndCheckCluster(record, clusterName); clusterName = getClusterNameById(id); checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_CREATE, clusterName); byte[] content = getSerializer().writeClassOnly(record); final OStorageOperationResult<OPhysicalPosition> ppos = getStorage() .createRecord(rid, content, record.getVersion(), recordType, OPERATION_MODE.SYNCHRONOUS.ordinal(), null); ORecordInternal.setVersion(record, ppos.getResult().recordVersion); ((ORecordId) record.getIdentity()).copyFrom(rid); ORecordInternal.onAfterIdentityChanged(record); return (RET) record; } public abstract <RET extends ORecord> RET executeSaveRecord(final ORecord record, String clusterName, final int ver, final OPERATION_MODE mode, boolean forceCreate, final ORecordCallback<? 
extends Number> recordCreatedCallback, ORecordCallback<Integer> recordUpdatedCallback); public abstract void executeDeleteRecord(OIdentifiable record, final int iVersion, final boolean iRequired, final OPERATION_MODE iMode, boolean prohibitTombstones); /** * This method is internal, it can be subject to signature change or be removed, do not use. * * @Internal */ public boolean executeHideRecord(OIdentifiable record, final OPERATION_MODE iMode) { checkOpenness(); checkIfActive(); final ORecordId rid = (ORecordId) record.getIdentity(); if (rid == null) throw new ODatabaseException( "Cannot hide record because it has no identity. Probably was created from scratch or contains projections of fields rather than a full record"); if (!rid.isValid()) return false; checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_DELETE, getClusterNameById(rid.getClusterId())); getMetadata().makeThreadLocalSchemaSnapshot(); if (record instanceof ODocument) ODocumentInternal.checkClass((ODocument) record, this); ORecordSerializationContext.pushContext(); try { final OStorageOperationResult<Boolean> operationResult; operationResult = getStorage().hideRecord(rid, iMode.ordinal(), null); // REMOVE THE RECORD FROM 1 AND 2 LEVEL CACHES if (!operationResult.isMoved()) getLocalCache().deleteRecord(rid); return operationResult.getResult(); } finally { ORecordSerializationContext.pullContext(); getMetadata().clearThreadLocalSchemaSnapshot(); } } public ODatabaseDocumentAbstract begin() { return begin(OTransaction.TXTYPE.OPTIMISTIC); } public ODatabaseDocumentAbstract begin(final OTransaction.TXTYPE iType) { checkOpenness(); checkIfActive(); if (currentTx.isActive()) { if (iType == OTransaction.TXTYPE.OPTIMISTIC && currentTx instanceof OTransactionOptimistic) { currentTx.begin(); return this; } currentTx.rollback(true, 0); } // CHECK IT'S NOT INSIDE A HOOK if (!inHook.isEmpty()) throw new IllegalStateException("Cannot begin a transaction while a hook is executing"); // WAKE UP LISTENERS for (ODatabaseListener listener : browseListeners()) try { listener.onBeforeTxBegin(this); } catch (Exception e) { OLogManager.instance().error(this, "Error before tx begin", e); } switch (iType) { case NOTX: setDefaultTransactionMode(); break; case OPTIMISTIC: currentTx = new OTransactionOptimistic(this); break; case PESSIMISTIC: throw new UnsupportedOperationException("Pessimistic transaction"); } currentTx.begin(); return this; } public void setDefaultTransactionMode() { if (!(currentTx instanceof OTransactionNoTx)) currentTx = new OTransactionNoTx(this); } /** * {@inheritDoc} */ @Override public void freeze(final boolean throwException) { checkOpenness(); if (!(getStorage() instanceof OFreezableStorageComponent)) { OLogManager.instance().error(this, "Only local paginated storage supports freeze. If you are using remote client please use OServerAdmin instead", null); return; } final long startTime = Orient.instance().getProfiler().startChrono(); final OFreezableStorageComponent storage = getFreezableStorage(); if (storage != null) { storage.freeze(throwException); } Orient.instance().getProfiler() .stopChrono("db." + getName() + ".freeze", "Time to freeze the database", startTime, "db.*.freeze"); } /** * {@inheritDoc} */ @Override public void freeze() { checkOpenness(); if (!(getStorage() instanceof OFreezableStorageComponent)) { OLogManager.instance().error(this, "Only local paginated storage supports freeze. 
" + "If you use remote client please use OServerAdmin instead", null); return; } final long startTime = Orient.instance().getProfiler().startChrono(); final OFreezableStorageComponent storage = getFreezableStorage(); if (storage != null) { storage.freeze(false); } Orient.instance().getProfiler() .stopChrono("db." + getName() + ".freeze", "Time to freeze the database", startTime, "db.*.freeze"); } @Override public boolean isFrozen() { if (!(getStorage() instanceof OFreezableStorageComponent)) return false; final OFreezableStorageComponent storage = getFreezableStorage(); if (storage != null) return storage.isFrozen(); return false; } /** * {@inheritDoc} */ @Override public void release() { checkOpenness(); if (!(getStorage() instanceof OFreezableStorageComponent)) { OLogManager.instance().error(this, "Only local paginated storage supports release. If you are using remote client please use OServerAdmin instead", null); return; } final long startTime = Orient.instance().getProfiler().startChrono(); final OFreezableStorageComponent storage = getFreezableStorage(); if (storage != null) { storage.release(); } Orient.instance().getProfiler() .stopChrono("db." + getName() + ".release", "Time to release the database", startTime, "db.*.release"); } /** * Creates a new ODocument. */ public ODocument newInstance() { return new ODocument(); } @Override public OBlob newBlob(byte[] bytes) { return new ORecordBytes(bytes); } @Override public OBlob newBlob() { return new ORecordBytes(); } /** * Creates a document with specific class. * * @param iClassName the name of class that should be used as a class of created document. * * @return new instance of document. */ @Override public ODocument newInstance(final String iClassName) { return new ODocument(iClassName); } @Override public OElement newElement() { return newInstance(); } @Override public OElement newElement(String className) { return newInstance(className); } public OElement newElement(OClass clazz) { return newInstance(clazz.getName()); } public OVertex newVertex(final String iClassName) { OClass cl = getClass(iClassName); if (cl == null || !cl.isVertexType()) { throw new IllegalArgumentException("" + iClassName + " is not a vertex class"); } OVertex doc = new OVertexDocument(cl); return doc; } @Override public OVertex newVertex(OClass type) { if (type == null) { return newVertex("E"); } return newVertex(type.getName()); } @Override public OEdge newEdge(OVertex from, OVertex to, String type) { OClass cl = getClass(type); if (cl == null || !cl.isEdgeType()) { throw new IllegalArgumentException("" + type + " is not an edge class"); } ODocument doc = new OEdgeDocument(cl); return addEdgeInternal(from, to, type); } @Override public OEdge newEdge(OVertex from, OVertex to, OClass type) { if (type == null) { return newEdge(from, to, "E"); } return newEdge(from, to, type.getName()); } private OEdge addEdgeInternal(final OVertex currentVertex, final OVertex inVertex, String iClassName, final Object... 
fields) { if (currentVertex == null) throw new IllegalArgumentException("From vertex is null"); if (inVertex == null) throw new IllegalArgumentException("To vertex is null"); OEdge edge = null; ODocument outDocument = null; ODocument inDocument = null; boolean outDocumentModified = false; if (checkDeletedInTx(currentVertex)) throw new ORecordNotFoundException(currentVertex.getIdentity(), "The vertex " + currentVertex.getIdentity() + " has been deleted"); if (checkDeletedInTx(inVertex)) throw new ORecordNotFoundException(inVertex.getIdentity(), "The vertex " + inVertex.getIdentity() + " has been deleted"); final int maxRetries = 1;//TODO for (int retry = 0; retry < maxRetries; ++retry) { try { // TEMPORARY STATIC LOCK TO AVOID MT PROBLEMS AGAINST OMVRBTreeRID if (outDocument == null) { outDocument = currentVertex.getRecord(); if (outDocument == null) throw new IllegalArgumentException("source vertex is invalid (rid=" + currentVertex.getIdentity() + ")"); } if (inDocument == null) { inDocument = inVertex.getRecord(); if (inDocument == null) throw new IllegalArgumentException("destination vertex is invalid (rid=" + inVertex.getIdentity() + ")"); } if (!ODocumentInternal.getImmutableSchemaClass(outDocument).isVertexType()) throw new IllegalArgumentException("source record is not a vertex"); if (!ODocumentInternal.getImmutableSchemaClass(inDocument).isVertexType()) throw new IllegalArgumentException("destination record is not a vertex"); OVertex to = inVertex; OVertex from = currentVertex; OSchema schema = getMetadata().getSchema(); final OClass edgeType = schema.getClass(iClassName); if (edgeType == null) // AUTO CREATE CLASS schema.createClass(iClassName); else // OVERWRITE CLASS NAME BECAUSE ATTRIBUTES ARE CASE SENSITIVE iClassName = edgeType.getName(); final String outFieldName = getConnectionFieldName(ODirection.OUT, iClassName); final String inFieldName = getConnectionFieldName(ODirection.IN, iClassName); // since the label for the edge can potentially get re-assigned // before being pushed into the OrientEdge, the // null check has to go here. if (iClassName == null) throw new IllegalArgumentException("Class " + iClassName + " cannot be found"); // CREATE THE EDGE DOCUMENT TO STORE FIELDS TOO if (isUseLightweightEdges() && (fields == null || fields.length == 0)) { edge = newLightweightEdge(iClassName, from, to); OVertexDelegate.createLink(from.getRecord(), to.getRecord(), outFieldName); OVertexDelegate.createLink(to.getRecord(), from.getRecord(), inFieldName); } else { edge = newInstance(iClassName).asEdge().get(); edge.setProperty("out", currentVertex.getRecord()); edge.setProperty("in", inDocument.getRecord()); if (fields != null) { for (int i = 0; i < fields.length; i += 2) { String fieldName = "" + fields[i]; if (fields.length <= i + 1) { break; } Object fieldValue = fields[i + 1]; edge.setProperty(fieldName, fieldValue); } } if (!outDocumentModified) { // OUT-VERTEX ---> IN-VERTEX/EDGE OVertexDelegate.createLink(outDocument, edge.getRecord(), outFieldName); } // IN-VERTEX ---> OUT-VERTEX/EDGE OVertexDelegate.createLink(inDocument, edge.getRecord(), inFieldName); } // OK break; } catch (ONeedRetryException ignore) { // RETRY if (!outDocumentModified) outDocument.reload(); else if (inDocument != null) inDocument.reload(); } catch (RuntimeException e) { // REVERT CHANGES.
EDGE.REMOVE() TAKES CARE TO UPDATE ALSO BOTH VERTICES IN CASE try { edge.delete(); } catch (Exception ex) { OLogManager.instance().error(this, "Error during edge deletion", ex); } throw e; } catch (Exception e) { // REVERT CHANGES. EDGE.REMOVE() TAKES CARE TO UPDATE ALSO BOTH VERTICES IN CASE try { edge.delete(); } catch (Exception ex) { OLogManager.instance().error(this, "Error during edge deletion", ex); } throw new IllegalStateException("Error on addEdge in non tx environment", e); } } return edge; } private boolean checkDeletedInTx(OVertex currentVertex) { ORID id; if (currentVertex.getRecord() != null) id = currentVertex.getRecord().getIdentity(); else return false; final ORecordOperation oper = getTransaction().getRecordEntry(id); if (oper == null) return id.isTemporary(); else return oper.type == ORecordOperation.DELETED; } private static String getConnectionFieldName(final ODirection iDirection, final String iClassName) { if (iDirection == null || iDirection == ODirection.BOTH) throw new IllegalArgumentException("Direction not valid"); // PREFIX "out_" or "in_" TO THE FIELD NAME final String prefix = iDirection == ODirection.OUT ? "out_" : "in_"; if (iClassName == null || iClassName.isEmpty() || iClassName.equals("E")) return prefix; return prefix + iClassName; } /** * {@inheritDoc} */ public ORecordIteratorClass<ODocument> browseClass(final String iClassName) { return browseClass(iClassName, true); } /** * {@inheritDoc} */ public ORecordIteratorClass<ODocument> browseClass(final String iClassName, final boolean iPolymorphic) { if (getMetadata().getImmutableSchemaSnapshot().getClass(iClassName) == null) throw new IllegalArgumentException("Class '" + iClassName + "' not found in current database"); checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_READ, iClassName); return new ORecordIteratorClass<ODocument>(this, this, iClassName, iPolymorphic, false); } /** * {@inheritDoc} */ @Override public ORecordIteratorCluster<ODocument> browseCluster(final String iClusterName) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); return new ORecordIteratorCluster<ODocument>(this, this, getClusterIdByName(iClusterName)); } /** * {@inheritDoc} */ @Override public Iterable<ODatabaseListener> getListeners() { return getListenersCopy(); } /** * {@inheritDoc} */ @Override @Deprecated public ORecordIteratorCluster<ODocument> browseCluster(String iClusterName, long startClusterPosition, long endClusterPosition, boolean loadTombstones) { checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, iClusterName); return new ORecordIteratorCluster<ODocument>(this, this, getClusterIdByName(iClusterName), startClusterPosition, endClusterPosition, loadTombstones, OStorage.LOCKING_STRATEGY.DEFAULT); } /** * Saves a document to the database. Behavior depends by the current running transaction if any. If no transaction is running then * changes apply immediately. If an Optimistic transaction is running then the record will be changed at commit time. The current * transaction will continue to see the record as modified, while others not. If a Pessimistic transaction is running, then an * exclusive lock is acquired against the record. Current transaction will continue to see the record as modified, while others * cannot access to it since it's locked. 
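<p> For illustration only, a minimal usage sketch on a database instance {@code db} (the {@code "Person"} class name is an assumption, not part of this API): {@code ODocument doc = db.newInstance("Person"); doc.field("name", "John"); db.save(doc);}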
* <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown.Before to save the document it must be valid following the constraints * declared in the schema if any (can work also in schema-less mode). To validate the document the {@link ODocument#validate()} is * called. * * @param iRecord Record to save. * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. * * @throws OConcurrentModificationException if the version of the document is different by the version contained in the database. * @throws OValidationException if the document breaks some validation constraints defined in the schema * @see #setMVCC(boolean), {@link #isMVCC()} */ @Override public <RET extends ORecord> RET save(final ORecord iRecord) { return (RET) save(iRecord, null, OPERATION_MODE.SYNCHRONOUS, false, null, null); } /** * Saves a document to the database. Behavior depends by the current running transaction if any. If no transaction is running then * changes apply immediately. If an Optimistic transaction is running then the record will be changed at commit time. The current * transaction will continue to see the record as modified, while others not. If a Pessimistic transaction is running, then an * exclusive lock is acquired against the record. Current transaction will continue to see the record as modified, while others * cannot access to it since it's locked. * <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown.Before to save the document it must be valid following the constraints * declared in the schema if any (can work also in schema-less mode). To validate the document the {@link ODocument#validate()} is * called. * * @param iRecord Record to save. * @param iForceCreate Flag that indicates that record should be created. If record with current rid already exists, * exception is thrown * @param iRecordCreatedCallback callback that is called after creation of new record * @param iRecordUpdatedCallback callback that is called after record update * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. * * @throws OConcurrentModificationException if the version of the document is different by the version contained in the database. * @throws OValidationException if the document breaks some validation constraints defined in the schema * @see #setMVCC(boolean), {@link #isMVCC()} */ @Override public <RET extends ORecord> RET save(final ORecord iRecord, final OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { return save(iRecord, null, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } /** * Saves a document specifying a cluster where to store the record. Behavior depends by the current running transaction if any. If * no transaction is running then changes apply immediately. If an Optimistic transaction is running then the record will be * changed at commit time. The current transaction will continue to see the record as modified, while others not. If a Pessimistic * transaction is running, then an exclusive lock is acquired against the record. 
Current transaction will continue to see the * record as modified, while others cannot access to it since it's locked. * <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown. Before to save the document it must be valid following the constraints * declared in the schema if any (can work also in schema-less mode). To validate the document the {@link ODocument#validate()} is * called. * * @param iRecord Record to save * @param iClusterName Cluster name where to save the record * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. * * @throws OConcurrentModificationException if the version of the document is different by the version contained in the database. * @throws OValidationException if the document breaks some validation constraints defined in the schema * @see #setMVCC(boolean), {@link #isMVCC()}, ODocument#validate() */ @Override public <RET extends ORecord> RET save(final ORecord iRecord, final String iClusterName) { return (RET) save(iRecord, iClusterName, OPERATION_MODE.SYNCHRONOUS, false, null, null); } /** * Saves a document specifying a cluster where to store the record. Behavior depends by the current running transaction if any. If * no transaction is running then changes apply immediately. If an Optimistic transaction is running then the record will be * changed at commit time. The current transaction will continue to see the record as modified, while others not. If a Pessimistic * transaction is running, then an exclusive lock is acquired against the record. Current transaction will continue to see the * record as modified, while others cannot access to it since it's locked. * <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown. Before to save the document it must be valid following the constraints * declared in the schema if any (can work also in schema-less mode). To validate the document the {@link ODocument#validate()} is * called. * * @param iRecord Record to save * @param iClusterName Cluster name where to save the record * @param iMode Mode of save: synchronous (default) or asynchronous * @param iForceCreate Flag that indicates that record should be created. If record with current rid already exists, * exception is thrown * @param iRecordCreatedCallback callback that is called after creation of new record * @param iRecordUpdatedCallback callback that is called after record update * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. * * @throws OConcurrentModificationException if the version of the document is different by the version contained in the database. * @throws OValidationException if the document breaks some validation constraints defined in the schema * @see #setMVCC(boolean), {@link #isMVCC()}, ODocument#validate() */ @Override public <RET extends ORecord> RET save(ORecord iRecord, String iClusterName, final OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<? 
extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { checkOpenness(); ODirtyManager dirtyManager = ORecordInternal.getDirtyManager(iRecord); if (iRecord instanceof OElement && dirtyManager != null && dirtyManager.getReferences() != null && !dirtyManager.getReferences() .isEmpty()) { if ((((OElement) iRecord).isVertex() || ((OElement) iRecord).isEdge()) && !getTransaction().isActive() && inHook.isEmpty()) { return saveGraph(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } } return saveInternal(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } private <RET extends ORecord> RET saveInternal(ORecord iRecord, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate, ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { if (iRecord instanceof OVertex) { iRecord = iRecord.getRecord(); } if (iRecord instanceof OEdge) { iRecord = iRecord.getRecord(); } if (!(iRecord instanceof ODocument)) { assignAndCheckCluster(iRecord, iClusterName); return (RET) currentTx.saveRecord(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } ODocument doc = (ODocument) iRecord; ODocumentInternal.checkClass(doc, this); // IN TX THE VALIDATION MAY BE RUN TWICE BUT IS CORRECT BECAUSE OF DIFFERENT RECORD STATUS try { doc.validate(); } catch (OValidationException e) { doc.undo(); throw e; } ODocumentInternal.convertAllMultiValuesToTrackedVersions(doc); if (iForceCreate || !doc.getIdentity().isValid()) { if (doc.getClassName() != null) checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_CREATE, doc.getClassName()); assignAndCheckCluster(doc, iClusterName); } else { // UPDATE: CHECK ACCESS ON SCHEMA CLASS NAME (IF ANY) if (doc.getClassName() != null) checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_UPDATE, doc.getClassName()); } doc = (ODocument) currentTx .saveRecord(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); return (RET) doc; } private <RET extends ORecord> RET saveGraph(ORecord iRecord, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate, ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<Integer> iRecordUpdatedCallback) { begin(); try { return saveInternal(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } finally { commit(); } } /** * Deletes a document. Behavior depends by the current running transaction if any. If no transaction is running then the record is * deleted immediately. If an Optimistic transaction is running then the record will be deleted at commit time. The current * transaction will continue to see the record as deleted, while others not. If a Pessimistic transaction is running, then an * exclusive lock is acquired against the record. Current transaction will continue to see the record as deleted, while others * cannot access to it since it's locked. * <p> * If MVCC is enabled and the version of the document is different by the version stored in the database, then a {@link * OConcurrentModificationException} exception is thrown. * * @param record record to delete * * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain. 
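* @throws OConcurrentModificationException if the version of the document is different from the version stored in the database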
* * @see #setMVCC(boolean), {@link #isMVCC()} */ public ODatabaseDocumentAbstract delete(final ORecord record) { checkOpenness(); if (record == null) throw new ODatabaseException("Cannot delete null document"); if (record instanceof OVertex) { OVertexDelegate.deleteLinks((OVertex) record); } else if (record instanceof OEdge) { OEdgeDelegate.deleteLinks((OEdge) record); } // CHECK ACCESS ON SCHEMA CLASS NAME (IF ANY) if (record instanceof ODocument && ((ODocument) record).getClassName() != null) checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_DELETE, ((ODocument) record).getClassName()); try { currentTx.deleteRecord(record, OPERATION_MODE.SYNCHRONOUS); } catch (OException e) { throw e; } catch (Exception e) { if (record instanceof ODocument) throw OException.wrapException(new ODatabaseException( "Error on deleting record " + record.getIdentity() + " of class '" + ((ODocument) record).getClassName() + "'"), e); else throw OException.wrapException(new ODatabaseException("Error on deleting record " + record.getIdentity()), e); } return this; } /** * Returns the number of the records of the class iClassName. */ public long countClass(final String iClassName) { return countClass(iClassName, true); } /** * Returns the number of the records of the class iClassName considering also sub classes if polymorphic is true. */ public long countClass(final String iClassName, final boolean iPolymorphic) { final OClass cls = getMetadata().getImmutableSchemaSnapshot().getClass(iClassName); if (cls == null) throw new IllegalArgumentException("Class '" + iClassName + "' not found in database"); long totalOnDb = cls.count(iPolymorphic); long deletedInTx = 0; long addedInTx = 0; if (getTransaction().isActive()) for (ORecordOperation op : getTransaction().getRecordOperations()) { if (op.type == ORecordOperation.DELETED) { final ORecord rec = op.getRecord(); if (rec != null && rec instanceof ODocument) { OClass schemaClass = ((ODocument) rec).getSchemaClass(); if (iPolymorphic) { if (schemaClass.isSubClassOf(iClassName)) deletedInTx++; } else { if (iClassName.equals(schemaClass.getName()) || iClassName.equals(schemaClass.getShortName())) deletedInTx++; } } } if (op.type == ORecordOperation.CREATED) { final ORecord rec = op.getRecord(); if (rec != null && rec instanceof ODocument) { OClass schemaClass = ((ODocument) rec).getSchemaClass(); if (schemaClass != null) { if (iPolymorphic) { if (schemaClass.isSubClassOf(iClassName)) addedInTx++; } else { if (iClassName.equals(schemaClass.getName()) || iClassName.equals(schemaClass.getShortName())) addedInTx++; } } } } } return (totalOnDb + addedInTx) - deletedInTx; } /** * {@inheritDoc} */ @Override public ODatabase<ORecord> commit() { return commit(false); } @Override public ODatabaseDocument commit(boolean force) throws OTransactionException { checkOpenness(); checkIfActive(); if (!currentTx.isActive()) return this; if (!force && currentTx.amountOfNestedTxs() > 1) { //This just do count down no real commit here currentTx.commit(); return this; } // WAKE UP LISTENERS try { beforeCommitOperations(); } catch (OException e) { try { rollback(force); } catch (Exception re) { OLogManager.instance().error(this, "Exception during rollback `%08X`", re, System.identityHashCode(re)); } throw e; } try { currentTx.commit(force); } catch (RuntimeException e) { if ((e instanceof OHighLevelException) || (e instanceof ONeedRetryException)) OLogManager.instance().debug(this, "Error on transaction commit `%08X`", e, System.identityHashCode(e)); else 
OLogManager.instance().error(this, "Error on transaction commit `%08X`", e, System.identityHashCode(e)); // WAKE UP ROLLBACK LISTENERS beforeRollbackOperations(); try { // ROLLBACK TX AT DB LEVEL ((OTransactionAbstract) currentTx).internalRollback(); } catch (Exception re) { OLogManager.instance().error(this, "Error during transaction rollback `%08X`", re, System.identityHashCode(re)); } getLocalCache().clear(); // WAKE UP ROLLBACK LISTENERS afterRollbackOperations(); throw e; } // WAKE UP LISTENERS afterCommitOperations(); return this; } protected void beforeCommitOperations() { for (ODatabaseListener listener : browseListeners()) try { listener.onBeforeTxCommit(this); } catch (Exception e) { OLogManager.instance() .error(this, "Cannot commit the transaction: caught exception on execution of %s.onBeforeTxCommit() `%08X`", e, listener.getClass().getName(), System.identityHashCode(e)); throw OException.wrapException(new OTransactionException( "Cannot commit the transaction: caught exception on execution of " + listener.getClass().getName() + "#onBeforeTxCommit()"), e); } } protected void afterCommitOperations() { for (ODatabaseListener listener : browseListeners()) try { listener.onAfterTxCommit(this); } catch (Exception e) { final String message = "Error after the transaction has been committed. The transaction remains valid. The exception caught was on execution of " + listener.getClass() + ".onAfterTxCommit() `%08X`"; OLogManager.instance().error(this, message, e, System.identityHashCode(e)); throw OException.wrapException(new OTransactionBlockedException(message), e); } } protected void beforeRollbackOperations() { for (ODatabaseListener listener : browseListeners()) try { listener.onBeforeTxRollback(this); } catch (Exception t) { OLogManager.instance().error(this, "Error before transaction rollback `%08X`", t, System.identityHashCode(t)); } } protected void afterRollbackOperations() { for (ODatabaseListener listener : browseListeners()) try { listener.onAfterTxRollback(this); } catch (Exception t) { OLogManager.instance().error(this, "Error after transaction rollback `%08X`", t, System.identityHashCode(t)); } } /** * {@inheritDoc} */ @Override public ODatabase<ORecord> rollback() { return rollback(false); } @Override public ODatabaseDocument rollback(boolean force) throws OTransactionException { checkOpenness(); if (currentTx.isActive()) { if (!force && currentTx.amountOfNestedTxs() > 1) { //This just decrement the counter no real rollback here currentTx.rollback(); return this; } // WAKE UP LISTENERS beforeRollbackOperations(); currentTx.rollback(force, -1); // WAKE UP LISTENERS afterRollbackOperations(); } getLocalCache().clear(); return this; } /** * This method is internal, it can be subject to signature change or be removed, do not use. 
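In this implementation it always throws {@link UnsupportedOperationException}.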
* * @Internal */ @Override public <DB extends ODatabase> DB getUnderlying() { throw new UnsupportedOperationException(); } @Override public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) { return getStorage().callInLock(iCallable, iExclusiveLock); } @Override public List<String> backup(final OutputStream out, final Map<String, Object> options, final Callable<Object> callable, final OCommandOutputListener iListener, final int compressionLevel, final int bufferSize) throws IOException { checkOpenness(); return getStorage().backup(out, options, callable, iListener, compressionLevel, bufferSize); } @Override public void restore(final InputStream in, final Map<String, Object> options, final Callable<Object> callable, final OCommandOutputListener iListener) throws IOException { checkOpenness(); getStorage().restore(in, options, callable, iListener); if (!isClosed()) { loadMetadata(); sharedContext = null; } } /** * {@inheritDoc} */ public OSBTreeCollectionManager getSbTreeCollectionManager() { return getStorage().getSBtreeCollectionManager(); } @Override public OCurrentStorageComponentsFactory getStorageVersions() { return componentsFactory; } public ORecordSerializer getSerializer() { return serializer; } /** * Sets serializer for the database which will be used for document serialization. * * @param serializer the serializer to set. */ public void setSerializer(ORecordSerializer serializer) { this.serializer = serializer; } @Override public void resetInitialization() { for (ORecordHook h : hooks.keySet()) h.onUnregister(); hooks.clear(); compileHooks(); close(); initialized = false; } @Override public String incrementalBackup(final String path) { checkOpenness(); checkIfActive(); return getStorage().incrementalBackup(path); } @Override @Deprecated public <DB extends ODatabaseDocument> DB checkSecurity(final String iResource, final int iOperation) { final String resourceSpecific = ORule.mapLegacyResourceToSpecificResource(iResource); final ORule.ResourceGeneric resourceGeneric = ORule.mapLegacyResourceToGenericResource(iResource); if (resourceSpecific == null || resourceSpecific.equals("*")) checkSecurity(resourceGeneric, null, iOperation); return checkSecurity(resourceGeneric, resourceSpecific, iOperation); } @Override @Deprecated public <DB extends ODatabaseDocument> DB checkSecurity(final String iResourceGeneric, final int iOperation, final Object iResourceSpecific) { final ORule.ResourceGeneric resourceGeneric = ORule.mapLegacyResourceToGenericResource(iResourceGeneric); if (iResourceSpecific == null || iResourceSpecific.equals("*")) return checkSecurity(resourceGeneric, iOperation, (Object) null); return checkSecurity(resourceGeneric, iOperation, iResourceSpecific); } @Override @Deprecated public <DB extends ODatabaseDocument> DB checkSecurity(final String iResourceGeneric, final int iOperation, final Object... iResourcesSpecific) { final ORule.ResourceGeneric resourceGeneric = ORule.mapLegacyResourceToGenericResource(iResourceGeneric); return checkSecurity(resourceGeneric, iOperation, iResourcesSpecific); } /** * @return <code>true</code> if database is obtained from the pool and <code>false</code> otherwise. */ @Override public boolean isPooled() { return false; } /** * Use #activateOnCurrentThread instead. */ @Deprecated public void setCurrentDatabaseInThreadLocal() { activateOnCurrentThread(); } /** * Activates current database instance on current thread. 
*/ @Override public ODatabaseDocumentAbstract activateOnCurrentThread() { final ODatabaseRecordThreadLocal tl = ODatabaseRecordThreadLocal.instance(); if (tl != null) tl.set(this); return this; } @Override public boolean isActiveOnCurrentThread() { final ODatabaseRecordThreadLocal tl = ODatabaseRecordThreadLocal.instance(); final ODatabaseDocumentInternal db = tl != null ? tl.getIfDefined() : null; return db == this; } protected void checkOpenness() { if (status == STATUS.CLOSED) throw new ODatabaseException("Database '" + getURL() + "' is closed"); } private void popInHook(OIdentifiable id) { inHook.remove(id); } private boolean pushInHook(OIdentifiable id) { return inHook.add(id); } protected void callbackHookFailure(ORecord record, boolean wasNew, byte[] stream) { if (stream != null && stream.length > 0) callbackHooks(wasNew ? ORecordHook.TYPE.CREATE_FAILED : ORecordHook.TYPE.UPDATE_FAILED, record); } protected void callbackHookSuccess(final ORecord record, final boolean wasNew, final byte[] stream, final OStorageOperationResult<Integer> operationResult) { if (stream != null && stream.length > 0) { final ORecordHook.TYPE hookType; if (!operationResult.isMoved()) { hookType = wasNew ? ORecordHook.TYPE.AFTER_CREATE : ORecordHook.TYPE.AFTER_UPDATE; } else { hookType = wasNew ? ORecordHook.TYPE.CREATE_REPLICATED : ORecordHook.TYPE.UPDATE_REPLICATED; } callbackHooks(hookType, record); } } protected void callbackHookFinalize(final ORecord record, final boolean wasNew, final byte[] stream) { if (stream != null && stream.length > 0) { final ORecordHook.TYPE hookType; hookType = wasNew ? ORecordHook.TYPE.FINALIZE_CREATION : ORecordHook.TYPE.FINALIZE_UPDATE; callbackHooks(hookType, record); clearDocumentTracking(record); } } protected void clearDocumentTracking(final ORecord record) { if (record instanceof ODocument && ((ODocument) record).isTrackingChanges()) { ODocumentInternal.clearTrackData((ODocument) record); } } protected void checkRecordClass(final OClass recordClass, final String iClusterName, final ORecordId rid) { final OClass clusterIdClass = metadata.getImmutableSchemaSnapshot().getClassByClusterId(rid.getClusterId()); if (recordClass == null && clusterIdClass != null || clusterIdClass == null && recordClass != null || (recordClass != null && !recordClass.equals(clusterIdClass))) throw new IllegalArgumentException( "Record saved into cluster '" + iClusterName + "' should be saved with class '" + clusterIdClass + "' but has been created with class '" + recordClass + "'"); } protected void init() { currentTx = new OTransactionNoTx(this); } private OFreezableStorageComponent getFreezableStorage() { OStorage s = getStorage(); if (s instanceof OFreezableStorageComponent) return (OFreezableStorageComponent) s; else { OLogManager.instance().error(this, "Storage of type " + s.getType() + " does not support freeze operation", null); return null; } } public void checkIfActive() { final ODatabaseRecordThreadLocal tl = ODatabaseRecordThreadLocal.instance(); ODatabaseDocumentInternal currentDatabase = tl != null ? tl.get() : null; if (currentDatabase instanceof ODatabaseDocumentTx) { currentDatabase = ((ODatabaseDocumentTx) currentDatabase).internal; } if (currentDatabase != this) throw new IllegalStateException( "The current database instance (" + toString() + ") is not active on the current thread (" + Thread.currentThread() + "). 
Current active database is: " + currentDatabase); } public Set<Integer> getBlobClusterIds() { return getMetadata().getSchema().getBlobClusters(); } private void compileHooks() { final List<ORecordHook>[] intermediateHooksByScope = new List[ORecordHook.SCOPE.values().length]; for (ORecordHook.SCOPE scope : ORecordHook.SCOPE.values()) intermediateHooksByScope[scope.ordinal()] = new ArrayList<>(); for (ORecordHook hook : hooks.keySet()) for (ORecordHook.SCOPE scope : hook.getScopes()) intermediateHooksByScope[scope.ordinal()].add(hook); for (ORecordHook.SCOPE scope : ORecordHook.SCOPE.values()) { final int ordinal = scope.ordinal(); final List<ORecordHook> scopeHooks = intermediateHooksByScope[ordinal]; hooksByScope[ordinal] = scopeHooks.toArray(new ORecordHook[scopeHooks.size()]); } } @Override public OSharedContext getSharedContext() { // NOW NEED TO GET THE CONTEXT FROM RESOURCES IN FUTURE WILL BE NOT NEEDED if (sharedContext == null) { sharedContext = getStorage().getResource(OSharedContext.class.getName(), new Callable<OSharedContext>() { @Override public OSharedContext call() throws Exception { throw new ODatabaseException("Accessing to the database context before the database has bean initialized"); } }); } return sharedContext; } public static Object executeWithRetries(final OCallable<Object, Integer> callback, final int maxRetry) { return executeWithRetries(callback, maxRetry, 0, null); } public static Object executeWithRetries(final OCallable<Object, Integer> callback, final int maxRetry, final int waitBetweenRetry) { return executeWithRetries(callback, maxRetry, waitBetweenRetry, null); } public static Object executeWithRetries(final OCallable<Object, Integer> callback, final int maxRetry, final int waitBetweenRetry, final ORecord[] recordToReloadOnRetry) { ONeedRetryException lastException = null; for (int retry = 0; retry < maxRetry; ++retry) { try { return callback.call(retry); } catch (ONeedRetryException e) { // SAVE LAST EXCEPTION AND RETRY lastException = e; if (recordToReloadOnRetry != null) { // RELOAD THE RECORDS for (ORecord r : recordToReloadOnRetry) r.reload(); } if (waitBetweenRetry > 0) try { Thread.sleep(waitBetweenRetry); } catch (InterruptedException ignore) { Thread.currentThread().interrupt(); break; } } } throw lastException; } private void bindPropertiesToContext(OContextConfiguration configuration, final Map<String, Object> iProperties) { final String connectionStrategy = iProperties != null ? (String) iProperties.get("connectionStrategy") : null; if (connectionStrategy != null) configuration.setValue(OGlobalConfiguration.CLIENT_CONNECTION_STRATEGY, connectionStrategy); final String compressionMethod = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD.getKey().toLowerCase(Locale.ENGLISH)) : null; if (compressionMethod != null) // SAVE COMPRESSION METHOD IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD, compressionMethod); final String encryptionMethod = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD.getKey().toLowerCase(Locale.ENGLISH)) : null; if (encryptionMethod != null) // SAVE ENCRYPTION METHOD IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD, encryptionMethod); final String encryptionKey = iProperties != null ? 
(String) iProperties.get(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY.getKey().toLowerCase(Locale.ENGLISH)) : null; if (encryptionKey != null) // SAVE ENCRYPTION KEY IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY, encryptionKey); } private void bindPropertiesToContextGlobal(OContextConfiguration configuration, final Map<OGlobalConfiguration, Object> iProperties) { final String connectionStrategy = iProperties != null ? (String) iProperties.get("connectionStrategy") : null; if (connectionStrategy != null) configuration.setValue(OGlobalConfiguration.CLIENT_CONNECTION_STRATEGY, connectionStrategy); final String compressionMethod = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD) : null; if (compressionMethod != null) // SAVE COMPRESSION METHOD IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD, compressionMethod); final String encryptionMethod = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD) : null; if (encryptionMethod != null) // SAVE ENCRYPTION METHOD IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD, encryptionMethod); final String encryptionKey = iProperties != null ? (String) iProperties.get(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY) : null; if (encryptionKey != null) // SAVE ENCRYPTION KEY IN CONFIGURATION configuration.setValue(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY, encryptionKey); } public boolean isUseLightweightEdges() { final List<OStorageEntryConfiguration> custom = (List<OStorageEntryConfiguration>) this.get(ATTRIBUTES.CUSTOM); for (OStorageEntryConfiguration c : custom) { if (c.name.equals("useLightweightEdges")) return Boolean.parseBoolean(c.value); } return false; } public void setUseLightweightEdges(boolean b) { this.setCustom("useLightweightEdges", b); } public OEdge newLightweightEdge(String iClassName, OVertex from, OVertex to) { OClass clazz = getMetadata().getSchema().getClass(iClassName); OEdgeDelegate result = new OEdgeDelegate(from, to, clazz, iClassName); return result; } public void queryStarted(String id, OResultSet rs) { if (this.activeQueries.size() > 1 && this.activeQueries.size() % 10 == 0) { StringBuilder msg = new StringBuilder(); msg.append("This database instance has "); msg.append(activeQueries.size()); msg.append(" open command/query result sets, please make sure you close them with OResultSet.close()"); OLogManager.instance().warn(this, msg.toString(), null); if (OLogManager.instance().isDebugEnabled()) { activeQueries.values().stream().map(pendingQuery -> pendingQuery.getExecutionPlan()).filter(plan -> plan != null) .forEach(plan -> OLogManager.instance().debug(this, plan.toString())); } } this.activeQueries.put(id, rs); } public void queryClosed(String id) { this.activeQueries.remove(id); } protected void closeActiveQueries() { while (activeQueries.size() > 0) { this.activeQueries.values().iterator().next().close();//the query automatically unregisters itself } } public OResultSet getActiveQuery(String id) { return activeQueries.get(id); } @Override public void internalCommit(OTransactionInternal transaction) { this.getStorage().commit(transaction); } @Override public boolean isClusterEdge(int cluster) { OClass clazz = getMetadata().getSchema().getClassByClusterId(cluster); if (clazz != null && clazz.isEdgeType()) return true; return false; } @Override public boolean isClusterVertex(int cluster) { OClass clazz = 
getMetadata().getSchema().getClassByClusterId(cluster); if (clazz != null && clazz.isVertexType()) return true; return false; } }
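The file above ends with the static retry helper executeWithRetries but contains no example of a caller. The fragment below is a minimal usage sketch, not part of the original source; db (an open ODatabaseDocument) and doc (a loaded ODocument with an integer "counter" field) are assumed placeholders.

    // Hedged sketch: retry an update that may fail with ONeedRetryException
    // under concurrent modification, reloading the record between attempts.
    Object saved = ODatabaseDocumentAbstract.executeWithRetries(
        retry -> {
            doc.field("counter", ((Integer) doc.field("counter")) + 1);
            return db.save(doc);              // may throw ONeedRetryException
        },
        5,                                    // maxRetry
        100,                                  // waitBetweenRetry, in milliseconds
        new ORecord[]{doc});                  // records reloaded before each retry

If every attempt fails, the helper rethrows the last ONeedRetryException, so callers only need to handle the final failure.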
Avoid race conditions on query start/close
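The commit message above refers to races between query registration and closing; the queryStarted, queryClosed and closeActiveQueries methods appear near the end of the file. Purely as an illustration, and not necessarily the approach this commit takes, one common way to make such a registry safe for concurrent start/close is a ConcurrentHashMap, whose weakly consistent iterators tolerate removal during iteration. The fragment below assumes the same OResultSet type used above.

    // Illustration only: a ConcurrentHashMap-backed query registry.
    private final Map<String, OResultSet> activeQueries = new ConcurrentHashMap<>();

    public void queryStarted(String id, OResultSet rs) {
        activeQueries.put(id, rs);
    }

    public void queryClosed(String id) {
        activeQueries.remove(id);
    }

    protected void closeActiveQueries() {
        // Each close() unregisters its result set via queryClosed(); the weakly
        // consistent iterator is not invalidated by that concurrent removal.
        for (OResultSet rs : activeQueries.values()) {
            rs.close();
        }
    }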
core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentAbstract.java
Avoid race conditions on query start/close
Java
apache-2.0
99cf3e6bf14da955433387e2c000fc956597d267
0
cheng-li/pyramid,cheng-li/pyramid
package edu.neu.ccs.pyramid.experiment; import edu.neu.ccs.pyramid.configuration.Config; import edu.neu.ccs.pyramid.dataset.DataSetType; import edu.neu.ccs.pyramid.dataset.MultiLabel; import edu.neu.ccs.pyramid.dataset.MultiLabelClfDataSet; import edu.neu.ccs.pyramid.dataset.TRECFormat; import edu.neu.ccs.pyramid.eval.Accuracy; import edu.neu.ccs.pyramid.eval.Overlap; import edu.neu.ccs.pyramid.multilabel_classification.bmm.BMMClassifier; import edu.neu.ccs.pyramid.multilabel_classification.bmm.BMMInitializer; import edu.neu.ccs.pyramid.multilabel_classification.bmm.BMMOptimizer; import java.io.File; import java.util.HashSet; import java.util.Set; /** * BMM multi-label * Created by chengli on 10/8/15. */ public class Exp210 { public static void main(String[] args) throws Exception { if (args.length != 1) { throw new IllegalArgumentException("Please specify a properties file."); } Config config = new Config(args[0]); System.out.println(config); MultiLabelClfDataSet trainSet = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.trainData"), DataSetType.ML_CLF_SPARSE, true); MultiLabelClfDataSet testSet = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.testData"), DataSetType.ML_CLF_SPARSE, true); int numClusters = config.getInt("numClusters"); double variance = config.getDouble("variance"); int numIterations = config.getInt("numIterations"); int numSamples = config.getInt("numSamples"); String output = config.getString("output"); String modelName = config.getString("modelName"); BMMClassifier bmmClassifier; if (config.getBoolean("train.warmStart")) { bmmClassifier = BMMClassifier.deserialize(new File(output, modelName)); } else { bmmClassifier = new BMMClassifier(trainSet.getNumClasses(),numClusters,trainSet.getNumFeatures()); BMMOptimizer optimizer = new BMMOptimizer(bmmClassifier,trainSet,variance); bmmClassifier.setNumSample(numSamples); System.out.print("random init" + "\t" ); System.out.print("trainAcc : "+ Accuracy.accuracy(bmmClassifier, trainSet) + "\t"); System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet) + "\t"); System.out.print("testACC : "+ Accuracy.accuracy(bmmClassifier,testSet) + "\t"); System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet) + "\t"); if (config.getBoolean("initialize")){ BMMInitializer bmmInitializer = new BMMInitializer(); bmmInitializer.initialize(bmmClassifier,trainSet); System.out.print("pure-label" + "\t"); System.out.print("trainAcc : "+ Accuracy.accuracy(bmmClassifier,trainSet)+ "\t"); System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet)+ "\t"); System.out.print("testAcc : "+ Accuracy.accuracy(bmmClassifier,testSet)+ "\t"); System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet)+ "\t"); } for (int i=1;i<=numIterations;i++){ optimizer.iterate(); System.out.print("iter : "+i + "\t"); System.out.print("objective: "+optimizer.getTerminator().getLastValue() + "\t"); System.out.print("trainAcc : "+ Accuracy.accuracy(bmmClassifier,trainSet)+ "\t"); System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet)+ "\t"); System.out.print("testAcc : "+ Accuracy.accuracy(bmmClassifier,testSet)+ "\t"); System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet)+ "\t"); } System.out.println("history = "+optimizer.getTerminator().getHistory()); } System.out.println("--------------------------------Results-----------------------------\n"); System.out.println(); System.out.print("trainAcc : " + Accuracy.accuracy(bmmClassifier, trainSet) + "\t"); 
System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet)+ "\t"); System.out.print("testAcc : "+ Accuracy.accuracy(bmmClassifier,testSet)+ "\t"); System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet)+ "\t"); System.out.println(); System.out.println(); System.out.println(bmmClassifier); if (config.getBoolean("generateNewRate")) { Set<MultiLabel> samples = bmmClassifier.sampleFromSingles(config.getInt("topSample")); System.out.println("total samples: " + samples.size()); MultiLabel[] predictions = bmmClassifier.predict(trainSet); int cover = 0; Set<MultiLabel> uniqueTrainPred = new HashSet<>(); for (MultiLabel l : predictions) { if (samples.contains(l)) { cover += 1; } if (!uniqueTrainPred.contains(l)) { uniqueTrainPred.add(l); } } Set<MultiLabel> uniqueTrainY = new HashSet<>(); for (MultiLabel l : trainSet.getMultiLabels()) { if (!uniqueTrainY.contains(l)) { uniqueTrainY.add(l); } } System.out.println("Training unique prediction combinations: " + uniqueTrainPred.size()); System.out.println("Training unique label combinations: " + uniqueTrainY.size()); System.out.println("Training cover rate: " + (float)cover/ (float)predictions.length); predictions = bmmClassifier.predict(testSet); cover = 0; Set<MultiLabel> uniqueTestPred = new HashSet<>(); for (MultiLabel l : predictions) { if (samples.contains(l)) { cover += 1; } if (!uniqueTestPred.contains(l)) { uniqueTestPred.add(l); } } Set<MultiLabel> uniqueTestY = new HashSet<>(); for (MultiLabel l : testSet.getMultiLabels()) { if (!uniqueTestY.contains(l)) { uniqueTestY.add(l); } } System.out.println("Testing unique prediction combinations: " + uniqueTestPred.size()); System.out.println("Testing unique label combinations: " + uniqueTestY.size()); System.out.println("Testing cover rate: " + (float)cover/ (float)predictions.length); } if (config.getBoolean("saveModel")) { File serializeModel = new File(output,modelName); bmmClassifier.serialize(serializeModel); } } }
src/main/java/edu/neu/ccs/pyramid/experiment/Exp210.java
package edu.neu.ccs.pyramid.experiment; import edu.neu.ccs.pyramid.configuration.Config; import edu.neu.ccs.pyramid.dataset.DataSetType; import edu.neu.ccs.pyramid.dataset.MultiLabel; import edu.neu.ccs.pyramid.dataset.MultiLabelClfDataSet; import edu.neu.ccs.pyramid.dataset.TRECFormat; import edu.neu.ccs.pyramid.eval.Accuracy; import edu.neu.ccs.pyramid.eval.Overlap; import edu.neu.ccs.pyramid.multilabel_classification.bmm.BMMClassifier; import edu.neu.ccs.pyramid.multilabel_classification.bmm.BMMInitializer; import edu.neu.ccs.pyramid.multilabel_classification.bmm.BMMOptimizer; import java.io.File; import java.util.Set; /** * BMM multi-label * Created by chengli on 10/8/15. */ public class Exp210 { public static void main(String[] args) throws Exception { if (args.length != 1) { throw new IllegalArgumentException("Please specify a properties file."); } Config config = new Config(args[0]); System.out.println(config); MultiLabelClfDataSet trainSet = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.trainData"), DataSetType.ML_CLF_SPARSE, true); MultiLabelClfDataSet testSet = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.testData"), DataSetType.ML_CLF_SPARSE, true); int numClusters = config.getInt("numClusters"); double variance = config.getDouble("variance"); int numIterations = config.getInt("numIterations"); int numSamples = config.getInt("numSamples"); String output = config.getString("output"); String modelName = config.getString("modelName"); BMMClassifier bmmClassifier; if (config.getBoolean("train.warmStart")) { bmmClassifier = BMMClassifier.deserialize(new File(output, modelName)); } else { bmmClassifier = new BMMClassifier(trainSet.getNumClasses(),numClusters,trainSet.getNumFeatures()); BMMOptimizer optimizer = new BMMOptimizer(bmmClassifier,trainSet,variance); bmmClassifier.setNumSample(numSamples); System.out.print("random init" + "\t" ); System.out.print("trainAcc : "+ Accuracy.accuracy(bmmClassifier, trainSet) + "\t"); System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet) + "\t"); System.out.print("testACC : "+ Accuracy.accuracy(bmmClassifier,testSet) + "\t"); System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet) + "\t"); if (config.getBoolean("initialize")){ BMMInitializer bmmInitializer = new BMMInitializer(); bmmInitializer.initialize(bmmClassifier,trainSet); System.out.print("pure-label" + "\t"); System.out.print("trainAcc : "+ Accuracy.accuracy(bmmClassifier,trainSet)+ "\t"); System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet)+ "\t"); System.out.print("testAcc : "+ Accuracy.accuracy(bmmClassifier,testSet)+ "\t"); System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet)+ "\t"); } for (int i=1;i<=numIterations;i++){ optimizer.iterate(); System.out.print("iter : "+i + "\t"); System.out.print("objective: "+optimizer.getTerminator().getLastValue() + "\t"); System.out.print("trainAcc : "+ Accuracy.accuracy(bmmClassifier,trainSet)+ "\t"); System.out.print("trainOver: "+ Overlap.overlap(bmmClassifier, trainSet)+ "\t"); System.out.print("testAcc : "+ Accuracy.accuracy(bmmClassifier,testSet)+ "\t"); System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet)+ "\t"); } System.out.println("history = "+optimizer.getTerminator().getHistory()); } System.out.println("--------------------------------Results-----------------------------\n"); System.out.println(); System.out.print("trainAcc : " + Accuracy.accuracy(bmmClassifier, trainSet) + "\t"); System.out.print("trainOver: 
"+ Overlap.overlap(bmmClassifier, trainSet)+ "\t"); System.out.print("testAcc : "+ Accuracy.accuracy(bmmClassifier,testSet)+ "\t"); System.out.println("testOver : "+ Overlap.overlap(bmmClassifier, testSet)+ "\t"); System.out.println(); System.out.println(); System.out.println(bmmClassifier); if (config.getBoolean("generateNewRate")) { Set<MultiLabel> samples = bmmClassifier.sampleFromSingles(config.getInt("topSample")); MultiLabel[] predictions = bmmClassifier.predict(trainSet); int cover = 0; for (MultiLabel l : predictions) { if (samples.contains(l)) { cover += 1; } } System.out.println("Training cover rate: " + (float)cover/ (float)predictions.length); predictions = bmmClassifier.predict(testSet); cover = 0; for (MultiLabel l : predictions) { if (samples.contains(l)) { cover += 1; } } System.out.println("Testing cover rate: " + (float)cover/ (float)predictions.length); } if (config.getBoolean("saveModel")) { File serializeModel = new File(output,modelName); bmmClassifier.serialize(serializeModel); } } }
other statistics for training and testing info.
src/main/java/edu/neu/ccs/pyramid/experiment/Exp210.java
other statistics for training and testing info.
Java
apache-2.0
ddd9091a471fdc953e4c90636bae0c796f75d55f
0
esteinberg/plantuml4idea,esteinberg/plantuml4idea,esteinberg/plantuml4idea
package org.plantuml.idea.toolwindow; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.ActionManager; import com.intellij.openapi.actionSystem.ActionPlaces; import com.intellij.openapi.actionSystem.ActionToolbar; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.ToolWindow; import com.intellij.ui.components.JBScrollPane; import org.jetbrains.annotations.NotNull; import org.plantuml.idea.action.SelectPageAction; import org.plantuml.idea.lang.settings.PlantUmlSettings; import org.plantuml.idea.rendering.*; import org.plantuml.idea.toolwindow.listener.PlantUmlAncestorListener; import org.plantuml.idea.util.ImageWithUrlData; import org.plantuml.idea.util.UIUtils; import javax.swing.*; import javax.swing.event.AncestorListener; import java.awt.*; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionListener; import java.awt.event.MouseWheelEvent; import java.awt.event.MouseWheelListener; import java.io.File; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; /** * @author Eugene Steinberg */ public class PlantUmlToolWindow extends JPanel implements Disposable { private static Logger logger = Logger.getInstance(PlantUmlToolWindow.class); private ToolWindow toolWindow; private JPanel imagesPanel; private JScrollPane scrollPane; private int zoom = 100; private int page = -1; private RenderCache renderCache = new RenderCache(10); private AncestorListener plantUmlAncestorListener; private final LazyApplicationPoolExecutor lazyExecutor; private SelectPageAction selectPageAction; private Project project; private AtomicInteger sequence = new AtomicInteger(); public PlantUmlToolWindow(Project project, ToolWindow toolWindow) { super(new BorderLayout()); this.project = project; this.toolWindow = toolWindow; PlantUmlSettings instance = PlantUmlSettings.getInstance();// Make sure settings are loaded and applied before we start rendering. 
setupUI(); lazyExecutor = new LazyApplicationPoolExecutor(instance.getRenderDelayAsInt()); plantUmlAncestorListener = new PlantUmlAncestorListener(this, project); //must be last this.toolWindow.getComponent().addAncestorListener(plantUmlAncestorListener); } private void setupUI() { ActionGroup group = (ActionGroup) ActionManager.getInstance().getAction("PlantUML.Toolbar"); final ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true); actionToolbar.setTargetComponent(this); add(actionToolbar.getComponent(), BorderLayout.PAGE_START); imagesPanel = new JPanel(); imagesPanel.setLayout(new BoxLayout(imagesPanel, BoxLayout.Y_AXIS)); scrollPane = new JBScrollPane(imagesPanel); scrollPane.getVerticalScrollBar().setUnitIncrement(20); add(scrollPane, BorderLayout.CENTER); addScrollBarListeners(imagesPanel); selectPageAction = (SelectPageAction) ActionManager.getInstance().getAction("PlantUML.SelectPage"); } private void addScrollBarListeners(JComponent panel) { panel.addMouseWheelListener(new MouseWheelListener() { @Override public void mouseWheelMoved(MouseWheelEvent e) { if (e.isControlDown()) { setZoom(Math.max(getZoom() - e.getWheelRotation() * 10, 1)); } else { scrollPane.dispatchEvent(e); } } }); panel.addMouseMotionListener(new MouseMotionListener() { private int x, y; @Override public void mouseDragged(MouseEvent e) { JScrollBar h = scrollPane.getHorizontalScrollBar(); JScrollBar v = scrollPane.getVerticalScrollBar(); int dx = x - e.getXOnScreen(); int dy = y - e.getYOnScreen(); h.setValue(h.getValue() + dx); v.setValue(v.getValue() + dy); x = e.getXOnScreen(); y = e.getYOnScreen(); } @Override public void mouseMoved(MouseEvent e) { x = e.getXOnScreen(); y = e.getYOnScreen(); } }); } @Override public void dispose() { logger.debug("dispose"); toolWindow.getComponent().removeAncestorListener(plantUmlAncestorListener); } public void renderLater(final LazyApplicationPoolExecutor.Delay delay) { logger.debug("renderLater ", project.getName(), " ", delay); ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { if (isProjectValid(project)) { final String source = UIUtils.getSelectedSourceWithCaret(project); if ("".equals(source)) { //is included file or some crap? 
logger.debug("empty source"); VirtualFile selectedFile = UIUtils.getSelectedFile(project); RenderCacheItem last = renderCache.getDisplayedItem(); //todo check all items for included file if (last != null && last.isIncludedFile(selectedFile)) { logger.debug("include file selected"); if (last.isIncludedFileChanged(selectedFile)) { logger.debug("includes changed, executing command"); lazyExecutor.execute(getCommand(last.getSourceFilePath(), last.getSource(), last.getBaseDir(), page, zoom, null, delay), delay); } else if (last.renderRequired(project, last.getSource(), page)) { logger.debug("render required"); lazyExecutor.execute(getCommand(last.getSourceFilePath(), last.getSource(), last.getBaseDir(), page, zoom, last, delay), delay); } else if (!renderCache.isDisplayed(last, page)) { logger.debug("displaying cached item ", last); displayExistingDiagram(last); } else { logger.debug("include file, not changed"); } } else if (last != null && !renderCache.isDisplayed(last, page)) { logger.debug("empty source, not include file, displaying cached item ", last); displayExistingDiagram(last); } else { logger.debug("nothing needed"); } return; } String sourceFilePath = UIUtils.getSelectedFile(project).getPath(); if (delay == LazyApplicationPoolExecutor.Delay.NOW) { logger.debug("executing Delay.NOW"); final File selectedDir = UIUtils.getSelectedDir(project); lazyExecutor.execute(getCommand(sourceFilePath, source, selectedDir, page, zoom, null, delay), delay); return; } RenderCacheItem cachedItem = renderCache.getCachedItem(sourceFilePath, source, zoom); if (cachedItem == null || cachedItem.renderRequired(project, source, page)) { logger.debug("render required"); final File selectedDir = UIUtils.getSelectedDir(project); lazyExecutor.execute(getCommand(sourceFilePath, source, selectedDir, page, zoom, cachedItem, delay), delay); } else { if (!renderCache.isDisplayed(cachedItem, page)) { logger.debug("displaying cached item ", cachedItem); displayExistingDiagram(cachedItem); } else { logger.debug("item already displayed ", cachedItem); } } } } }); } public void displayExistingDiagram(RenderCacheItem last) { last.setVersion(sequence.incrementAndGet()); last.setPage(page); displayDiagram(last); } @NotNull protected RenderCommand getCommand(String selectedFile, final String source, final File baseDir, final int page, final int zoom, RenderCacheItem cachedItem, LazyApplicationPoolExecutor.Delay delay) { logger.debug("#getCommand selectedFile='", selectedFile, "', baseDir=", baseDir, ", page=", page, ", zoom=", zoom); int version = sequence.incrementAndGet(); return new MyRenderCommand(selectedFile, source, baseDir, page, zoom, cachedItem, version, delay); } private class MyRenderCommand extends RenderCommand { private final LazyApplicationPoolExecutor.Delay delay; public MyRenderCommand(String selectedFile, String source, File baseDir, int page, int zoom, RenderCacheItem cachedItem, int version, LazyApplicationPoolExecutor.Delay delay) { super(selectedFile, source, baseDir, page, zoom, cachedItem, version); this.delay = delay; } @Override public void postRenderOnEDT(final RenderResult imageResult, final ImageWithUrlData[] imagesWithData, final Map<File, Long> includedFiles) { if (delay == LazyApplicationPoolExecutor.Delay.NOW) { if (cachedItem != null) { renderCache.removeFromCache(cachedItem); } } RenderCacheItem newItem = new RenderCacheItem(sourceFilePath, source, baseDir, zoom, page, includedFiles, imageResult, imagesWithData, version); renderCache.addToCache(newItem); displayDiagram(newItem); } } public 
void displayDiagram(RenderCacheItem cacheItem) { if (renderCache.isOlderRequest(cacheItem)) { //ctrl+z with cached image vs older request in progress logger.debug("skipping displaying older result", cacheItem); return; } logger.debug("displaying item ", cacheItem); renderCache.setDisplayedItem(cacheItem); ImageWithUrlData[] imagesWithData = cacheItem.getImagesWithData(); RenderResult imageResult = cacheItem.getImageResult(); imagesPanel.removeAll(); if (this.page >= imageResult.getPages()) { this.page = -1; selectPageAction.setPage(page); } if (this.page == -1) { for (int i = 0; i < imagesWithData.length; i++) { displayImage(cacheItem, imageResult, i, imagesWithData[i]); } } else { displayImage(cacheItem, imageResult, page, imagesWithData[page]); } imagesPanel.revalidate(); imagesPanel.repaint(); } public void displayImage(RenderCacheItem cacheItem, RenderResult imageResult, int i, ImageWithUrlData imageWithData) { if (imageWithData == null) { logger.error("trying to display null image. selectedPage=" + page + ", nullPage=" + i + ", cacheItem=" + cacheItem); } logger.debug("displaying image ", i); PlantUmlLabel label = new PlantUmlLabel(imageWithData, i, imageResult.getRenderRequest()); addScrollBarListeners(label); if (i != 0) { imagesPanel.add(separator()); } imagesPanel.add(label); } public void applyNewSettings(PlantUmlSettings plantUmlSettings) { lazyExecutor.setDelay(plantUmlSettings.getRenderDelayAsInt()); renderCache.setMaxCacheSize(plantUmlSettings.getCacheSizeAsInt()); } private JSeparator separator() { JSeparator separator = new JSeparator(SwingConstants.HORIZONTAL); Dimension size = new Dimension(separator.getPreferredSize().width, 10); separator.setVisible(true); separator.setMaximumSize(size); separator.setPreferredSize(size); return separator; } public int getZoom() { return zoom; } public void setZoom(int zoom) { this.zoom = zoom; renderLater(LazyApplicationPoolExecutor.Delay.POST_DELAY); } public void setPage(int page) { if (page >= -1 && page < getNumPages()) { logger.debug("page ", page, " selected"); this.page = page; selectPageAction.setPage(page); renderLater(LazyApplicationPoolExecutor.Delay.POST_DELAY); } } public void nextPage() { setPage(this.page + 1); } public void prevPage() { setPage(this.page - 1); } public int getNumPages() { int pages = -1; RenderCacheItem last = renderCache.getDisplayedItem(); if (last != null) { RenderResult imageResult = last.getImageResult(); if (imageResult != null) { pages = imageResult.getPages(); } } return pages; } private boolean isProjectValid(Project project) { return project != null && !project.isDisposed(); } public JPanel getImagesPanel() { return imagesPanel; } }
src/org/plantuml/idea/toolwindow/PlantUmlToolWindow.java
package org.plantuml.idea.toolwindow; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.ActionManager; import com.intellij.openapi.actionSystem.ActionPlaces; import com.intellij.openapi.actionSystem.ActionToolbar; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.ToolWindow; import com.intellij.ui.components.JBScrollPane; import org.jetbrains.annotations.NotNull; import org.plantuml.idea.action.SelectPageAction; import org.plantuml.idea.lang.settings.PlantUmlSettings; import org.plantuml.idea.rendering.*; import org.plantuml.idea.toolwindow.listener.PlantUmlAncestorListener; import org.plantuml.idea.util.ImageWithUrlData; import org.plantuml.idea.util.UIUtils; import javax.swing.*; import javax.swing.event.AncestorListener; import java.awt.*; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionListener; import java.awt.event.MouseWheelEvent; import java.awt.event.MouseWheelListener; import java.io.File; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; /** * @author Eugene Steinberg */ public class PlantUmlToolWindow extends JPanel implements Disposable { private static Logger logger = Logger.getInstance(PlantUmlToolWindow.class); private ToolWindow toolWindow; private JPanel imagesPanel; private JScrollPane scrollPane; private int zoom = 100; private int page = -1; private RenderCache renderCache = new RenderCache(10); private AncestorListener plantUmlAncestorListener; private final LazyApplicationPoolExecutor lazyExecutor; private SelectPageAction selectPageAction; private Project project; private AtomicInteger sequence = new AtomicInteger(); public PlantUmlToolWindow(Project project, ToolWindow toolWindow) { super(new BorderLayout()); this.project = project; this.toolWindow = toolWindow; PlantUmlSettings instance = PlantUmlSettings.getInstance();// Make sure settings are loaded and applied before we start rendering. 
setupUI(); lazyExecutor = new LazyApplicationPoolExecutor(instance.getRenderDelayAsInt()); plantUmlAncestorListener = new PlantUmlAncestorListener(this, project); //must be last this.toolWindow.getComponent().addAncestorListener(plantUmlAncestorListener); } private void setupUI() { ActionGroup group = (ActionGroup) ActionManager.getInstance().getAction("PlantUML.Toolbar"); final ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true); actionToolbar.setTargetComponent(this); add(actionToolbar.getComponent(), BorderLayout.PAGE_START); imagesPanel = new JPanel(); imagesPanel.setLayout(new BoxLayout(imagesPanel, BoxLayout.Y_AXIS)); scrollPane = new JBScrollPane(imagesPanel); scrollPane.getVerticalScrollBar().setUnitIncrement(20); add(scrollPane, BorderLayout.CENTER); addScrollBarListeners(imagesPanel); selectPageAction = (SelectPageAction) ActionManager.getInstance().getAction("PlantUML.SelectPage"); } private void addScrollBarListeners(JComponent panel) { panel.addMouseWheelListener(new MouseWheelListener() { @Override public void mouseWheelMoved(MouseWheelEvent e) { if (e.isControlDown()) { setZoom(Math.max(getZoom() - e.getWheelRotation() * 10, 1)); } else { scrollPane.dispatchEvent(e); } } }); panel.addMouseMotionListener(new MouseMotionListener() { private int x, y; @Override public void mouseDragged(MouseEvent e) { JScrollBar h = scrollPane.getHorizontalScrollBar(); JScrollBar v = scrollPane.getVerticalScrollBar(); int dx = x - e.getXOnScreen(); int dy = y - e.getYOnScreen(); h.setValue(h.getValue() + dx); v.setValue(v.getValue() + dy); x = e.getXOnScreen(); y = e.getYOnScreen(); } @Override public void mouseMoved(MouseEvent e) { x = e.getXOnScreen(); y = e.getYOnScreen(); } }); } @Override public void dispose() { logger.debug("dispose"); toolWindow.getComponent().removeAncestorListener(plantUmlAncestorListener); } public void renderLater(final LazyApplicationPoolExecutor.Delay delay) { logger.debug("renderLater ", project.getName(), " ", delay); ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { if (isProjectValid(project)) { final String source = UIUtils.getSelectedSourceWithCaret(project); if ("".equals(source)) { //is included file or some crap? 
VirtualFile selectedFile = UIUtils.getSelectedFile(project); RenderCacheItem last = renderCache.getDisplayedItem(); //todo check all items for included file if (last != null && last.isIncludedFile(selectedFile)) { if (last.isIncludedFileChanged(selectedFile)) { logger.debug("includes changed, executing command"); lazyExecutor.execute(getCommand(last.getSourceFilePath(), last.getSource(), last.getBaseDir(), page, zoom, null, delay), delay); } else if (last.renderRequired(project, source, page)) { logger.debug("render required"); lazyExecutor.execute(getCommand(last.getSourceFilePath(), last.getSource(), last.getBaseDir(), page, zoom, last, delay), delay); } else { logger.debug("include file, not changed"); } } else { logger.debug("empty source, not include file from last image"); } return; } String sourceFilePath = UIUtils.getSelectedFile(project).getPath(); if (delay == LazyApplicationPoolExecutor.Delay.NOW) { logger.debug("executing Delay.NOW"); final File selectedDir = UIUtils.getSelectedDir(project); lazyExecutor.execute(getCommand(sourceFilePath, source, selectedDir, page, zoom, null, delay), delay); return; } RenderCacheItem cachedItem = renderCache.getCachedItem(sourceFilePath, source, zoom); if (cachedItem == null || cachedItem.renderRequired(project, source, page)) { logger.debug("render required"); final File selectedDir = UIUtils.getSelectedDir(project); lazyExecutor.execute(getCommand(sourceFilePath, source, selectedDir, page, zoom, cachedItem, delay), delay); } else { if (!renderCache.isDisplayed(cachedItem, page)) { logger.debug("displaying cached item ", cachedItem); cachedItem.setVersion(sequence.incrementAndGet()); cachedItem.setPage(page); displayDiagram(cachedItem); } else { logger.debug("item already displayed ", cachedItem); } } } } }); } @NotNull protected RenderCommand getCommand(String selectedFile, final String source, final File baseDir, final int page, final int zoom, RenderCacheItem cachedItem, LazyApplicationPoolExecutor.Delay delay) { logger.debug("#getCommand selectedFile='", selectedFile, "', baseDir=", baseDir, ", page=", page, ", zoom=", zoom); int version = sequence.incrementAndGet(); return new MyRenderCommand(selectedFile, source, baseDir, page, zoom, cachedItem, version, delay); } private class MyRenderCommand extends RenderCommand { private final LazyApplicationPoolExecutor.Delay delay; public MyRenderCommand(String selectedFile, String source, File baseDir, int page, int zoom, RenderCacheItem cachedItem, int version, LazyApplicationPoolExecutor.Delay delay) { super(selectedFile, source, baseDir, page, zoom, cachedItem, version); this.delay = delay; } @Override public void postRenderOnEDT(final RenderResult imageResult, final ImageWithUrlData[] imagesWithData, final Map<File, Long> includedFiles) { if (delay == LazyApplicationPoolExecutor.Delay.NOW) { if (cachedItem != null) { renderCache.removeFromCache(cachedItem); } } RenderCacheItem newItem = new RenderCacheItem(sourceFilePath, source, baseDir, zoom, page, includedFiles, imageResult, imagesWithData, version); renderCache.addToCache(newItem); displayDiagram(newItem); } } public void displayDiagram(RenderCacheItem cacheItem) { if (renderCache.isOlderRequest(cacheItem)) { //ctrl+z with cached image vs older request in progress logger.debug("skipping displaying older result", cacheItem); return; } logger.debug("displaying item ", cacheItem); renderCache.setDisplayedItem(cacheItem); ImageWithUrlData[] imagesWithData = cacheItem.getImagesWithData(); RenderResult imageResult = cacheItem.getImageResult(); 
imagesPanel.removeAll(); if (this.page >= imageResult.getPages()) { this.page = -1; selectPageAction.setPage(page); } if (this.page == -1) { for (int i = 0; i < imagesWithData.length; i++) { displayImage(cacheItem, imageResult, i, imagesWithData[i]); } } else { displayImage(cacheItem, imageResult, page, imagesWithData[page]); } imagesPanel.revalidate(); imagesPanel.repaint(); } public void displayImage(RenderCacheItem cacheItem, RenderResult imageResult, int i, ImageWithUrlData imageWithData) { if (imageWithData == null) { logger.error("trying to display null image. selectedPage=" + page + ", nullPage=" + i + ", cacheItem=" + cacheItem); } logger.debug("displaying image ", i); PlantUmlLabel label = new PlantUmlLabel(imageWithData, i, imageResult.getRenderRequest()); addScrollBarListeners(label); if (i != 0) { imagesPanel.add(separator()); } imagesPanel.add(label); } public void applyNewSettings(PlantUmlSettings plantUmlSettings) { lazyExecutor.setDelay(plantUmlSettings.getRenderDelayAsInt()); renderCache.setMaxCacheSize(plantUmlSettings.getCacheSizeAsInt()); } private JSeparator separator() { JSeparator separator = new JSeparator(SwingConstants.HORIZONTAL); Dimension size = new Dimension(separator.getPreferredSize().width, 10); separator.setVisible(true); separator.setMaximumSize(size); separator.setPreferredSize(size); return separator; } public int getZoom() { return zoom; } public void setZoom(int zoom) { this.zoom = zoom; renderLater(LazyApplicationPoolExecutor.Delay.POST_DELAY); } public void setPage(int page) { if (page >= -1 && page < getNumPages()) { logger.debug("page ", page, " selected"); this.page = page; selectPageAction.setPage(page); renderLater(LazyApplicationPoolExecutor.Delay.POST_DELAY); } } public void nextPage() { setPage(this.page + 1); } public void prevPage() { setPage(this.page - 1); } public int getNumPages() { int pages = -1; RenderCacheItem last = renderCache.getDisplayedItem(); if (last != null) { RenderResult imageResult = last.getImageResult(); if (imageResult != null) { pages = imageResult.getPages(); } } return pages; } private boolean isProjectValid(Project project) { return project != null && !project.isDisposed(); } public JPanel getImagesPanel() { return imagesPanel; } }
#112 select page fix when focus is not in diagram file
src/org/plantuml/idea/toolwindow/PlantUmlToolWindow.java
#112 select page fix when focus is not in diagram file
Java
apache-2.0
f56c5478b5aca726ebd9645dff5b83e0dce0960e
0
gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom
package stroom.test.common.util.test; import stroom.util.jersey.WebTargetFactory; import stroom.util.logging.LogUtil; import stroom.util.shared.ResourcePaths; import stroom.util.shared.RestResource; import io.dropwizard.jersey.errors.ErrorMessage; import org.assertj.core.api.Assertions; import org.glassfish.jersey.client.ClientConfig; import org.glassfish.jersey.logging.LoggingFeature; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.server.monitoring.ApplicationEvent; import org.glassfish.jersey.server.monitoring.ApplicationEventListener; import org.glassfish.jersey.server.monitoring.RequestEvent; import org.glassfish.jersey.server.monitoring.RequestEventListener; import org.glassfish.jersey.test.JerseyTest; import org.glassfish.jersey.test.TestProperties; import org.glassfish.jersey.test.spi.TestContainerException; import org.glassfish.jersey.test.spi.TestContainerFactory; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.function.Function; import java.util.function.Supplier; import java.util.logging.Level; import java.util.stream.Collectors; import javax.ws.rs.WebApplicationException; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.Invocation; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Application; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriBuilder; import javax.ws.rs.ext.ExceptionMapper; @ExtendWith(MockitoExtension.class) public abstract class AbstractMultiNodeResourceTest<R extends RestResource> { private static final Logger LOGGER = LoggerFactory.getLogger(AbstractMultiNodeResourceTest.class); private static final WebTargetFactory WEB_TARGET_FACTORY = url -> ClientBuilder.newClient( new ClientConfig().register(LoggingFeature.class)) .target(url); private static final String CONTAINER_FACTORY = "org.glassfish.jersey.test.grizzly.GrizzlyTestContainerFactory"; private final List<TestNode> testNodes; private final Map<String, JerseyTest> nodeToJerseyTestMap = new HashMap<>(); private final Map<String, RequestListener> nodeToListenerMap = new HashMap<>(); public static List<TestNode> createNodeList(final int base) { return List.of( new TestNode("node1", base, true), new TestNode("node2", base + 1, true), new TestNode("node3", base + 2, false)); } /** * Uses the supplied nodes for testing. */ @SuppressWarnings("unused") protected AbstractMultiNodeResourceTest(final List<TestNode> testNodes) { // Force the container factory to ensure the jersey-test-framework-provider-grizzly2 // dependency is in place. Without forcing it, it will just try to use whatever is there // which may be the in memory one which won't work for multi node. // Tried using the jetty container factory but there was as dependency version mismatch // that causes a method not found error. 
System.setProperty(TestProperties.CONTAINER_FACTORY, CONTAINER_FACTORY); try { Class.forName(CONTAINER_FACTORY); } catch (ClassNotFoundException e) { throw new RuntimeException("You are missing a test runtime dependency for " + "jersey-test-framework-provider-grizzly2"); } this.testNodes = testNodes; } /** * @return The base path of the resource, e.g. /node/v1 */ public abstract String getResourceBasePath(); /** * This will be called during initNodes() and provides a means for the sub-class * to provide a fully mocked out implementation of the rest resource. The arguments * are provided so you can have mocks tailored to the node. */ public abstract R getRestResource(final TestNode node, final List<TestNode> allNodes, final Map<String, String> baseEndPointUrls); private String getFullResourcePath() { return ResourcePaths.buildAuthenticatedApiPath(getResourceBasePath()); } public String getBaseEndPointUrl(final TestNode node) { return "http://localhost:" + node.getPort(); } private Map<String, String> getBaseEndPointUrls() { return testNodes.stream() .collect(Collectors.toMap( TestNode::getNodeName, this::getBaseEndPointUrl)); } public void stopNodes() { nodeToJerseyTestMap.values().forEach(jerseyTest -> { try { jerseyTest.tearDown(); } catch (Exception e) { throw new RuntimeException(e); } }); } @AfterEach void afterEach() { stopNodes(); nodeToJerseyTestMap.clear(); nodeToListenerMap.clear(); } /** * Initialises all nodes the getTestNodes() * Calls getRestResource() for each node initialised. */ protected void initNodes() { initNodes(Integer.MAX_VALUE); } /** * Initialises the fist node in getTestNodes() * For use when your test does not require the calling out to other nodes. * Calls getRestResource() for the node being initialised. */ protected void initSingleNode() { initNodes(1); } private void initNodes(final int maxNodeCount) { testNodes.stream() .limit(maxNodeCount) .forEach(node -> { final String baseEndPointUrl = getBaseEndPointUrl(node); RequestListener requestListener = new RequestListener(node); nodeToListenerMap.put(node.getNodeName(), requestListener); final JerseyTest jerseyTest = new JerseyTestBuilder<>( () -> getRestResource(node, testNodes, getBaseEndPointUrls()), node.getPort(), requestListener) .build(); nodeToJerseyTestMap.put(node.getNodeName(), jerseyTest); try { if (node.isEnabled) { LOGGER.info("Starting node [{}] (enabled: {}) at {}", node.getNodeName(), node.isEnabled, baseEndPointUrl); jerseyTest.setUp(); } } catch (Exception e) { throw new RuntimeException("Error starting jersey test on " + baseEndPointUrl, e); } }); } /** * Override if you want to use more nodes or different ports */ public List<TestNode> getTestNodes() { return testNodes; } public List<RequestEvent> getRequestEvents(final String nodeName) { return nodeToListenerMap.get(nodeName).getRequestLog(); } /** * @return The JerseyTest instance for the first node */ public JerseyTest getJerseyTest() { return nodeToJerseyTestMap.get(testNodes.get(0).getNodeName()); } /** * @return The JerseyTest instance for the first node */ public JerseyTest getJerseyTest(final String nodeName) { return nodeToJerseyTestMap.get(nodeName); } public static WebTargetFactory webTargetFactory() { return WEB_TARGET_FACTORY; } public <T_RESP> T_RESP doGetTest(final String subPath, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... 
builderMethods) { LOGGER.info("Calling GET on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); return doTest(Invocation.Builder::get, subPath, responseType, expectedResponse, builderMethods); } public <T_REQ, T_RESP> T_RESP doPostTest(final String subPath, final T_REQ requestEntity, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling POST on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); return doTest(builder -> builder.post(Entity.json(requestEntity)), subPath, responseType, expectedResponse, builderMethods); } public <T_REQ> void doPostTest(final String subPath, final T_REQ requestEntity, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling POST on {}{}, passing {}", getResourceBasePath(), subPath, requestEntity); WebTarget webTarget = getJerseyTest() .target(getResourceBasePath()) .path(subPath); for (Function<WebTarget, WebTarget> method : builderMethods) { webTarget = method.apply(webTarget); } Invocation.Builder builder = webTarget .request(); Response response = builder.post(Entity.json(requestEntity)); if (!isSuccessful(response.getStatus())) { throw new RuntimeException(LogUtil.message("Error: {} {}", response.getStatus(), response)); } } public <T_REQ, T_RESP> T_RESP doPutTest(final String subPath, final T_REQ requestEntity, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling PUT on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); return doTest(builder -> builder.put(Entity.json(requestEntity)), subPath, responseType, expectedResponse, builderMethods); } public <T_REQ> void doPutTest(final String subPath, final T_REQ requestEntity, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling PUT on {}{}, passing {}", getResourceBasePath(), subPath, requestEntity); WebTarget webTarget = getJerseyTest() .target(getResourceBasePath()) .path(subPath); for (Function<WebTarget, WebTarget> method : builderMethods) { webTarget = method.apply(webTarget); } Invocation.Builder builder = webTarget .request(); Response response = builder.put(Entity.json(requestEntity)); if (!isSuccessful(response.getStatus())) { throw new RuntimeException(LogUtil.message("Error: {} {}", response.getStatus(), response)); } } public <T_RESP> T_RESP doDeleteTest(final String subPath, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling DELETE on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); return doTest(Invocation.Builder::delete, subPath, responseType, expectedResponse, builderMethods); } private <T_RESP> T_RESP doTest(final Function<Invocation.Builder, Response> operation, final String subPath, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... 
builderMethods) { WebTarget webTarget = getJerseyTest() .target(getResourceBasePath()) .path(subPath); for (Function<WebTarget, WebTarget> method : builderMethods) { webTarget = method.apply(webTarget); } final Invocation.Builder builder = webTarget .request(); final Response response = operation.apply(builder); if (!isSuccessful(response.getStatus())) { throw new RuntimeException(LogUtil.message("Error: {} {}", response.getStatus(), response)); } final T_RESP entity = response.readEntity(responseType); if (expectedResponse != null) { Assertions.assertThat(entity) .isEqualTo(expectedResponse); } return entity; } private <T_REQ, T_RESP> T_RESP doTest(final Function<Invocation.Builder, Response> operation, final String subPath, final T_REQ requestEntity, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling GET on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); WebTarget webTarget = getJerseyTest() .target(getResourceBasePath()) .path(subPath); for (Function<WebTarget, WebTarget> method : builderMethods) { webTarget = method.apply(webTarget); } Invocation.Builder builder = webTarget .request(); final Response response = operation.apply(builder); final T_RESP entity = response.readEntity(responseType); if (expectedResponse != null) { Assertions.assertThat(response) .isEqualTo(expectedResponse); } return entity; } public WebTarget getWebTarget(final String subPath) { return getJerseyTest() .target(getFullResourcePath()) .path(subPath); } public static <T> T createNamedMock(final Class<T> clazz, final TestNode node) { return Mockito.mock(clazz, clazz.getName() + "_" + node.getNodeName()); } private boolean isSuccessful(final int statusCode) { return statusCode >= 200 && statusCode < 300; } private static class JerseyTestBuilder<R extends RestResource> { private final Supplier<R> resourceSupplier; private final int port; private final ApplicationEventListener listener; public JerseyTestBuilder(final Supplier<R> resourceSupplier, final int port, final ApplicationEventListener listener) { this.resourceSupplier = resourceSupplier; this.port = port; this.listener = listener; } public JerseyTest build() { return new JerseyTest() { @Override protected TestContainerFactory getTestContainerFactory() throws TestContainerException { return super.getTestContainerFactory(); } @Override protected Application configure() { final LoggingFeature loggingFeature = new LoggingFeature( java.util.logging.Logger.getLogger(LoggingFeature.DEFAULT_LOGGER_NAME), Level.INFO, LoggingFeature.Verbosity.PAYLOAD_ANY, LoggingFeature.DEFAULT_MAX_ENTITY_SIZE); return new ResourceConfig() .register(resourceSupplier.get()) .register(listener) .register(new MyExceptionMapper()) // So we can get details of server side exceptions .register(loggingFeature); } @Override protected URI getBaseUri() { return UriBuilder .fromUri("http://localhost") .port(port) .path(ResourcePaths.API_ROOT_PATH) .build(); } }; } } public static class TestNode { private final String nodeName; private final int port; private final boolean isEnabled; public TestNode(final String nodeName, final int port, final boolean isEnabled) { this.nodeName = nodeName; this.port = port; this.isEnabled = isEnabled; } public String getNodeName() { return nodeName; } public int getPort() { return port; } public boolean isEnabled() { return isEnabled; } @SuppressWarnings("checkstyle:needbraces") @Override public boolean equals(final Object o) { if (this == o) { 
return true; } if (o == null || getClass() != o.getClass()) { return false; } final TestNode testNode = (TestNode) o; return port == testNode.port && isEnabled == testNode.isEnabled && Objects.equals(nodeName, testNode.nodeName); } @Override public int hashCode() { return Objects.hash(nodeName, port, isEnabled); } @Override public String toString() { return "TestNode{" + "nodeName='" + nodeName + '\'' + ", port=" + port + ", isEnabled=" + isEnabled + '}'; } } public static class RequestListener implements ApplicationEventListener { private final List<RequestEvent> requestLog = new ArrayList<>(); private final TestNode node; RequestListener(final TestNode node) { this.node = node; } @Override public void onEvent(final ApplicationEvent event) { LOGGER.debug("ApplicationEvent on node {}", node.getNodeName()); } @Override public RequestEventListener onRequest(final RequestEvent requestEvent) { LOGGER.debug("{} to {} request received on node {} ", requestEvent.getType(), requestEvent.getUriInfo().getPath(), node.getNodeName()); requestLog.add(requestEvent); return null; } public List<RequestEvent> getRequestLog() { return requestLog; } } private static class MyExceptionMapper implements ExceptionMapper<Throwable> { private static final Logger LOGGER = LoggerFactory.getLogger(MyExceptionMapper.class); @Override public Response toResponse(final Throwable exception) { if (exception instanceof WebApplicationException) { WebApplicationException wae = (WebApplicationException) exception; return wae.getResponse(); } else { return createExceptionResponse(Status.INTERNAL_SERVER_ERROR, exception); } } private Response createExceptionResponse(final Response.Status status, final Throwable throwable) { LOGGER.debug(throwable.getMessage(), throwable); return Response.status(status) .type(MediaType.APPLICATION_JSON_TYPE) .entity(new ErrorMessage(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), throwable.getMessage(), throwable.toString())) .build(); } } }
stroom-test-common/src/main/java/stroom/test/common/util/test/AbstractMultiNodeResourceTest.java
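The committed file above is an abstract multi-node REST test harness; a minimal sketch of how a concrete subclass might plug into it is shown below. NodeResource, the /node/v1 base path and the /ping call are hypothetical stand-ins introduced purely for illustration and are not part of the commit.

import java.util.List;
import java.util.Map;

import org.junit.jupiter.api.Test;

import stroom.util.shared.RestResource;

// Hypothetical resource interface, only here to give the harness a type parameter.
interface NodeResource extends RestResource {
    String ping();
}

class TestNodeResourceImpl extends AbstractMultiNodeResourceTest<NodeResource> {

    TestNodeResourceImpl() {
        super(createNodeList(8080)); // node1..node3 on ports 8080..8082, node3 disabled
    }

    @Override
    public String getResourceBasePath() {
        return "/node/v1"; // assumed base path for the resource under test
    }

    @Override
    public NodeResource getRestResource(final TestNode node,
                                        final List<TestNode> allNodes,
                                        final Map<String, String> baseEndPointUrls) {
        // One named mock per node so interactions can be verified node by node.
        return createNamedMock(NodeResource.class, node);
    }

    @Test
    void ping_firstNode() {
        initNodes();
        // GET /node/v1/ping on node1 and assert the deserialised body equals "pong"
        // (stubbing of the mocked resource is omitted for brevity).
        doGetTest("/ping", String.class, "pong");
    }
}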
package stroom.test.common.util.test; import stroom.util.jersey.WebTargetFactory; import stroom.util.logging.LogUtil; import stroom.util.shared.ResourcePaths; import stroom.util.shared.RestResource; import org.assertj.core.api.Assertions; import org.glassfish.jersey.client.ClientConfig; import org.glassfish.jersey.logging.LoggingFeature; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.server.monitoring.ApplicationEvent; import org.glassfish.jersey.server.monitoring.ApplicationEventListener; import org.glassfish.jersey.server.monitoring.RequestEvent; import org.glassfish.jersey.server.monitoring.RequestEventListener; import org.glassfish.jersey.test.JerseyTest; import org.glassfish.jersey.test.TestProperties; import org.glassfish.jersey.test.spi.TestContainerException; import org.glassfish.jersey.test.spi.TestContainerFactory; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.function.Function; import java.util.function.Supplier; import java.util.logging.Level; import java.util.stream.Collectors; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.Invocation; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Application; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; @ExtendWith(MockitoExtension.class) public abstract class AbstractMultiNodeResourceTest<R extends RestResource> { private static final Logger LOGGER = LoggerFactory.getLogger(AbstractMultiNodeResourceTest.class); private static final WebTargetFactory WEB_TARGET_FACTORY = url -> ClientBuilder.newClient( new ClientConfig().register(LoggingFeature.class)) .target(url); private static final String CONTAINER_FACTORY = "org.glassfish.jersey.test.grizzly.GrizzlyTestContainerFactory"; private final List<TestNode> testNodes; private final Map<String, JerseyTest> nodeToJerseyTestMap = new HashMap<>(); private final Map<String, RequestListener> nodeToListenerMap = new HashMap<>(); public static List<TestNode> createNodeList(final int base) { return List.of( new TestNode("node1", base, true), new TestNode("node2", base + 1, true), new TestNode("node3", base + 2, false)); } /** * Uses the supplied nodes for testing. */ @SuppressWarnings("unused") protected AbstractMultiNodeResourceTest(final List<TestNode> testNodes) { // Force the container factory to ensure the jersey-test-framework-provider-grizzly2 // dependency is in place. Without forcing it, it will just try to use whatever is there // which may be the in memory one which won't work for multi node. // Tried using the jetty container factory but there was as dependency version mismatch // that causes a method not found error. System.setProperty(TestProperties.CONTAINER_FACTORY, CONTAINER_FACTORY); try { Class.forName(CONTAINER_FACTORY); } catch (ClassNotFoundException e) { throw new RuntimeException("You are missing a test runtime dependency for " + "jersey-test-framework-provider-grizzly2"); } this.testNodes = testNodes; } /** * @return The base path of the resource, e.g. 
/node/v1 */ public abstract String getResourceBasePath(); /** * This will be called during initNodes() and provides a means for the sub-class * to provide a fully mocked out implementation of the rest resource. The arguments * are provided so you can have mocks tailored to the node. */ public abstract R getRestResource(final TestNode node, final List<TestNode> allNodes, final Map<String, String> baseEndPointUrls); private String getFullResourcePath() { return ResourcePaths.buildAuthenticatedApiPath(getResourceBasePath()); } public String getBaseEndPointUrl(final TestNode node) { return "http://localhost:" + node.getPort(); } private Map<String, String> getBaseEndPointUrls() { return testNodes.stream() .collect(Collectors.toMap( TestNode::getNodeName, this::getBaseEndPointUrl)); } public void stopNodes() { nodeToJerseyTestMap.values().forEach(jerseyTest -> { try { jerseyTest.tearDown(); } catch (Exception e) { throw new RuntimeException(e); } }); } @AfterEach void afterEach() { stopNodes(); nodeToJerseyTestMap.clear(); nodeToListenerMap.clear(); } /** * Initialises all nodes the getTestNodes() * Calls getRestResource() for each node initialised. */ protected void initNodes() { initNodes(Integer.MAX_VALUE); } /** * Initialises the fist node in getTestNodes() * For use when your test does not require the calling out to other nodes. * Calls getRestResource() for the node being initialised. */ protected void initSingleNode() { initNodes(1); } private void initNodes(final int maxNodeCount) { testNodes.stream() .limit(maxNodeCount) .forEach(node -> { final String baseEndPointUrl = getBaseEndPointUrl(node); RequestListener requestListener = new RequestListener(node); nodeToListenerMap.put(node.getNodeName(), requestListener); final JerseyTest jerseyTest = new JerseyTestBuilder<>( () -> getRestResource(node, testNodes, getBaseEndPointUrls()), node.getPort(), requestListener) .build(); nodeToJerseyTestMap.put(node.getNodeName(), jerseyTest); try { if (node.isEnabled) { LOGGER.info("Starting node [{}] (enabled: {}) at {}", node.getNodeName(), node.isEnabled, baseEndPointUrl); jerseyTest.setUp(); } } catch (Exception e) { throw new RuntimeException("Error starting jersey test on " + baseEndPointUrl, e); } }); } /** * Override if you want to use more nodes or different ports */ public List<TestNode> getTestNodes() { return testNodes; } public List<RequestEvent> getRequestEvents(final String nodeName) { return nodeToListenerMap.get(nodeName).getRequestLog(); } /** * @return The JerseyTest instance for the first node */ public JerseyTest getJerseyTest() { return nodeToJerseyTestMap.get(testNodes.get(0).getNodeName()); } /** * @return The JerseyTest instance for the first node */ public JerseyTest getJerseyTest(final String nodeName) { return nodeToJerseyTestMap.get(nodeName); } public static WebTargetFactory webTargetFactory() { return WEB_TARGET_FACTORY; } public <T_RESP> T_RESP doGetTest(final String subPath, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling GET on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); return doTest(Invocation.Builder::get, subPath, responseType, expectedResponse, builderMethods); } public <T_REQ, T_RESP> T_RESP doPostTest(final String subPath, final T_REQ requestEntity, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... 
builderMethods) { LOGGER.info("Calling POST on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); return doTest(builder -> builder.post(Entity.json(requestEntity)), subPath, responseType, expectedResponse, builderMethods); } public <T_REQ> void doPostTest(final String subPath, final T_REQ requestEntity, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling POST on {}{}, passing {}", getResourceBasePath(), subPath, requestEntity); WebTarget webTarget = getJerseyTest() .target(getResourceBasePath()) .path(subPath); for (Function<WebTarget, WebTarget> method : builderMethods) { webTarget = method.apply(webTarget); } Invocation.Builder builder = webTarget .request(); Response response = builder.post(Entity.json(requestEntity)); if (!isSuccessful(response.getStatus())) { throw new RuntimeException(LogUtil.message("Error: {} {}", response.getStatus(), response)); } } public <T_REQ, T_RESP> T_RESP doPutTest(final String subPath, final T_REQ requestEntity, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling PUT on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); return doTest(builder -> builder.put(Entity.json(requestEntity)), subPath, responseType, expectedResponse, builderMethods); } public <T_REQ> void doPutTest(final String subPath, final T_REQ requestEntity, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling PUT on {}{}, passing {}", getResourceBasePath(), subPath, requestEntity); WebTarget webTarget = getJerseyTest() .target(getResourceBasePath()) .path(subPath); for (Function<WebTarget, WebTarget> method : builderMethods) { webTarget = method.apply(webTarget); } Invocation.Builder builder = webTarget .request(); Response response = builder.put(Entity.json(requestEntity)); if (!isSuccessful(response.getStatus())) { throw new RuntimeException(LogUtil.message("Error: {} {}", response.getStatus(), response)); } } public <T_RESP> T_RESP doDeleteTest(final String subPath, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... builderMethods) { LOGGER.info("Calling DELETE on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); return doTest(Invocation.Builder::delete, subPath, responseType, expectedResponse, builderMethods); } private <T_RESP> T_RESP doTest(final Function<Invocation.Builder, Response> operation, final String subPath, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... builderMethods) { WebTarget webTarget = getJerseyTest() .target(getResourceBasePath()) .path(subPath); for (Function<WebTarget, WebTarget> method : builderMethods) { webTarget = method.apply(webTarget); } final Invocation.Builder builder = webTarget .request(); final Response response = operation.apply(builder); if (!isSuccessful(response.getStatus())) { throw new RuntimeException(LogUtil.message("Error: {} {}", response.getStatus(), response)); } final T_RESP entity = response.readEntity(responseType); if (expectedResponse != null) { Assertions.assertThat(entity) .isEqualTo(expectedResponse); } return entity; } private <T_REQ, T_RESP> T_RESP doTest(final Function<Invocation.Builder, Response> operation, final String subPath, final T_REQ requestEntity, final Class<T_RESP> responseType, final T_RESP expectedResponse, final Function<WebTarget, WebTarget>... 
builderMethods) { LOGGER.info("Calling GET on {}{}, expecting {}", getResourceBasePath(), subPath, expectedResponse); WebTarget webTarget = getJerseyTest() .target(getResourceBasePath()) .path(subPath); for (Function<WebTarget, WebTarget> method : builderMethods) { webTarget = method.apply(webTarget); } Invocation.Builder builder = webTarget .request(); final Response response = operation.apply(builder); final T_RESP entity = response.readEntity(responseType); if (expectedResponse != null) { Assertions.assertThat(response) .isEqualTo(expectedResponse); } return entity; } public WebTarget getWebTarget(final String subPath) { return getJerseyTest() .target(getFullResourcePath()) .path(subPath); } public static <T> T createNamedMock(final Class<T> clazz, final TestNode node) { return Mockito.mock(clazz, clazz.getName() + "_" + node.getNodeName()); } private boolean isSuccessful(final int statusCode) { return statusCode >= 200 && statusCode < 300; } private static class JerseyTestBuilder<R extends RestResource> { private final Supplier<R> resourceSupplier; private final int port; private final ApplicationEventListener listener; public JerseyTestBuilder(final Supplier<R> resourceSupplier, final int port, final ApplicationEventListener listener) { this.resourceSupplier = resourceSupplier; this.port = port; this.listener = listener; } public JerseyTest build() { return new JerseyTest() { @Override protected TestContainerFactory getTestContainerFactory() throws TestContainerException { return super.getTestContainerFactory(); } @Override protected Application configure() { return new ResourceConfig() .register(resourceSupplier.get()) .register(listener) .register( new LoggingFeature( java.util.logging.Logger.getLogger(LoggingFeature.DEFAULT_LOGGER_NAME), Level.INFO, LoggingFeature.Verbosity.PAYLOAD_ANY, LoggingFeature.DEFAULT_MAX_ENTITY_SIZE)); } @Override protected URI getBaseUri() { return UriBuilder .fromUri("http://localhost") .port(port) .path(ResourcePaths.API_ROOT_PATH) .build(); } }; } } public static class TestNode { private final String nodeName; private final int port; private final boolean isEnabled; public TestNode(final String nodeName, final int port, final boolean isEnabled) { this.nodeName = nodeName; this.port = port; this.isEnabled = isEnabled; } public String getNodeName() { return nodeName; } public int getPort() { return port; } public boolean isEnabled() { return isEnabled; } @SuppressWarnings("checkstyle:needbraces") @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final TestNode testNode = (TestNode) o; return port == testNode.port && isEnabled == testNode.isEnabled && Objects.equals(nodeName, testNode.nodeName); } @Override public int hashCode() { return Objects.hash(nodeName, port, isEnabled); } @Override public String toString() { return "TestNode{" + "nodeName='" + nodeName + '\'' + ", port=" + port + ", isEnabled=" + isEnabled + '}'; } } public static class RequestListener implements ApplicationEventListener { private final List<RequestEvent> requestLog = new ArrayList<>(); private final TestNode node; RequestListener(final TestNode node) { this.node = node; } @Override public void onEvent(final ApplicationEvent event) { LOGGER.debug("ApplicationEvent on node {}", node.getNodeName()); } @Override public RequestEventListener onRequest(final RequestEvent requestEvent) { LOGGER.debug("{} to {} request received on node {} ", requestEvent.getType(), 
requestEvent.getUriInfo().getPath(), node.getNodeName()); requestLog.add(requestEvent); return null; } public List<RequestEvent> getRequestLog() { return requestLog; } } }
Add ex mapper to AbstractMultiNodeResourceTest
stroom-test-common/src/main/java/stroom/test/common/util/test/AbstractMultiNodeResourceTest.java
Add ex mapper to AbstractMultiNodeResourceTest
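The change recorded above registers a JAX-RS ExceptionMapper on the test ResourceConfig so server-side exceptions come back to the test client as a JSON error body instead of an opaque 500. A minimal sketch of that pattern, independent of the stroom classes (the mapper name and the inline JSON entity here are illustrative, not the committed code):

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;

// Maps any uncaught Throwable to a JSON 500 so the test client can read the cause.
class ThrowableMapper implements ExceptionMapper<Throwable> {
    @Override
    public Response toResponse(final Throwable exception) {
        if (exception instanceof WebApplicationException) {
            // Preserve responses that already carry a meaningful status (400, 404, ...).
            return ((WebApplicationException) exception).getResponse();
        }
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                .type(MediaType.APPLICATION_JSON_TYPE)
                .entity("{\"message\":\"" + exception.getMessage() + "\"}")
                .build();
    }
}

// Registered on the test application alongside the resource under test, e.g.:
// new ResourceConfig().register(resource).register(new ThrowableMapper());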
Java
apache-2.0
6a1fff3585485efca123e7d84d4fa730a4df185d
0
gstevey/gradle,gstevey/gradle,gstevey/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle,lsmaira/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,lsmaira/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,gstevey/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,lsmaira/gradle,gstevey/gradle,gstevey/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,lsmaira/gradle,lsmaira/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,robinverduijn/gradle
/* * Copyright 2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.plugins.ide.internal.tooling; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.internal.project.ProjectTaskLister; import org.gradle.api.internal.tasks.PublicTaskSpecification; import org.gradle.api.internal.tasks.TaskContainerInternal; import org.gradle.tooling.internal.gradle.DefaultGradleProject; import org.gradle.tooling.internal.impl.LaunchableGradleProjectTask; import org.gradle.tooling.internal.impl.LaunchableGradleTask; import org.gradle.tooling.provider.model.ToolingModelBuilder; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; /** * Builds the GradleProject that contains the project hierarchy and task information */ public class GradleProjectBuilder implements ToolingModelBuilder { private final ProjectTaskLister taskLister; public GradleProjectBuilder(ProjectTaskLister taskLister) { this.taskLister = taskLister; } public boolean canBuild(String modelName) { return modelName.equals("org.gradle.tooling.model.GradleProject"); } public Object buildAll(String modelName, Project project) { return buildHierarchy(project.getRootProject()); } public DefaultGradleProject buildAll(Project project) { return buildHierarchy(project.getRootProject()); } private DefaultGradleProject<LaunchableGradleTask> buildHierarchy(Project project) { List<DefaultGradleProject<LaunchableGradleTask>> children = new ArrayList<DefaultGradleProject<LaunchableGradleTask>>(); for (Project child : project.getChildProjects().values()) { children.add(buildHierarchy(child)); } DefaultGradleProject<LaunchableGradleTask> gradleProject = new DefaultGradleProject<LaunchableGradleTask>() .setPath(project.getPath()) .setName(project.getName()) .setDescription(project.getDescription()) .setBuildDirectory(project.getBuildDir()) .setProjectDirectory(project.getProjectDir()) .setChildren(children); gradleProject.getBuildScript().setSourceFile(project.getBuildFile()); gradleProject.setTasks(tasks(gradleProject, (TaskContainerInternal)project.getTasks())); for (DefaultGradleProject child : children) { child.setParent(gradleProject); } return gradleProject; } private static List<LaunchableGradleTask> tasks(DefaultGradleProject owner, TaskContainerInternal tasks) { List<LaunchableGradleTask> out = new LinkedList<LaunchableGradleTask>(); for (String taskName : tasks.getNames()) { Task t = tasks.findByName(taskName); if(t!=null){ out.add(new LaunchableGradleProjectTask() .setProject(owner) .setPath(t.getPath()) .setName(t.getName()) .setDisplayName(t.toString()) .setDescription(t.getDescription()) .setPublic(PublicTaskSpecification.INSTANCE.isSatisfiedBy(t)) ); } } return out; } }
subprojects/ide/src/main/groovy/org/gradle/plugins/ide/internal/tooling/GradleProjectBuilder.java
/* * Copyright 2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.plugins.ide.internal.tooling; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.internal.project.ProjectTaskLister; import org.gradle.api.internal.tasks.PublicTaskSpecification; import org.gradle.tooling.internal.gradle.DefaultGradleProject; import org.gradle.tooling.internal.impl.LaunchableGradleProjectTask; import org.gradle.tooling.internal.impl.LaunchableGradleTask; import org.gradle.tooling.provider.model.ToolingModelBuilder; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; /** * Builds the GradleProject that contains the project hierarchy and task information */ public class GradleProjectBuilder implements ToolingModelBuilder { private final ProjectTaskLister taskLister; public GradleProjectBuilder(ProjectTaskLister taskLister) { this.taskLister = taskLister; } public boolean canBuild(String modelName) { return modelName.equals("org.gradle.tooling.model.GradleProject"); } public Object buildAll(String modelName, Project project) { return buildHierarchy(project.getRootProject()); } public DefaultGradleProject buildAll(Project project) { return buildHierarchy(project.getRootProject()); } private DefaultGradleProject<LaunchableGradleTask> buildHierarchy(Project project) { List<DefaultGradleProject<LaunchableGradleTask>> children = new ArrayList<DefaultGradleProject<LaunchableGradleTask>>(); for (Project child : project.getChildProjects().values()) { children.add(buildHierarchy(child)); } DefaultGradleProject<LaunchableGradleTask> gradleProject = new DefaultGradleProject<LaunchableGradleTask>() .setPath(project.getPath()) .setName(project.getName()) .setDescription(project.getDescription()) .setBuildDirectory(project.getBuildDir()) .setProjectDirectory(project.getProjectDir()) .setChildren(children); gradleProject.getBuildScript().setSourceFile(project.getBuildFile()); gradleProject.setTasks(tasks(gradleProject, project.getTasks())); for (DefaultGradleProject child : children) { child.setParent(gradleProject); } return gradleProject; } private static List<LaunchableGradleTask> tasks(DefaultGradleProject owner, Iterable<Task> tasks) { List<LaunchableGradleTask> out = new LinkedList<LaunchableGradleTask>(); for (String taskName : tasks.getNames()) { Task t = tasks.findByName(taskName); if(t!=null){ out.add(new LaunchableGradleProjectTask() .setProject(owner) .setPath(t.getPath()) .setName(t.getName()) .setDisplayName(t.toString()) .setDescription(t.getDescription()) .setPublic(PublicTaskSpecification.INSTANCE.isSatisfiedBy(t)) ); } } return out; } }
fix merge issues
subprojects/ide/src/main/groovy/org/gradle/plugins/ide/internal/tooling/GradleProjectBuilder.java
fix merge issues
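The "fix merge issues" record above re-types the tasks(...) helper's parameter from Iterable<Task> back to TaskContainerInternal: the old signature could not compile because getNames() and findByName(String) are container methods, not Iterable methods. A reduced sketch of the look-up-by-name pattern, written against the public TaskContainer type for illustration rather than the Gradle-internal one used in the commit:

import java.util.ArrayList;
import java.util.List;

import org.gradle.api.Task;
import org.gradle.api.tasks.TaskContainer;

class TaskNames {
    // Resolve each task by name and skip any that cannot be found.
    static List<String> taskPaths(final TaskContainer tasks) {
        final List<String> paths = new ArrayList<String>();
        for (String name : tasks.getNames()) {
            final Task t = tasks.findByName(name);
            if (t != null) {
                paths.add(t.getPath());
            }
        }
        return paths;
    }
}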
Java
apache-2.0
4e90f8f37995ad6ccfd7fc7eb6fd472d255e9e48
0
sreedishps/pintail,rajubairishetti/pintail,InMobi/pintail,InMobi/pintail,rajubairishetti/pintail,sreedishps/pintail
package com.inmobi.messaging.consumer.examples; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.Map; import com.inmobi.messaging.ClientConfig; import com.inmobi.messaging.Message; import com.inmobi.messaging.consumer.MessageConsumer; import com.inmobi.messaging.consumer.MessageConsumerFactory; import com.inmobi.messaging.publisher.AbstractMessagePublisher; import com.inmobi.messaging.publisher.MessagePublisherFactory; import com.inmobi.messaging.util.ConsumerUtil; import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.io.Text; public class StreamingBenchmark { static final String DELIMITER = "/t"; static final SimpleDateFormat LogDateFormat = new SimpleDateFormat( "yyyy:MM:dd hh:mm:ss"); static void printUsage() { System.out.println( "Usage: StreamingBenchmark " + " [-producer <topic-name> <no-of-msgs> <sleepMillis-every-msg> ]" + " [-consumer <no-of-producers> <no-of-msgs> [<hadoopconsumerflag>] [<timezone>]]"); System.exit(-1); } public static void main(String[] args) throws Exception { if (args.length < 2) { printUsage(); } long maxSent = -1; int sleepMillis = -1; String timezone = null; String topic = null; int numProducers = 1; boolean runProducer = false; boolean runConsumer = false; boolean hadoopConsumer = false; if (args.length >= 3) { int consumerOptionIndex = -1; if (args[0].equals("-producer")) { topic = args[1]; maxSent = Long.parseLong(args[2]); sleepMillis = Integer.parseInt(args[3]); runProducer = true; consumerOptionIndex = 4; } else { consumerOptionIndex = 0; } if (args.length > consumerOptionIndex) { if (args[consumerOptionIndex].equals("-consumer")) { numProducers = Integer.parseInt(args[consumerOptionIndex + 1]); maxSent = Long.parseLong(args[consumerOptionIndex + 2]); if (args.length > consumerOptionIndex + 3) { hadoopConsumer = (Integer.parseInt(args[consumerOptionIndex + 3]) > 0); } if (args.length > consumerOptionIndex + 4) { timezone = args[consumerOptionIndex + 4]; } runConsumer = true; } } } else { printUsage(); } assert(runProducer || runConsumer == true); Producer producer = null; Consumer consumer = null; StatusLogger statusPrinter; if (runProducer) { System.out.println("Using topic: " + topic); producer = createProducer(topic, maxSent, sleepMillis); producer.start(); } if (runConsumer) { ClientConfig config = ClientConfig.loadFromClasspath( MessageConsumerFactory.MESSAGE_CLIENT_CONF_FILE); Date now; if (timezone != null) { now = ConsumerUtil.getCurrenDateForTimeZone(timezone); } else { now = Calendar.getInstance().getTime(); } System.out.println("Starting from " + now); // create and start consumer assert(config != null); consumer = createConsumer(config, maxSent, now, numProducers, hadoopConsumer); consumer.start(); } statusPrinter = new StatusLogger(producer, consumer); statusPrinter.start(); if (runProducer) { assert (producer != null); producer.join(); if (!runConsumer) { statusPrinter.stopped = true; } } if (runConsumer) { assert (consumer !=null); consumer.join(); statusPrinter.stopped = true; } statusPrinter.join(); if (runConsumer) { if (!consumer.success) { System.out.println("Data validation FAILED!"); } else { System.out.println("Data validation SUCCESS!"); } } System.exit(0); } static Producer createProducer(String topic, long maxSent, int sleepMillis) throws IOException { return new Producer(topic, maxSent, sleepMillis); } 
static Consumer createConsumer(ClientConfig config, long maxSent, Date startTime, int numProducers, boolean hadoopConsumer) throws IOException { return new Consumer(config, maxSent, startTime, numProducers, hadoopConsumer); } static class Producer extends Thread { volatile AbstractMessagePublisher publisher; String topic; long maxSent; int sleepMillis; Producer(String topic, long maxSent, int sleepMillis) throws IOException { this.topic = topic; this.maxSent = maxSent; this.sleepMillis = sleepMillis; publisher = (AbstractMessagePublisher) MessagePublisherFactory.create(); } @Override public void run() { System.out.println("Producer started!"); for (long i = 1; i <= maxSent; i++) { long time = System.currentTimeMillis(); String s = i + DELIMITER + Long.toString(time); Message msg = new Message(ByteBuffer.wrap(s.getBytes())); publisher.publish(topic, msg); try { Thread.sleep(sleepMillis); } catch (InterruptedException e) { e.printStackTrace(); return; } } // wait for complete while (publisher.getStats().getInFlight() > 0) { try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); return; } } try { Thread.sleep(1000); } catch (InterruptedException e) { e.printStackTrace(); return; } publisher.close(); System.out.println("Producer closed"); } } static class Consumer extends Thread { //volatile Set<Long> seqSet = new HashSet<Long>(); final Map<Long, Integer> messageToProducerCount; final MessageConsumer consumer; final long maxSent; volatile long received = 0; volatile long totalLatency = 0; int numProducers; boolean success = false; boolean hadoopConsumer = false; Consumer(ClientConfig config, long maxSent, Date startTime, int numProducers, boolean hadoopConsumer) throws IOException { this.maxSent = maxSent; messageToProducerCount = new HashMap<Long, Integer>((int)maxSent); this.numProducers = numProducers; consumer = MessageConsumerFactory.create(config, startTime); this.hadoopConsumer = hadoopConsumer; } private String getMessage(Message msg) throws IOException { byte[] byteArray = msg.getData().array(); if (!hadoopConsumer) { return new String(byteArray); } else { Text text = new Text(); ByteArrayInputStream bais = new ByteArrayInputStream(byteArray); text.readFields(new DataInputStream(bais)); return new String(Base64.decodeBase64(text.getBytes())); } } @Override public void run() { System.out.println("Consumer started!"); while (true) { Message msg = null; try { msg = consumer.next(); received++; String s = getMessage(msg); String[] ar = s.split(DELIMITER); Long seq = Long.parseLong(ar[0]); Integer pcount = messageToProducerCount.get(seq); if (pcount == null) { messageToProducerCount.put(seq, new Integer(1)); } else { pcount++; messageToProducerCount.put(seq, pcount); } long sentTime = Long.parseLong(ar[1]); totalLatency += System.currentTimeMillis() - sentTime; if (received == maxSent * numProducers) { break; } } catch (InterruptedException e) { e.printStackTrace(); return; } catch (Exception e) { System.out.println("Got exception for " + new String(msg.getData().array())); e.printStackTrace(); } } if (messageToProducerCount.size() == maxSent) { for (Integer pcount : messageToProducerCount.values()) { if (pcount != numProducers) { success = false; break; } else { success = true; } } } else { success = false; } consumer.close(); System.out.println("Consumer closed"); } } static class StatusLogger extends Thread { volatile boolean stopped; Producer producer; Consumer consumer; StatusLogger(Producer producer, Consumer consumer) { this.producer = producer; 
this.consumer = consumer; } @Override public void run() { while(!stopped) { try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); return; } StringBuffer sb = new StringBuffer(); sb.append(LogDateFormat.format(System.currentTimeMillis())); if (producer != null) { constructProducerString(sb); } if (consumer != null) { constructConsumerString(sb); } System.out.println(sb.toString()); } } void constructProducerString(StringBuffer sb) { sb.append(" Invocations:" + producer.publisher.getStats(). getInvocationCount()); sb.append(" Inflight:" + producer.publisher.getStats().getInFlight()); sb.append(" SentSuccess:" + producer.publisher.getStats(). getSuccessCount()); sb.append(" UnhandledExceptions:" + producer.publisher.getStats(). getUnhandledExceptionCount()); } void constructConsumerString(StringBuffer sb) { sb.append(" Received:" + consumer.received); sb.append(" UniqueReceived:"); sb.append(consumer.messageToProducerCount.size()); if (consumer.received != 0) { sb.append(" MeanLatency(ms):" + (consumer.totalLatency / consumer.received)); } } } }
messaging-client-core/src/main/java/com/inmobi/messaging/consumer/examples/StreamingBenchmark.java
package com.inmobi.messaging.consumer.examples; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.Map; import com.inmobi.messaging.ClientConfig; import com.inmobi.messaging.Message; import com.inmobi.messaging.consumer.MessageConsumer; import com.inmobi.messaging.consumer.MessageConsumerFactory; import com.inmobi.messaging.publisher.AbstractMessagePublisher; import com.inmobi.messaging.publisher.MessagePublisherFactory; import com.inmobi.messaging.util.ConsumerUtil; import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.io.Text; public class StreamingBenchmark { static final String DELIMITER = "/t"; static final SimpleDateFormat LogDateFormat = new SimpleDateFormat( "yyyy:MM:dd hh:mm:ss"); static void printUsage() { System.out.println( "Usage: StreamingBenchmark " + " [-producer <topic-name> <no-of-msgs> <sleepMillis-every-msg> ]" + " [-consumer <no-of-producers> <no-of-msgs> [<hadoopconsumerflag>] [<timezone>]]"); System.exit(-1); } public static void main(String[] args) throws Exception { if (args.length < 2) { printUsage(); } long maxSent = -1; int sleepMillis = -1; String timezone = null; String topic = null; int numProducers = 1; boolean runProducer = false; boolean runConsumer = false; boolean hadoopConsumer = false; if (args.length >= 3) { int consumerOptionIndex = -1; if (args[0].equals("-producer")) { topic = args[1]; maxSent = Long.parseLong(args[2]); sleepMillis = Integer.parseInt(args[3]); runProducer = true; consumerOptionIndex = 4; } else { consumerOptionIndex = 0; } if (args.length > consumerOptionIndex) { if (args[consumerOptionIndex].equals("-consumer")) { numProducers = Integer.parseInt(args[consumerOptionIndex + 1]); maxSent = Long.parseLong(args[consumerOptionIndex + 2]); if (args.length > consumerOptionIndex + 3) { hadoopConsumer = (Integer.parseInt(args[consumerOptionIndex + 3]) > 0); } if (args.length > consumerOptionIndex + 4) { timezone = args[consumerOptionIndex + 4]; } runConsumer = true; } } } else { printUsage(); } assert(runProducer || runConsumer == true); Producer producer = null; Consumer consumer = null; StatusLogger statusPrinter; if (runProducer) { System.out.println("Using topic: " + topic); producer = createProducer(topic, maxSent, sleepMillis); producer.start(); } if (runConsumer) { ClientConfig config = ClientConfig.loadFromClasspath( MessageConsumerFactory.MESSAGE_CLIENT_CONF_FILE); Date now; if (timezone != null) { now = ConsumerUtil.getCurrenDateForTimeZone(timezone); } else { now = Calendar.getInstance().getTime(); } System.out.println("Starting from " + now); // create and start consumer assert(config != null); consumer = createConsumer(config, maxSent, now, numProducers, hadoopConsumer); consumer.start(); } statusPrinter = new StatusLogger(producer, consumer); statusPrinter.start(); if (runProducer) { assert (producer != null); producer.join(); if (!runConsumer) { statusPrinter.stopped = true; } } if (runConsumer) { assert (consumer !=null); consumer.join(); statusPrinter.stopped = true; } statusPrinter.join(); if (runConsumer) { if (!consumer.success) { System.out.println("Data validation FAILED!"); } else { System.out.println("Data validation SUCCESS!"); } } System.exit(0); } static Producer createProducer(String topic, long maxSent, int sleepMillis) throws IOException { return new Producer(topic, maxSent, sleepMillis); } 
static Consumer createConsumer(ClientConfig config, long maxSent, Date startTime, int numProducers, boolean hadoopConsumer) throws IOException { return new Consumer(config, maxSent, startTime, numProducers, hadoopConsumer); } static class Producer extends Thread { volatile AbstractMessagePublisher publisher; String topic; long maxSent; int sleepMillis; Producer(String topic, long maxSent, int sleepMillis) throws IOException { this.topic = topic; this.maxSent = maxSent; this.sleepMillis = sleepMillis; publisher = (AbstractMessagePublisher) MessagePublisherFactory.create(); } @Override public void run() { System.out.println("Producer started!"); for (long i = 1; i <= maxSent; i++) { long time = System.currentTimeMillis(); String s = i + DELIMITER + Long.toString(time); Message msg = new Message(ByteBuffer.wrap(s.getBytes())); publisher.publish(topic, msg); try { Thread.sleep(sleepMillis); } catch (InterruptedException e) { e.printStackTrace(); return; } } // wait for complete while (publisher.getStats().getInFlight() >= 0) { try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); return; } } try { Thread.sleep(1000); } catch (InterruptedException e) { e.printStackTrace(); return; } publisher.close(); System.out.println("Producer closed"); } } static class Consumer extends Thread { //volatile Set<Long> seqSet = new HashSet<Long>(); final Map<Long, Integer> messageToProducerCount; final MessageConsumer consumer; final long maxSent; volatile long received = 0; volatile long totalLatency = 0; int numProducers; boolean success = false; boolean hadoopConsumer = false; Consumer(ClientConfig config, long maxSent, Date startTime, int numProducers, boolean hadoopConsumer) throws IOException { this.maxSent = maxSent; messageToProducerCount = new HashMap<Long, Integer>((int)maxSent); this.numProducers = numProducers; consumer = MessageConsumerFactory.create(config, startTime); this.hadoopConsumer = hadoopConsumer; } private String getMessage(Message msg) throws IOException { byte[] byteArray = msg.getData().array(); if (!hadoopConsumer) { return new String(byteArray); } else { Text text = new Text(); ByteArrayInputStream bais = new ByteArrayInputStream(byteArray); text.readFields(new DataInputStream(bais)); return new String(Base64.decodeBase64(text.getBytes())); } } @Override public void run() { System.out.println("Consumer started!"); while (true) { Message msg = null; try { msg = consumer.next(); received++; String s = getMessage(msg); String[] ar = s.split(DELIMITER); Long seq = Long.parseLong(ar[0]); Integer pcount = messageToProducerCount.get(seq); if (pcount == null) { messageToProducerCount.put(seq, new Integer(1)); } else { pcount++; messageToProducerCount.put(seq, pcount); } long sentTime = Long.parseLong(ar[1]); totalLatency += System.currentTimeMillis() - sentTime; if (received == maxSent * numProducers) { break; } } catch (InterruptedException e) { e.printStackTrace(); return; } catch (Exception e) { System.out.println("Got exception for " + new String(msg.getData().array())); e.printStackTrace(); } } if (messageToProducerCount.size() == maxSent) { for (Integer pcount : messageToProducerCount.values()) { if (pcount != numProducers) { success = false; break; } else { success = true; } } } else { success = false; } consumer.close(); System.out.println("Consumer closed"); } } static class StatusLogger extends Thread { volatile boolean stopped; Producer producer; Consumer consumer; StatusLogger(Producer producer, Consumer consumer) { this.producer = producer; 
this.consumer = consumer; } @Override public void run() { while(!stopped) { try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); return; } StringBuffer sb = new StringBuffer(); sb.append(LogDateFormat.format(System.currentTimeMillis())); if (producer != null) { constructProducerString(sb); } if (consumer != null) { constructConsumerString(sb); } System.out.println(sb.toString()); } } void constructProducerString(StringBuffer sb) { sb.append(" Invocations:" + producer.publisher.getStats(). getInvocationCount()); sb.append(" Inflight:" + producer.publisher.getStats().getInFlight()); sb.append(" SentSuccess:" + producer.publisher.getStats(). getSuccessCount()); sb.append(" UnhandledExceptions:" + producer.publisher.getStats(). getUnhandledExceptionCount()); } void constructConsumerString(StringBuffer sb) { sb.append(" Received:" + consumer.received); sb.append(" UniqueReceived:"); sb.append(consumer.messageToProducerCount.size()); if (consumer.received != 0) { sb.append(" MeanLatency(ms):" + (consumer.totalLatency / consumer.received)); } } } }
Producer should wait for positive inflight messages, not zero.
messaging-client-core/src/main/java/com/inmobi/messaging/consumer/examples/StreamingBenchmark.java
Producer should wait for positive inflight messages, not zero.
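The one-character fix recorded above changes the producer's drain loop from getInFlight() >= 0 (which can never become false, so the loop would spin forever) to getInFlight() > 0, so it exits once nothing remains in flight. A self-contained sketch of the corrected wait-for-drain pattern, with a hypothetical counter standing in for publisher.getStats().getInFlight():

import java.util.concurrent.atomic.AtomicLong;

class DrainExample {
    // Stand-in for the publisher's in-flight message count.
    private final AtomicLong inFlight = new AtomicLong();

    void awaitDrain() throws InterruptedException {
        // Correct: loop only while messages are still outstanding.
        while (inFlight.get() > 0) {
            Thread.sleep(100);
        }
        // A ">= 0" condition here would never terminate, since the count cannot go negative.
    }
}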
Java
apache-2.0
9bf615cdff9dd84aa165af71d70a91367a930cdd
0
schrodingercat/emptytalk_projects
package et.naruto.base; import java.io.PrintWriter; import java.io.StringWriter; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.TreeSet; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.ZooDefs.Ids; import org.apache.zookeeper.ZooKeeper; public class Util { public static enum UNIQ { VALUE; } public static enum DIAG { Log; public _Diag d=new _Diag(); public _Diag _=d; public _Diag __=d; public _Diag ___=d; public _Diag ____=d; public _Diag _____=d; public _Diag ______=d; public _Diag _______=d; public _Diag ________=d; public _Diag _________=d; public _Diag __________=d; public _Diag ___________=d; public _Diag ____________=d; public _Diag _____________=d; public _Diag ______________=d; public _Diag _______________=d; public _Diag ________________=d; public _Diag _________________=d; public _Diag __________________=d; public _Diag ___________________=d; public _Diag ____________________=d; public _Diag _____________________=d; public _Diag ______________________=d; public _Diag _______________________=d; public _Diag ________________________=d; public _Diag _________________________=d; public _Diag __________________________=d; public _Diag ___________________________=d; public _Diag ____________________________=d; public _Diag _____________________________=d; public _Diag ______________________________=d; public _Diag _______________________________=d; public _Diag ________________________________=d; public _Diag _________________________________=d; public _Diag __________________________________=d; public _Diag ___________________________________=d; public _Diag ____________________________________=d; public _Diag _____________________________________=d; public _Diag ______________________________________=d; public _Diag _______________________________________=d; public _Diag ________________________________________=d; public _Diag _________________________________________=d; public _Diag __________________________________________=d; public _Diag ___________________________________________=d; public _Diag ____________________________________________=d; public _Diag _____________________________________________=d; public _Diag ______________________________________________=d; public _Diag _______________________________________________=d; public _Diag ________________________________________________=d; public _Diag _________________________________________________=d; public _Diag __________________________________________________=d; public _Diag ___________________________________________________=d; public _Diag ____________________________________________________=d; public _Diag _____________________________________________________=d; public _Diag ______________________________________________________=d; public _Diag _______________________________________________________=d; public _Diag ________________________________________________________=d; public _Diag _________________________________________________________=d; public _Diag __________________________________________________________=d; public _Diag ___________________________________________________________=d; public _Diag ____________________________________________________________=d; public _Diag _____________________________________________________________=d; public _Diag ______________________________________________________________=d; public _Diag _______________________________________________________________=d; public _Diag 
________________________________________________________________=d; /*public _Diag d=new _Diag() { private static final Logger l=Logger.getLogger(DIAG.class); protected void DoInfo(String msg){ l.info(msg); }; protected void DoError(String msg){ l.error(msg); }; protected void DoDebug(String msg){ l.debug(msg); }; }*/ } public static class _Diag { public static enum LEVEL { INFO("[INFO] "), ERROR("[ERROR]"), DEBUG("[DEBUG]"); private final String tag; private LEVEL(String tag) { this.tag=tag; } public String toString() { return this.tag; } } public static final String version=""; public static final String class_name=_Diag.class.getName(); private static final SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); protected void DoOutput(LEVEL level,String msg){ System.out.println(String.format("%s[%s] %s",level.toString(),df.format(new Date()),msg)); }; public static String GetParentStackClass() { if(true) { StackTraceElement[] ss=Thread.currentThread().getStackTrace(); for(int i=0;i<ss.length;i++) { if(i>0) { String classname=ss[i].getClassName(); if(classname.indexOf(class_name)==-1) { return classname+":"+ss[i].getMethodName(); } } } return "Unknow.ClassName"; } else { return ""; } } private static String PassMsg(String msg) { return String.format( "%s[VER=%s][TID=%s][CLS=%s]", msg.replaceAll("\n","<[n]>"), version, Thread.currentThread().getId(), GetParentStackClass() ); } private static String throwable_string(Throwable e) { StringWriter sw=new StringWriter(); e.printStackTrace(new PrintWriter(sw,true)); return sw.toString(); } public void out(LEVEL level,String msg) { DoOutput(level,PassMsg(msg)); } public void info(String msg) { out(LEVEL.INFO,msg); } public void I(String msg,Object... args) { info(String.format(msg,args)); } public void debug(String msg) { out(LEVEL.DEBUG,msg); } public void D(String msg,Object... args) { debug(String.format(msg,args)); } public void error(String msg) { out(LEVEL.ERROR,msg); } public void E(String msg,Object... args) { error(String.format(msg,args)); } public RuntimeException dig_error(String msg,Throwable e) { RuntimeException re = new RuntimeException(e); if (e.getCause() != null) { re.setStackTrace(e.getCause().getStackTrace()); } else { re.setStackTrace(e.getStackTrace()); } out(LEVEL.ERROR,msg+throwable_string(re)); return re; } public void pass_error(String msg,Throwable e) { RuntimeException re = dig_error(msg,e); throw re; } public void Assert(boolean checked, String info) { if (!checked) { try { throw new RuntimeException(PassMsg("Assert for: " + info)); } catch (Exception e) { pass_error("",e); } } } public void Error() { Assert(false, ""); } public void Error(String msg,Object... 
args) { Assert(false,String.format(msg,args)); } } public static void Sleep(long t) { try { Thread.sleep(t); } catch (Exception e) { DIAG.Log.d.pass_error("",e); } } public static class ZKArgs { public final String connectString; public final long sessionTimeout=10*1000; public ZKArgs(final String connectString) { this.connectString=connectString; } public ZooKeeper Create() { try { return new ZooKeeper("localhost:2181",10*1000,null); } catch (Exception e) { DIAG.Log.d.pass_error("",e); return null; } } } public static void ForceDeleteNode(ZooKeeper zk,String path) { try { List<String> childs=zk.getChildren(path,false); for(String child:childs) { ForceDeleteNode(zk,path+"/"+child); } zk.delete(path,-1); } catch (Exception e) { DIAG.Log.d.dig_error("",e); } } public static void ForceCreateNode(ZooKeeper zk,String path,String data) { ForceCreateNode(zk,path,data,false); } public static String GetNodeData(ZooKeeper zk,String path) { try { return new String(zk.getData(path,false,null),"UTF-8"); } catch (Exception e) { DIAG.Log.d.dig_error("",e); return null; } } public static TreeSet<String> GetNodeChilds(ZooKeeper zk,String path) { try { return new TreeSet(zk.getChildren(path,false)); } catch (Exception e) { DIAG.Log.d.dig_error("",e); return null; } } public static void ForceCreateNode(ZooKeeper zk,String path,String data,boolean persist) { try { zk.create(path,data.getBytes(),Ids.OPEN_ACL_UNSAFE,persist?CreateMode.PERSISTENT:CreateMode.EPHEMERAL); } catch (Exception e) { DIAG.Log.d.dig_error("",e); } } public static String Long2String(final long value) { if(value>=0) { return String.format("%08x",value); } else { return String.format("-%08x",-value); } } public static long String2Long(final String value) { return Long.valueOf(value,16); } public static String Bytes2String(final byte[] data) { try { return new String(data,"UTF-8"); } catch (Exception e) { DIAG.Log.d.pass_error("",e); return null; } } public static String GetPathName(String path) { return Paths.get(path).getFileName().toString(); } }
Naruto/src/main/java/et/naruto/base/Util.java
package et.naruto.base; import java.io.PrintWriter; import java.io.StringWriter; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.TreeSet; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.ZooDefs.Ids; import org.apache.zookeeper.ZooKeeper; public class Util { public static enum UNIQ { VALUE; } public static enum DIAG { Log; public Diag d=new Diag(); public Diag _=d; public Diag __=d; public Diag ___=d; public Diag ____=d; public Diag _____=d; public Diag ______=d; public Diag _______=d; public Diag ________=d; public Diag _________=d; public Diag __________=d; public Diag ___________=d; public Diag ____________=d; public Diag _____________=d; public Diag ______________=d; public Diag _______________=d; public Diag ________________=d; public Diag _________________=d; public Diag __________________=d; public Diag ___________________=d; public Diag ____________________=d; public Diag _____________________=d; public Diag ______________________=d; public Diag _______________________=d; public Diag ________________________=d; public Diag _________________________=d; public Diag __________________________=d; public Diag ___________________________=d; public Diag ____________________________=d; public Diag _____________________________=d; public Diag ______________________________=d; public Diag _______________________________=d; public Diag ________________________________=d; public Diag _________________________________=d; public Diag __________________________________=d; public Diag ___________________________________=d; public Diag ____________________________________=d; public Diag _____________________________________=d; public Diag ______________________________________=d; public Diag _______________________________________=d; public Diag ________________________________________=d; public Diag _________________________________________=d; public Diag __________________________________________=d; public Diag ___________________________________________=d; public Diag ____________________________________________=d; public Diag _____________________________________________=d; public Diag ______________________________________________=d; public Diag _______________________________________________=d; public Diag ________________________________________________=d; public Diag _________________________________________________=d; public Diag __________________________________________________=d; public Diag ___________________________________________________=d; public Diag ____________________________________________________=d; public Diag _____________________________________________________=d; public Diag ______________________________________________________=d; public Diag _______________________________________________________=d; public Diag ________________________________________________________=d; public Diag _________________________________________________________=d; public Diag __________________________________________________________=d; public Diag ___________________________________________________________=d; public Diag ____________________________________________________________=d; public Diag _____________________________________________________________=d; public Diag ______________________________________________________________=d; public Diag _______________________________________________________________=d; public Diag ________________________________________________________________=d; 
/*public Diag d=new Diag() { private static final Logger l=Logger.getLogger(DIAG.class); protected void DoInfo(String msg){ l.info(msg); }; protected void DoError(String msg){ l.error(msg); }; protected void DoDebug(String msg){ l.debug(msg); }; }*/ } public static class Diag { public static enum LEVEL { INFO("[INFO] "), ERROR("[ERROR]"), DEBUG("[DEBUG]"); private final String tag; private LEVEL(String tag) { this.tag=tag; } public String toString() { return this.tag; } } public static final String version=""; public static final String class_name=Diag.class.getName(); private static final SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); protected void DoOutput(LEVEL level,String msg){ System.out.println(String.format("%s[%s] %s",level.toString(),df.format(new Date()),msg)); }; public static String GetParentStackClass() { if(true) { StackTraceElement[] ss=Thread.currentThread().getStackTrace(); for(int i=0;i<ss.length;i++) { if(i>0) { String classname=ss[i].getClassName(); if(classname.indexOf(class_name)==-1) { return classname+":"+ss[i].getMethodName(); } } } return "Unknow.ClassName"; } else { return ""; } } private static String PassMsg(String msg) { return String.format( "%s[VER=%s][TID=%s][CLS=%s]", msg.replaceAll("\n","<[n]>"), version, Thread.currentThread().getId(), GetParentStackClass() ); } private static String throwable_string(Throwable e) { StringWriter sw=new StringWriter(); e.printStackTrace(new PrintWriter(sw,true)); return sw.toString(); } public void out(LEVEL level,String msg) { DoOutput(level,PassMsg(msg)); } public void info(String msg) { out(LEVEL.INFO,msg); } public void I(String msg,Object... args) { info(String.format(msg,args)); } public void debug(String msg) { out(LEVEL.DEBUG,msg); } public void D(String msg,Object... args) { debug(String.format(msg,args)); } public void error(String msg) { out(LEVEL.ERROR,msg); } public void E(String msg,Object... args) { error(String.format(msg,args)); } public RuntimeException dig_error(String msg,Throwable e) { RuntimeException re = new RuntimeException(e); if (e.getCause() != null) { re.setStackTrace(e.getCause().getStackTrace()); } else { re.setStackTrace(e.getStackTrace()); } out(LEVEL.ERROR,msg+throwable_string(re)); return re; } public void pass_error(String msg,Throwable e) { RuntimeException re = dig_error(msg,e); throw re; } public void Assert(boolean checked, String info) { if (!checked) { try { throw new RuntimeException(PassMsg("Assert for: " + info)); } catch (Exception e) { pass_error("",e); } } } public void Error() { Assert(false, ""); } public void Error(String msg,Object... 
args) { Assert(false,String.format(msg,args)); } } public static void Sleep(long t) { try { Thread.sleep(t); } catch (Exception e) { DIAG.Log.d.pass_error("",e); } } public static class ZKArgs { public final String connectString; public final long sessionTimeout=10*1000; public ZKArgs(final String connectString) { this.connectString=connectString; } public ZooKeeper Create() { try { return new ZooKeeper("localhost:2181",10*1000,null); } catch (Exception e) { DIAG.Log.d.pass_error("",e); return null; } } } public static void ForceDeleteNode(ZooKeeper zk,String path) { try { List<String> childs=zk.getChildren(path,false); for(String child:childs) { ForceDeleteNode(zk,path+"/"+child); } zk.delete(path,-1); } catch (Exception e) { DIAG.Log.d.dig_error("",e); } } public static void ForceCreateNode(ZooKeeper zk,String path,String data) { ForceCreateNode(zk,path,data,false); } public static String GetNodeData(ZooKeeper zk,String path) { try { return new String(zk.getData(path,false,null),"UTF-8"); } catch (Exception e) { DIAG.Log.d.dig_error("",e); return null; } } public static TreeSet<String> GetNodeChilds(ZooKeeper zk,String path) { try { return new TreeSet(zk.getChildren(path,false)); } catch (Exception e) { DIAG.Log.d.dig_error("",e); return null; } } public static void ForceCreateNode(ZooKeeper zk,String path,String data,boolean persist) { try { zk.create(path,data.getBytes(),Ids.OPEN_ACL_UNSAFE,persist?CreateMode.PERSISTENT:CreateMode.EPHEMERAL); } catch (Exception e) { DIAG.Log.d.dig_error("",e); } } public static String Long2String(final long value) { if(value>=0) { return String.format("%08x",value); } else { return String.format("-%08x",-value); } } public static long String2Long(final String value) { return Long.valueOf(value,16); } public static String Bytes2String(final byte[] data) { try { return new String(data,"UTF-8"); } catch (Exception e) { DIAG.Log.d.pass_error("",e); return null; } } public static String GetPathName(String path) { return Paths.get(path).getFileName().toString(); } }
fix a bug for windows
Naruto/src/main/java/et/naruto/base/Util.java
fix a bug for windows
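The record above ends the Util.java helpers of the Naruto project: a small Diag logger plus static ZooKeeper node utilities (ForceCreateNode, GetNodeData, ForceDeleteNode, Sleep). For context, a minimal usage sketch of those helpers follows. It assumes the static helpers live on the top-level Util class, as the file path suggests; the connect string, node path, and payload are illustrative placeholders, not values taken from the commit.

// Hypothetical usage of the Util helpers shown in the record above.
// "localhost:2181", "/util-demo" and the payload are placeholders for illustration only.
import org.apache.zookeeper.ZooKeeper;
import et.naruto.base.Util;

public class UtilSketch {
    public static void main(String[] args) throws Exception {
        ZooKeeper zk = new ZooKeeper("localhost:2181", 10 * 1000, null);
        Util.Sleep(1000); // crude wait for the session to establish; a real client would use a Watcher

        // Create an ephemeral node, read it back, then delete it (recursively if it had children).
        Util.ForceCreateNode(zk, "/util-demo", "hello");
        String data = Util.GetNodeData(zk, "/util-demo"); // "hello"
        Util.ForceDeleteNode(zk, "/util-demo");

        zk.close();
    }
}

This is only a sketch under the stated assumptions; error handling in the helpers themselves goes through the record's DIAG logger rather than checked exceptions.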
Java
apache-2.0
caa99cec9fb313adbc6d8c35f6d9ef69403e699c
0
hbs/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform,hbs/warp10-platform,hbs/warp10-platform,hbs/warp10-platform,StevenLeRoux/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform,cityzendata/warp10-platform
// // Copyright 2016 Cityzen Data // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package io.warp10.script.functions; import io.warp10.WarpDist; import io.warp10.continuum.TimeSource; import io.warp10.continuum.Tokens; import io.warp10.continuum.egress.EgressFetchHandler; import io.warp10.continuum.geo.GeoDirectoryClient; import io.warp10.continuum.gts.GTSDecoder; import io.warp10.continuum.gts.GTSHelper; import io.warp10.continuum.gts.GeoTimeSerie; import io.warp10.continuum.gts.GeoTimeSerie.TYPE; import io.warp10.continuum.sensision.SensisionConstants; import io.warp10.continuum.store.Constants; import io.warp10.continuum.store.DirectoryClient; import io.warp10.continuum.store.GTSDecoderIterator; import io.warp10.continuum.store.MetadataIterator; import io.warp10.continuum.store.StoreClient; import io.warp10.continuum.store.thrift.data.MetaSet; import io.warp10.continuum.store.thrift.data.Metadata; import io.warp10.crypto.CryptoUtils; import io.warp10.crypto.KeyStore; import io.warp10.crypto.OrderPreservingBase64; import io.warp10.crypto.SipHashInline; import io.warp10.quasar.token.thrift.data.ReadToken; import io.warp10.script.NamedWarpScriptFunction; import io.warp10.script.WarpScriptStackFunction; import io.warp10.script.WarpScriptException; import io.warp10.script.WarpScriptStack; import io.warp10.sensision.Sensision; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import java.util.zip.GZIPInputStream; import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.commons.lang3.JavaVersion; import org.apache.commons.lang3.SystemUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.thrift.TDeserializer; import org.apache.thrift.TException; import org.apache.thrift.protocol.TCompactProtocol; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; import com.geoxp.GeoXPLib.GeoXPShape; import com.google.common.base.Charsets; /** * Fetch GeoTimeSeries from continuum * FIXME(hbs): we need to retrieve an OAuth token, where do we put it? * * The top of the stack must contain a list of the following parameters * * @param token The OAuth 2.0 token to use for data retrieval * @param classSelector Class selector. * @param labelsSelectors Map of label name to label selector. * @param now Most recent timestamp to consider (in us since the Epoch) * @param timespan Width of time period to consider (in us). Timestamps at or before now - timespan will be ignored. 
* * The last two parameters can be replaced by String parameters representing the end and start ISO8601 timestamps */ public class FETCH extends NamedWarpScriptFunction implements WarpScriptStackFunction { private static final String PARAM_CLASS = "class"; /** * Extra classes to retrieve after Directory/GeoDirectory have been called */ private static final String PARAM_EXTRA = "extra"; private static final String PARAM_LABELS = "labels"; private static final String PARAM_SELECTOR = "selector"; private static final String PARAM_SELECTORS = "selectors"; private static final String PARAM_SELECTOR_PAIRS = "selpairs"; private static final String PARAM_TOKEN = "token"; private static final String PARAM_END = "end"; private static final String PARAM_START = "start"; private static final String PARAM_COUNT = "count"; private static final String PARAM_TIMESPAN = "timespan"; private static final String PARAM_TYPE = "type"; private static final String PARAM_GEO = "geo"; private static final String PARAM_GEODIR = "geodir"; private static final String PARAM_GEOOP = "geoop"; private static final String PARAM_GEOOP_IN = "in"; private static final String PARAM_GEOOP_OUT = "out"; private static final String PARAM_WRITE_TIMESTAMP = "wtimestamp"; private static final String PARAM_SHOWUUID = "showuuid"; private static final String PARAM_TYPEATTR = "typeattr"; private static final String PARAM_METASET = "metaset"; public static final String POSTFETCH_HOOK = "postfetch"; private DateTimeFormatter fmt = ISODateTimeFormat.dateTimeParser(); private WarpScriptStackFunction listTo = new LISTTO(""); private final boolean fromArchive; private final TYPE forcedType; private final long[] SIPHASH_CLASS; private final long[] SIPHASH_LABELS; private final byte[] AES_METASET; public FETCH(String name, boolean fromArchive, TYPE type) { super(name); this.fromArchive = fromArchive; this.forcedType = type; KeyStore ks = null; try { ks = WarpDist.getKeyStore(); } catch (Throwable t) { // Catch NoClassDefFound } if (null != ks) { this.SIPHASH_CLASS = SipHashInline.getKey(ks.getKey(KeyStore.SIPHASH_CLASS)); this.SIPHASH_LABELS = SipHashInline.getKey(ks.getKey(KeyStore.SIPHASH_LABELS)); this.AES_METASET = ks.getKey(KeyStore.AES_METASETS); } else { this.SIPHASH_CLASS = null; this.SIPHASH_LABELS = null; this.AES_METASET = null; } } @Override public Object apply(WarpScriptStack stack) throws WarpScriptException { // // Extract parameters from the stack // Object top = stack.peek(); // // Handle the new (as of 20150805) parameter passing mechanism as a map // Map<String,Object> params = null; if (top instanceof Map) { stack.pop(); params = paramsFromMap(stack, (Map<String,Object>) top); } if (top instanceof List) { if (5 != ((List) top).size()) { stack.drop(); throw new WarpScriptException(getName() + " expects 5 parameters."); } // // Explode list and remove its size // listTo.apply(stack); stack.drop(); } if (null == params) { params = new HashMap<String, Object>(); // // Extract time span // Object oStop = stack.pop(); Object oStart = stack.pop(); long endts; long timespan; if (oStart instanceof String && oStop instanceof String) { long start; long stop; if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { start = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(oStart.toString()); } else { start = fmt.parseDateTime((String) oStart).getMillis() * Constants.TIME_UNITS_PER_MS; } if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { stop = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(oStop.toString()); } else { 
stop = fmt.parseDateTime((String) oStop).getMillis() * Constants.TIME_UNITS_PER_MS; } if (start < stop) { endts = stop; timespan = stop - start; } else { endts = start; timespan = start - stop; } } else if (oStart instanceof Long && oStop instanceof Long) { endts = (long) oStart; timespan = (long) oStop; } else { throw new WarpScriptException("Invalid timespan specification."); } params.put(PARAM_END, endts); if (timespan < 0) { params.put(PARAM_COUNT, -timespan); } else { params.put(PARAM_TIMESPAN, timespan); } // // Extract labels selector // Object oLabelsSelector = stack.pop(); if (!(oLabelsSelector instanceof Map)) { throw new WarpScriptException("Label selectors must be a map."); } Map<String,String> labelSelectors = new HashMap<String,String>((Map<String,String>) oLabelsSelector); params.put(PARAM_LABELS, labelSelectors); // // Extract class selector // Object oClassSelector = stack.pop(); if (!(oClassSelector instanceof String)) { throw new WarpScriptException("Class selector must be a string."); } String classSelector = (String) oClassSelector; params.put(PARAM_CLASS, classSelector); // // Extract token // Object oToken = stack.pop(); if (!(oToken instanceof String)) { throw new WarpScriptException("Token must be a string."); } String token = (String) oToken; params.put(PARAM_TOKEN, token); } StoreClient gtsStore = stack.getStoreClient(); DirectoryClient directoryClient = stack.getDirectoryClient(); GeoTimeSerie base = null; GeoTimeSerie[] bases = null; String typelabel = (String) params.get(PARAM_TYPEATTR); if (null != typelabel) { bases = new GeoTimeSerie[4]; } ReadToken rtoken = Tokens.extractReadToken(params.get(PARAM_TOKEN).toString()); List<String> clsSels = new ArrayList<String>(); List<Map<String,String>> lblsSels = new ArrayList<Map<String,String>>(); MetaSet metaset = null; List<Metadata> metadatas = null; Iterator<Metadata> iter = null; if (params.containsKey(PARAM_METASET)) { metaset = (MetaSet) params.get(PARAM_METASET); iter = metaset.getMetadatas().iterator(); } else { if (params.containsKey(PARAM_SELECTOR_PAIRS)) { for (Pair<Object,Object> pair: (List<Pair<Object,Object>>) params.get(PARAM_SELECTOR_PAIRS)) { clsSels.add(pair.getLeft().toString()); Map<String,String> labelSelectors = (Map<String,String>) pair.getRight(); labelSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken)); lblsSels.add((Map<String,String>) labelSelectors); } } else { Map<String,String> labelSelectors = (Map<String,String>) params.get(PARAM_LABELS); labelSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken)); clsSels.add(params.get(PARAM_CLASS).toString()); lblsSels.add(labelSelectors); } try { metadatas = directoryClient.find(clsSels, lblsSels); iter = metadatas.iterator(); } catch (IOException ioe) { try { iter = directoryClient.iterator(clsSels, lblsSels); } catch (Exception e) { throw new WarpScriptException(e); } } } metadatas = new ArrayList<Metadata>(); List<GeoTimeSerie> series = new ArrayList<GeoTimeSerie>(); AtomicLong fetched = (AtomicLong) stack.getAttribute(WarpScriptStack.ATTRIBUTE_FETCH_COUNT); long fetchLimit = (long) stack.getAttribute(WarpScriptStack.ATTRIBUTE_FETCH_LIMIT); long gtsLimit = (long) stack.getAttribute(WarpScriptStack.ATTRIBUTE_GTS_LIMIT); AtomicLong gtscount = (AtomicLong) stack.getAttribute(WarpScriptStack.ATTRIBUTE_GTS_COUNT); // Variables to keep track of the last Metadata and fetched count Metadata lastMetadata = null; long lastCount = 0L; try { while(iter.hasNext()) { metadatas.add(iter.next()); if (gtscount.incrementAndGet() > gtsLimit) { 
throw new WarpScriptException(getName() + " exceeded limit of " + gtsLimit + " Geo Time Series, current count is " + gtscount); } if (metadatas.size() < EgressFetchHandler.FETCH_BATCHSIZE && iter.hasNext()) { continue; } // // Filter the retrieved Metadata according to geo // if (params.containsKey(PARAM_GEO)) { GeoDirectoryClient geoclient = stack.getGeoDirectoryClient(); long end = (long) params.get(PARAM_END); long start = Long.MIN_VALUE; if (params.containsKey(PARAM_TIMESPAN)) { start = end - (long) params.get(PARAM_TIMESPAN); } boolean inside = false; if (PARAM_GEOOP_IN.equals(params.get(PARAM_GEOOP))) { inside = true; } try { metadatas = geoclient.filter((String) params.get(PARAM_GEODIR), metadatas, (GeoXPShape) params.get(PARAM_GEO), inside, start, end); } catch (IOException ioe) { throw new WarpScriptException(ioe); } } // // Generate extra Metadata if PARAM_EXTRA is set // if (params.containsKey(PARAM_EXTRA)) { Set<Metadata> withextra = new HashSet<Metadata>(); withextra.addAll(metadatas); for (Metadata meta: metadatas) { for (String cls: (Set<String>) params.get(PARAM_EXTRA)) { // The following is safe, the constructor allocates new maps Metadata metadata = new Metadata(meta); metadata.setName(cls); metadata.setClassId(GTSHelper.classId(this.SIPHASH_CLASS, cls)); metadata.setLabelsId(GTSHelper.labelsId(this.SIPHASH_LABELS, metadata.getLabels())); withextra.add(metadata); } } metadatas.clear(); metadatas.addAll(withextra); } // // We assume that GTS will be fetched in a continuous way, i.e. without having a GTSDecoder from one // then one from another, then one from the first one. // long timespan = params.containsKey(PARAM_TIMESPAN) ? (long) params.get(PARAM_TIMESPAN) : - ((long) params.get(PARAM_COUNT)); TYPE type = (TYPE) params.get(PARAM_TYPE); if (null != this.forcedType) { if (null != type) { throw new WarpScriptException(getName() + " type of fetched GTS cannot be changed."); } type = this.forcedType; } boolean writeTimestamp = Boolean.TRUE.equals(params.get(PARAM_WRITE_TIMESTAMP)); boolean showUUID = Boolean.TRUE.equals(params.get(PARAM_SHOWUUID)); TYPE lastType = TYPE.UNDEFINED; try (GTSDecoderIterator gtsiter = gtsStore.fetch(rtoken, metadatas, (long) params.get(PARAM_END), timespan, fromArchive, writeTimestamp)) { while(gtsiter.hasNext()) { GTSDecoder decoder = gtsiter.next(); boolean identical = true; if (null == lastMetadata || !lastMetadata.equals(decoder.getMetadata())) { lastMetadata = decoder.getMetadata(); identical = false; lastCount = 0; lastType = TYPE.UNDEFINED; } GeoTimeSerie gts; // // If we should ventilate per type, do so now // if (null != typelabel) { Map<String,String> labels = new HashMap<String,String>(decoder.getMetadata().getLabels()); labels.remove(Constants.PRODUCER_LABEL); labels.remove(Constants.OWNER_LABEL); java.util.UUID uuid = null; if (showUUID) { uuid = new java.util.UUID(decoder.getClassId(), decoder.getLabelsId()); } long count = 0; Metadata decoderMeta = decoder.getMetadata(); while(decoder.next()) { // If we've read enough data, exit if (identical && timespan < 0 && lastCount + count >= -timespan) { break; } count++; long ts = decoder.getTimestamp(); long location = decoder.getLocation(); long elevation = decoder.getElevation(); Object value = decoder.getValue(); int gtsidx = 0; String typename = "DOUBLE"; if (value instanceof Long) { gtsidx = 1; typename = "LONG"; } else if (value instanceof Boolean) { gtsidx = 2; typename = "BOOLEAN"; } else if (value instanceof String) { gtsidx = 3; typename = "STRING"; } base = bases[gtsidx]; if 
(null == base || !base.getMetadata().getName().equals(decoderMeta.getName()) || !base.getMetadata().getLabels().equals(decoderMeta.getLabels())) { bases[gtsidx] = new GeoTimeSerie(); base = bases[gtsidx]; series.add(base); base.setLabels(decoder.getLabels()); base.getMetadata().putToAttributes(typelabel, typename); base.setName(decoder.getName()); if (null != uuid) { base.getMetadata().putToAttributes(Constants.UUID_ATTRIBUTE, uuid.toString()); } } GTSHelper.setValue(base, ts, location, elevation, value, false); } if (fetched.addAndGet(count) > fetchLimit) { Map<String,String> sensisionLabels = new HashMap<String, String>(); sensisionLabels.put(SensisionConstants.SENSISION_LABEL_CONSUMERID, Tokens.getUUID(rtoken.getBilledId())); Sensision.update(SensisionConstants.SENSISION_CLASS_EINSTEIN_FETCHCOUNT_EXCEEDED, sensisionLabels, 1); throw new WarpScriptException(getName() + " exceeded limit of " + fetchLimit + " datapoints, current count is " + fetched.get()); } lastCount += count; continue; } if (null != type) { gts = decoder.decode(type); } else { // // We need to decode using the same type as the previous decoder for the same GTS // Otherwise, if it happens that the current decoder starts with a value of another // type then the merge will not take into account this decoder as the decoded GTS // will be of a different type. if (identical && lastType != TYPE.UNDEFINED) { gts = decoder.decode(lastType); } else { gts = decoder.decode(); } lastType = gts.getType(); } if (identical && timespan < 0 && lastCount + GTSHelper.nvalues(gts) > -timespan) { // We would add too many datapoints, we will shrink the GTS. // As it it sorted in reverse order of the ticks (since the datapoints are organized // this way in HBase), we just need to shrink the GTS. gts = GTSHelper.shrinkTo(gts, (int) Math.max(-timespan - lastCount, 0)); } lastCount += GTSHelper.nvalues(gts); // // Remove producer/owner labels // // // Add a .uuid attribute if instructed to do so // if (showUUID) { java.util.UUID uuid = new java.util.UUID(gts.getClassId(), gts.getLabelsId()); gts.getMetadata().putToAttributes(Constants.UUID_ATTRIBUTE, uuid.toString()); } Map<String,String> labels = new HashMap<String, String>(); labels.putAll(gts.getMetadata().getLabels()); labels.remove(Constants.PRODUCER_LABEL); labels.remove(Constants.OWNER_LABEL); gts.setLabels(labels); // // If it's the first GTS, take it as is. // if (null == base) { base = gts; } else { // // If name and labels are identical to the previous GTS, merge them // Otherwise add 'base' to the stack and set it to 'gts'. 
// if (!base.getMetadata().getName().equals(gts.getMetadata().getName()) || !base.getMetadata().getLabels().equals(gts.getMetadata().getLabels())) { series.add(base); base = gts; } else { base = GTSHelper.merge(base, gts); } } if (fetched.addAndGet(gts.size()) > fetchLimit) { Map<String,String> sensisionLabels = new HashMap<String, String>(); sensisionLabels.put(SensisionConstants.SENSISION_LABEL_CONSUMERID, Tokens.getUUID(rtoken.getBilledId())); Sensision.update(SensisionConstants.SENSISION_CLASS_EINSTEIN_FETCHCOUNT_EXCEEDED, sensisionLabels, 1); throw new WarpScriptException(getName() + " exceeded limit of " + fetchLimit + " datapoints, current count is " + fetched.get()); //break; } } } catch (WarpScriptException ee) { throw ee; } catch (Throwable t) { throw new WarpScriptException(t); } // // If there is one current GTS, push it onto the stack (only if not ventilating per type) // if (null != base && null == typelabel) { series.add(base); } // // Reset state // base = null; metadatas.clear(); } } catch (Throwable t) { throw t; } finally { if (iter instanceof MetadataIterator) { try { ((MetadataIterator) iter).close(); } catch (Exception e) { } } } stack.push(series); // // Apply a possible postfetch hook // if (rtoken.getHooksSize() > 0 && rtoken.getHooks().containsKey(POSTFETCH_HOOK)) { stack.execMulti(rtoken.getHooks().get(POSTFETCH_HOOK)); } return stack; } private Map<String,Object> paramsFromMap(WarpScriptStack stack, Map<String,Object> map) throws WarpScriptException { Map<String,Object> params = new HashMap<String, Object>(); // // Handle the case where a MetaSet was passed as this will // modify some other parameters // MetaSet metaset = null; if (map.containsKey(PARAM_METASET)) { if (null == AES_METASET) { throw new WarpScriptException(getName() + " MetaSet support not available."); } Object ms = map.get(PARAM_METASET); if (!(ms instanceof byte[])) { // Decode byte[] decoded = OrderPreservingBase64.decode(ms.toString().getBytes(Charsets.US_ASCII)); // Decrypt byte[] decrypted = CryptoUtils.unwrap(AES_METASET, decoded); // Decompress try { ByteArrayOutputStream out = new ByteArrayOutputStream(decrypted.length); InputStream in = new GZIPInputStream(new ByteArrayInputStream(decrypted)); byte[] buf = new byte[1024]; while(true) { int len = in.read(buf); if (len < 0) { break; } out.write(buf, 0, len); } in.close(); out.close(); ms = out.toByteArray(); } catch (IOException e) { throw new WarpScriptException(getName() + " encountered an invalid MetaSet."); } } metaset = new MetaSet(); TDeserializer deser = new TDeserializer(new TCompactProtocol.Factory()); try { deser.deserialize(metaset, (byte[]) ms); } catch (TException te) { throw new WarpScriptException(getName() + " was unable to decode the provided MetaSet."); } // // Check if MetaSet has expired // if (metaset.getExpiry() < System.currentTimeMillis()) { throw new WarpScriptException(getName() + " MetaSet has expired."); } // Attempt to extract token, this will raise an exception if token has expired or was revoked ReadToken rtoken = Tokens.extractReadToken(metaset.getToken()); params.put(PARAM_METASET, metaset); params.put(PARAM_TOKEN, metaset.getToken()); } if (!params.containsKey(PARAM_TOKEN)) { if (!map.containsKey(PARAM_TOKEN)) { throw new WarpScriptException(getName() + " Missing '" + PARAM_TOKEN + "' parameter"); } params.put(PARAM_TOKEN, map.get(PARAM_TOKEN)); } if (map.containsKey(PARAM_SELECTORS)) { Object sels = map.get(PARAM_SELECTORS); if (!(sels instanceof List)) { throw new WarpScriptException(getName() + " 
Invalid parameter '" + PARAM_SELECTORS + "'"); } List<Pair<Object, Object>> selectors = new ArrayList<Pair<Object,Object>>(); for (Object sel: (List) sels) { Object[] clslbls = PARSESELECTOR.parse(sel.toString()); selectors.add(Pair.of(clslbls[0], clslbls[1])); } params.put(PARAM_SELECTOR_PAIRS, selectors); } else if (map.containsKey(PARAM_SELECTOR)) { Object[] clslbls = PARSESELECTOR.parse(map.get(PARAM_SELECTOR).toString()); params.put(PARAM_CLASS, clslbls[0]); params.put(PARAM_LABELS, clslbls[1]); } else if (map.containsKey(PARAM_CLASS) && map.containsKey(PARAM_LABELS)) { params.put(PARAM_CLASS, map.get(PARAM_CLASS)); params.put(PARAM_LABELS, new HashMap<String,String>((Map<String,String>) map.get(PARAM_LABELS))); } else if (!params.containsKey(PARAM_METASET)) { throw new WarpScriptException(getName() + " Missing '" + PARAM_SELECTOR + "', '" + PARAM_SELECTORS + "' or '" + PARAM_CLASS + "' and '" + PARAM_LABELS + "' parameters."); } if (!map.containsKey(PARAM_END)) { throw new WarpScriptException(getName() + " Missing '" + PARAM_END + "' parameter."); } if (map.get(PARAM_END) instanceof Long) { params.put(PARAM_END, map.get(PARAM_END)); } else if (map.get(PARAM_END) instanceof String) { if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { params.put(PARAM_END, io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(map.get(PARAM_END).toString())); } else { params.put(PARAM_END, fmt.parseDateTime(map.get(PARAM_END).toString()).getMillis() * Constants.TIME_UNITS_PER_MS); } } else { throw new WarpScriptException(getName() + " Invalid format for parameter '" + PARAM_END + "'."); } if (map.containsKey(PARAM_TIMESPAN)) { params.put(PARAM_TIMESPAN, (long) map.get(PARAM_TIMESPAN)); } else if (map.containsKey(PARAM_COUNT)) { params.put(PARAM_COUNT, (long) map.get(PARAM_COUNT)); } else if (map.containsKey(PARAM_START)) { long end = (long) params.get(PARAM_END); long start; if (map.get(PARAM_START) instanceof Long) { start = (long) map.get(PARAM_START); } else { if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { start = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(map.get(PARAM_START).toString()); } else { start = fmt.parseDateTime(map.get(PARAM_START).toString()).getMillis() * Constants.TIME_UNITS_PER_MS; } } long timespan; if (start < end) { timespan = end - start; } else { timespan = start - end; end = start; } params.put(PARAM_END, end); params.put(PARAM_TIMESPAN, timespan); } else { throw new WarpScriptException(getName() + " Missing parameter '" + PARAM_TIMESPAN + "' or '" + PARAM_COUNT + "' or '" + PARAM_START + "'"); } // // Check end/timespan against MetaSet, adjust limits accordingly // if (null != metaset) { long end = (long) params.get(PARAM_END); long timespan = params.containsKey(PARAM_TIMESPAN) ? (long) params.get(PARAM_TIMESPAN) : -1; long count = params.containsKey(PARAM_COUNT) ? 
(long) params.get(PARAM_COUNT) : -1; if (metaset.isSetMaxduration()) { // Force 'end' to 'now' params.put(PARAM_END, TimeSource.getTime()); if (-1 != count && metaset.getMaxduration() >= 0) { throw new WarpScriptException(getName() + " MetaSet forbids count based requests."); } if (-1 != timespan && metaset.getMaxduration() <= 0) { throw new WarpScriptException(getName() + " MetaSet forbids duration based requests."); } if (-1 != count && count > -metaset.getMaxduration()) { count = -metaset.getMaxduration(); params.put(PARAM_COUNT, count); } if (-1 != timespan && timespan > metaset.getMaxduration()) { timespan = metaset.getMaxduration(); params.put(PARAM_TIMESPAN, timespan); } } if (metaset.isSetNotbefore()) { // forbid count based requests if (-1 != count) { throw new WarpScriptException(getName() + " MetaSet forbids count based requests."); } if (end < metaset.getNotbefore()) { throw new WarpScriptException(getName() + " MetaSet forbids time ranges before " + metaset.getNotbefore()); } // Adjust timespan so maxDuration is respected if (timespan > metaset.getMaxduration()) { timespan = metaset.getMaxduration(); params.put(PARAM_TIMESPAN, timespan); } } if (metaset.isSetNotafter() && end >= metaset.getNotafter()) { end = metaset.getNotafter(); params.put(PARAM_END, end); } } if (map.containsKey(PARAM_GEO)) { if (!(map.get(PARAM_GEO) instanceof GeoXPShape)) { throw new WarpScriptException(getName() + " Invalid '" + PARAM_GEO + "' type."); } if (!map.containsKey(PARAM_GEODIR)) { throw new WarpScriptException(getName() + " Missing '" + PARAM_GEODIR + "' parameter."); } if (!stack.getGeoDirectoryClient().knowsDirectory(map.get(PARAM_GEODIR).toString())) { throw new WarpScriptException(getName() + " Unknwon directory '" + map.get(PARAM_GEODIR) + "' for parameter '" + PARAM_GEODIR + "'."); } params.put(PARAM_GEODIR, map.get(PARAM_GEODIR)); params.put(PARAM_GEO, map.get(PARAM_GEO)); if (map.containsKey(PARAM_GEOOP)) { if (PARAM_GEOOP_IN.equals(map.get(PARAM_GEOOP))) { params.put(PARAM_GEOOP, PARAM_GEOOP_IN); } else if (PARAM_GEOOP_OUT.equals(map.get(PARAM_GEOOP))) { params.put(PARAM_GEOOP, PARAM_GEOOP_OUT); } else { throw new WarpScriptException(getName() + " Invalid value for parameter '" + PARAM_GEOOP + "'"); } } else { params.put(PARAM_GEOOP, PARAM_GEOOP_IN); } } if (map.containsKey(PARAM_TYPE)) { String type = map.get(PARAM_TYPE).toString(); if (TYPE.LONG.name().equalsIgnoreCase(type)) { params.put(PARAM_TYPE, TYPE.LONG); } else if (TYPE.DOUBLE.name().equalsIgnoreCase(type)) { params.put(PARAM_TYPE, TYPE.DOUBLE); } else if (TYPE.STRING.name().equalsIgnoreCase(type)) { params.put(PARAM_TYPE, TYPE.STRING); } else if (TYPE.BOOLEAN.name().equalsIgnoreCase(type)) { params.put(PARAM_TYPE, TYPE.BOOLEAN); } else { throw new WarpScriptException(getName() + " Invalid value for parameter '" + PARAM_TYPE + "'."); } } if (map.containsKey(PARAM_TYPEATTR)) { if (map.containsKey(PARAM_TYPE)) { throw new WarpScriptException(getName() + " Incompatible parameters '" + PARAM_TYPE + "' and '" + PARAM_TYPEATTR + "'."); } params.put(PARAM_TYPEATTR, map.get(PARAM_TYPEATTR).toString()); } if (map.containsKey(PARAM_EXTRA)) { // Check that we are not using a MetaSet if (params.containsKey(PARAM_METASET)) { throw new WarpScriptException(getName() + " Cannot specify '" + PARAM_EXTRA + "' when '" + PARAM_METASET + "' is used."); } if (!(map.get(PARAM_EXTRA) instanceof List)) { throw new WarpScriptException(getName() + " Invalid type for parameter '" + PARAM_EXTRA + "'."); } Set<String> extra = new HashSet<String>(); for 
(Object o: (List) map.get(PARAM_EXTRA)) { if (!(o instanceof String)) { throw new WarpScriptException(getName() + " Invalid type for parameter '" + PARAM_EXTRA + "'."); } extra.add(o.toString()); } params.put(PARAM_EXTRA, extra); } if (map.containsKey(PARAM_WRITE_TIMESTAMP)) { params.put(PARAM_WRITE_TIMESTAMP, Boolean.TRUE.equals(map.get(PARAM_WRITE_TIMESTAMP))); } return params; } }
warp10/src/main/java/io/warp10/script/functions/FETCH.java
// // Copyright 2016 Cityzen Data // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package io.warp10.script.functions; import io.warp10.WarpDist; import io.warp10.continuum.TimeSource; import io.warp10.continuum.Tokens; import io.warp10.continuum.egress.EgressFetchHandler; import io.warp10.continuum.geo.GeoDirectoryClient; import io.warp10.continuum.gts.GTSDecoder; import io.warp10.continuum.gts.GTSHelper; import io.warp10.continuum.gts.GeoTimeSerie; import io.warp10.continuum.gts.GeoTimeSerie.TYPE; import io.warp10.continuum.sensision.SensisionConstants; import io.warp10.continuum.store.Constants; import io.warp10.continuum.store.DirectoryClient; import io.warp10.continuum.store.GTSDecoderIterator; import io.warp10.continuum.store.MetadataIterator; import io.warp10.continuum.store.StoreClient; import io.warp10.continuum.store.thrift.data.MetaSet; import io.warp10.continuum.store.thrift.data.Metadata; import io.warp10.crypto.CryptoUtils; import io.warp10.crypto.KeyStore; import io.warp10.crypto.OrderPreservingBase64; import io.warp10.crypto.SipHashInline; import io.warp10.quasar.token.thrift.data.ReadToken; import io.warp10.script.NamedWarpScriptFunction; import io.warp10.script.WarpScriptStackFunction; import io.warp10.script.WarpScriptException; import io.warp10.script.WarpScriptStack; import io.warp10.sensision.Sensision; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import java.util.zip.GZIPInputStream; import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.commons.lang3.JavaVersion; import org.apache.commons.lang3.SystemUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.thrift.TDeserializer; import org.apache.thrift.TException; import org.apache.thrift.protocol.TCompactProtocol; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; import com.geoxp.GeoXPLib.GeoXPShape; import com.google.common.base.Charsets; /** * Fetch GeoTimeSeries from continuum * FIXME(hbs): we need to retrieve an OAuth token, where do we put it? * * The top of the stack must contain a list of the following parameters * * @param token The OAuth 2.0 token to use for data retrieval * @param classSelector Class selector. * @param labelsSelectors Map of label name to label selector. * @param now Most recent timestamp to consider (in us since the Epoch) * @param timespan Width of time period to consider (in us). Timestamps at or before now - timespan will be ignored. 
* * The last two parameters can be replaced by String parameters representing the end and start ISO8601 timestamps */ public class FETCH extends NamedWarpScriptFunction implements WarpScriptStackFunction { private static final String PARAM_CLASS = "class"; /** * Extra classes to retrieve after Directory/GeoDirectory have been called */ private static final String PARAM_EXTRA = "extra"; private static final String PARAM_LABELS = "labels"; private static final String PARAM_SELECTOR = "selector"; private static final String PARAM_SELECTORS = "selectors"; private static final String PARAM_SELECTOR_PAIRS = "selpairs"; private static final String PARAM_TOKEN = "token"; private static final String PARAM_END = "end"; private static final String PARAM_START = "start"; private static final String PARAM_COUNT = "count"; private static final String PARAM_TIMESPAN = "timespan"; private static final String PARAM_TYPE = "type"; private static final String PARAM_GEO = "geo"; private static final String PARAM_GEODIR = "geodir"; private static final String PARAM_GEOOP = "geoop"; private static final String PARAM_GEOOP_IN = "in"; private static final String PARAM_GEOOP_OUT = "out"; private static final String PARAM_WRITE_TIMESTAMP = "wtimestamp"; private static final String PARAM_SHOWUUID = "showuuid"; private static final String PARAM_TYPEATTR = "typeattr"; private static final String PARAM_METASET = "metaset"; public static final String POSTFETCH_HOOK = "postfetch"; private DateTimeFormatter fmt = ISODateTimeFormat.dateTimeParser(); private WarpScriptStackFunction listTo = new LISTTO(""); private final boolean fromArchive; private final TYPE forcedType; private final long[] SIPHASH_CLASS; private final long[] SIPHASH_LABELS; private final byte[] AES_METASET; public FETCH(String name, boolean fromArchive, TYPE type) { super(name); this.fromArchive = fromArchive; this.forcedType = type; KeyStore ks = null; try { ks = WarpDist.getKeyStore(); } catch (Throwable t) { // Catch NoClassDefFound } if (null != ks) { this.SIPHASH_CLASS = SipHashInline.getKey(ks.getKey(KeyStore.SIPHASH_CLASS)); this.SIPHASH_LABELS = SipHashInline.getKey(ks.getKey(KeyStore.SIPHASH_LABELS)); this.AES_METASET = ks.getKey(KeyStore.AES_METASETS); } else { this.SIPHASH_CLASS = null; this.SIPHASH_LABELS = null; this.AES_METASET = null; } } @Override public Object apply(WarpScriptStack stack) throws WarpScriptException { // // Extract parameters from the stack // Object top = stack.peek(); // // Handle the new (as of 20150805) parameter passing mechanism as a map // Map<String,Object> params = null; if (top instanceof Map) { stack.pop(); params = paramsFromMap(stack, (Map<String,Object>) top); } if (top instanceof List) { if (5 != ((List) top).size()) { stack.drop(); throw new WarpScriptException(getName() + " expects 5 parameters."); } // // Explode list and remove its size // listTo.apply(stack); stack.drop(); } if (null == params) { params = new HashMap<String, Object>(); // // Extract time span // Object oStop = stack.pop(); Object oStart = stack.pop(); long endts; long timespan; if (oStart instanceof String && oStop instanceof String) { long start; long stop; if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { start = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(oStart.toString()); } else { start = fmt.parseDateTime((String) oStart).getMillis() * Constants.TIME_UNITS_PER_MS; } if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { stop = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(oStop.toString()); } else { 
stop = fmt.parseDateTime((String) oStop).getMillis() * Constants.TIME_UNITS_PER_MS; } if (start < stop) { endts = stop; timespan = stop - start; } else { endts = start; timespan = start - stop; } } else if (oStart instanceof Long && oStop instanceof Long) { endts = (long) oStart; timespan = (long) oStop; } else { throw new WarpScriptException("Invalid timespan specification."); } params.put(PARAM_END, endts); if (timespan < 0) { params.put(PARAM_COUNT, -timespan); } else { params.put(PARAM_TIMESPAN, timespan); } // // Extract labels selector // Object oLabelsSelector = stack.pop(); if (!(oLabelsSelector instanceof Map)) { throw new WarpScriptException("Label selectors must be a map."); } Map<String,String> labelSelectors = new HashMap<String,String>((Map<String,String>) oLabelsSelector); params.put(PARAM_LABELS, labelSelectors); // // Extract class selector // Object oClassSelector = stack.pop(); if (!(oClassSelector instanceof String)) { throw new WarpScriptException("Class selector must be a string."); } String classSelector = (String) oClassSelector; params.put(PARAM_CLASS, classSelector); // // Extract token // Object oToken = stack.pop(); if (!(oToken instanceof String)) { throw new WarpScriptException("Token must be a string."); } String token = (String) oToken; params.put(PARAM_TOKEN, token); } StoreClient gtsStore = stack.getStoreClient(); DirectoryClient directoryClient = stack.getDirectoryClient(); GeoTimeSerie base = null; GeoTimeSerie[] bases = null; String typelabel = (String) params.get(PARAM_TYPEATTR); if (null != typelabel) { bases = new GeoTimeSerie[4]; } ReadToken rtoken = Tokens.extractReadToken(params.get(PARAM_TOKEN).toString()); List<String> clsSels = new ArrayList<String>(); List<Map<String,String>> lblsSels = new ArrayList<Map<String,String>>(); MetaSet metaset = null; List<Metadata> metadatas = null; Iterator<Metadata> iter = null; if (params.containsKey(PARAM_METASET)) { metaset = (MetaSet) params.get(PARAM_METASET); iter = metaset.getMetadatas().iterator(); } else { if (params.containsKey(PARAM_SELECTOR_PAIRS)) { for (Pair<Object,Object> pair: (List<Pair<Object,Object>>) params.get(PARAM_SELECTOR_PAIRS)) { clsSels.add(pair.getLeft().toString()); Map<String,String> labelSelectors = (Map<String,String>) pair.getRight(); labelSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken)); lblsSels.add((Map<String,String>) labelSelectors); } } else { Map<String,String> labelSelectors = (Map<String,String>) params.get(PARAM_LABELS); labelSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken)); clsSels.add(params.get(PARAM_CLASS).toString()); lblsSels.add(labelSelectors); } try { metadatas = directoryClient.find(clsSels, lblsSels); iter = metadatas.iterator(); } catch (IOException ioe) { try { iter = directoryClient.iterator(clsSels, lblsSels); } catch (Exception e) { throw new WarpScriptException(e); } } } metadatas = new ArrayList<Metadata>(); List<GeoTimeSerie> series = new ArrayList<GeoTimeSerie>(); AtomicLong fetched = (AtomicLong) stack.getAttribute(WarpScriptStack.ATTRIBUTE_FETCH_COUNT); long fetchLimit = (long) stack.getAttribute(WarpScriptStack.ATTRIBUTE_FETCH_LIMIT); long gtsLimit = (long) stack.getAttribute(WarpScriptStack.ATTRIBUTE_GTS_LIMIT); AtomicLong gtscount = (AtomicLong) stack.getAttribute(WarpScriptStack.ATTRIBUTE_GTS_COUNT); // Variables to keep track of the last Metadata and fetched count Metadata lastMetadata = null; long lastCount = 0L; try { while(iter.hasNext()) { metadatas.add(iter.next()); if (gtscount.incrementAndGet() > gtsLimit) { 
throw new WarpScriptException(getName() + " exceeded limit of " + gtsLimit + " Geo Time Series, current count is " + gtscount); } if (metadatas.size() < EgressFetchHandler.FETCH_BATCHSIZE && iter.hasNext()) { continue; } // // Filter the retrieved Metadata according to geo // if (params.containsKey(PARAM_GEO)) { GeoDirectoryClient geoclient = stack.getGeoDirectoryClient(); long end = (long) params.get(PARAM_END); long start = Long.MIN_VALUE; if (params.containsKey(PARAM_TIMESPAN)) { start = end - (long) params.get(PARAM_TIMESPAN); } boolean inside = false; if (PARAM_GEOOP_IN.equals(params.get(PARAM_GEOOP))) { inside = true; } try { metadatas = geoclient.filter((String) params.get(PARAM_GEODIR), metadatas, (GeoXPShape) params.get(PARAM_GEO), inside, start, end); } catch (IOException ioe) { throw new WarpScriptException(ioe); } } // // Generate extra Metadata if PARAM_EXTRA is set // if (params.containsKey(PARAM_EXTRA)) { Set<Metadata> withextra = new HashSet<Metadata>(); withextra.addAll(metadatas); for (Metadata meta: metadatas) { for (String cls: (Set<String>) params.get(PARAM_EXTRA)) { // The following is safe, the constructor allocates new maps Metadata metadata = new Metadata(meta); metadata.setName(cls); metadata.setClassId(GTSHelper.classId(this.SIPHASH_CLASS, cls)); metadata.setLabelsId(GTSHelper.labelsId(this.SIPHASH_LABELS, metadata.getLabels())); withextra.add(metadata); } } metadatas.clear(); metadatas.addAll(withextra); } // // We assume that GTS will be fetched in a continuous way, i.e. without having a GTSDecoder from one // then one from another, then one from the first one. // long timespan = params.containsKey(PARAM_TIMESPAN) ? (long) params.get(PARAM_TIMESPAN) : - ((long) params.get(PARAM_COUNT)); TYPE type = (TYPE) params.get(PARAM_TYPE); if (null != this.forcedType) { if (null != type) { throw new WarpScriptException(getName() + " type of fetched GTS cannot be changed."); } type = this.forcedType; } boolean writeTimestamp = Boolean.TRUE.equals(params.get(PARAM_WRITE_TIMESTAMP)); boolean showUUID = Boolean.TRUE.equals(params.get(PARAM_SHOWUUID)); try (GTSDecoderIterator gtsiter = gtsStore.fetch(rtoken, metadatas, (long) params.get(PARAM_END), timespan, fromArchive, writeTimestamp)) { while(gtsiter.hasNext()) { GTSDecoder decoder = gtsiter.next(); boolean identical = true; if (null == lastMetadata || !lastMetadata.equals(decoder.getMetadata())) { lastMetadata = decoder.getMetadata(); identical = false; lastCount = 0; } GeoTimeSerie gts; // // If we should ventilate per type, do so now // if (null != typelabel) { Map<String,String> labels = new HashMap<String,String>(decoder.getMetadata().getLabels()); labels.remove(Constants.PRODUCER_LABEL); labels.remove(Constants.OWNER_LABEL); java.util.UUID uuid = null; if (showUUID) { uuid = new java.util.UUID(decoder.getClassId(), decoder.getLabelsId()); } long count = 0; Metadata decoderMeta = decoder.getMetadata(); while(decoder.next()) { // If we've read enough data, exit if (identical && timespan < 0 && lastCount + count >= -timespan) { break; } count++; long ts = decoder.getTimestamp(); long location = decoder.getLocation(); long elevation = decoder.getElevation(); Object value = decoder.getValue(); int gtsidx = 0; String typename = "DOUBLE"; if (value instanceof Long) { gtsidx = 1; typename = "LONG"; } else if (value instanceof Boolean) { gtsidx = 2; typename = "BOOLEAN"; } else if (value instanceof String) { gtsidx = 3; typename = "STRING"; } base = bases[gtsidx]; if (null == base || 
!base.getMetadata().getName().equals(decoderMeta.getName()) || !base.getMetadata().getLabels().equals(decoderMeta.getLabels())) { bases[gtsidx] = new GeoTimeSerie(); base = bases[gtsidx]; series.add(base); base.setLabels(decoder.getLabels()); base.getMetadata().putToAttributes(typelabel, typename); base.setName(decoder.getName()); if (null != uuid) { base.getMetadata().putToAttributes(Constants.UUID_ATTRIBUTE, uuid.toString()); } } GTSHelper.setValue(base, ts, location, elevation, value, false); } if (fetched.addAndGet(count) > fetchLimit) { Map<String,String> sensisionLabels = new HashMap<String, String>(); sensisionLabels.put(SensisionConstants.SENSISION_LABEL_CONSUMERID, Tokens.getUUID(rtoken.getBilledId())); Sensision.update(SensisionConstants.SENSISION_CLASS_EINSTEIN_FETCHCOUNT_EXCEEDED, sensisionLabels, 1); throw new WarpScriptException(getName() + " exceeded limit of " + fetchLimit + " datapoints, current count is " + fetched.get()); } lastCount += count; continue; } if (null != type) { gts = decoder.decode(type); } else { gts = decoder.decode(); } if (identical && timespan < 0 && lastCount + GTSHelper.nvalues(gts) > -timespan) { // We would add too many datapoints, we will shrink the GTS. // As it it sorted in reverse order of the ticks (since the datapoints are organized // this way in HBase), we just need to shrink the GTS. gts = GTSHelper.shrinkTo(gts, (int) Math.max(-timespan - lastCount, 0)); } lastCount += GTSHelper.nvalues(gts); // // Remove producer/owner labels // // // Add a .uuid attribute if instructed to do so // if (showUUID) { java.util.UUID uuid = new java.util.UUID(gts.getClassId(), gts.getLabelsId()); gts.getMetadata().putToAttributes(Constants.UUID_ATTRIBUTE, uuid.toString()); } Map<String,String> labels = new HashMap<String, String>(); labels.putAll(gts.getMetadata().getLabels()); labels.remove(Constants.PRODUCER_LABEL); labels.remove(Constants.OWNER_LABEL); gts.setLabels(labels); // // If it's the first GTS, take it as is. // if (null == base) { base = gts; } else { // // If name and labels are identical to the previous GTS, merge them // Otherwise add 'base' to the stack and set it to 'gts'. 
// if (!base.getMetadata().getName().equals(gts.getMetadata().getName()) || !base.getMetadata().getLabels().equals(gts.getMetadata().getLabels())) { series.add(base); base = gts; } else { base = GTSHelper.merge(base, gts); } } if (fetched.addAndGet(gts.size()) > fetchLimit) { Map<String,String> sensisionLabels = new HashMap<String, String>(); sensisionLabels.put(SensisionConstants.SENSISION_LABEL_CONSUMERID, Tokens.getUUID(rtoken.getBilledId())); Sensision.update(SensisionConstants.SENSISION_CLASS_EINSTEIN_FETCHCOUNT_EXCEEDED, sensisionLabels, 1); throw new WarpScriptException(getName() + " exceeded limit of " + fetchLimit + " datapoints, current count is " + fetched.get()); //break; } } } catch (WarpScriptException ee) { throw ee; } catch (Throwable t) { throw new WarpScriptException(t); } // // If there is one current GTS, push it onto the stack (only if not ventilating per type) // if (null != base && null == typelabel) { series.add(base); } // // Reset state // base = null; metadatas.clear(); } } catch (Throwable t) { throw t; } finally { if (iter instanceof MetadataIterator) { try { ((MetadataIterator) iter).close(); } catch (Exception e) { } } } stack.push(series); // // Apply a possible postfetch hook // if (rtoken.getHooksSize() > 0 && rtoken.getHooks().containsKey(POSTFETCH_HOOK)) { stack.execMulti(rtoken.getHooks().get(POSTFETCH_HOOK)); } return stack; } private Map<String,Object> paramsFromMap(WarpScriptStack stack, Map<String,Object> map) throws WarpScriptException { Map<String,Object> params = new HashMap<String, Object>(); // // Handle the case where a MetaSet was passed as this will // modify some other parameters // MetaSet metaset = null; if (map.containsKey(PARAM_METASET)) { if (null == AES_METASET) { throw new WarpScriptException(getName() + " MetaSet support not available."); } Object ms = map.get(PARAM_METASET); if (!(ms instanceof byte[])) { // Decode byte[] decoded = OrderPreservingBase64.decode(ms.toString().getBytes(Charsets.US_ASCII)); // Decrypt byte[] decrypted = CryptoUtils.unwrap(AES_METASET, decoded); // Decompress try { ByteArrayOutputStream out = new ByteArrayOutputStream(decrypted.length); InputStream in = new GZIPInputStream(new ByteArrayInputStream(decrypted)); byte[] buf = new byte[1024]; while(true) { int len = in.read(buf); if (len < 0) { break; } out.write(buf, 0, len); } in.close(); out.close(); ms = out.toByteArray(); } catch (IOException e) { throw new WarpScriptException(getName() + " encountered an invalid MetaSet."); } } metaset = new MetaSet(); TDeserializer deser = new TDeserializer(new TCompactProtocol.Factory()); try { deser.deserialize(metaset, (byte[]) ms); } catch (TException te) { throw new WarpScriptException(getName() + " was unable to decode the provided MetaSet."); } // // Check if MetaSet has expired // if (metaset.getExpiry() < System.currentTimeMillis()) { throw new WarpScriptException(getName() + " MetaSet has expired."); } // Attempt to extract token, this will raise an exception if token has expired or was revoked ReadToken rtoken = Tokens.extractReadToken(metaset.getToken()); params.put(PARAM_METASET, metaset); params.put(PARAM_TOKEN, metaset.getToken()); } if (!params.containsKey(PARAM_TOKEN)) { if (!map.containsKey(PARAM_TOKEN)) { throw new WarpScriptException(getName() + " Missing '" + PARAM_TOKEN + "' parameter"); } params.put(PARAM_TOKEN, map.get(PARAM_TOKEN)); } if (map.containsKey(PARAM_SELECTORS)) { Object sels = map.get(PARAM_SELECTORS); if (!(sels instanceof List)) { throw new WarpScriptException(getName() + " 
Invalid parameter '" + PARAM_SELECTORS + "'"); } List<Pair<Object, Object>> selectors = new ArrayList<Pair<Object,Object>>(); for (Object sel: (List) sels) { Object[] clslbls = PARSESELECTOR.parse(sel.toString()); selectors.add(Pair.of(clslbls[0], clslbls[1])); } params.put(PARAM_SELECTOR_PAIRS, selectors); } else if (map.containsKey(PARAM_SELECTOR)) { Object[] clslbls = PARSESELECTOR.parse(map.get(PARAM_SELECTOR).toString()); params.put(PARAM_CLASS, clslbls[0]); params.put(PARAM_LABELS, clslbls[1]); } else if (map.containsKey(PARAM_CLASS) && map.containsKey(PARAM_LABELS)) { params.put(PARAM_CLASS, map.get(PARAM_CLASS)); params.put(PARAM_LABELS, new HashMap<String,String>((Map<String,String>) map.get(PARAM_LABELS))); } else if (!params.containsKey(PARAM_METASET)) { throw new WarpScriptException(getName() + " Missing '" + PARAM_SELECTOR + "', '" + PARAM_SELECTORS + "' or '" + PARAM_CLASS + "' and '" + PARAM_LABELS + "' parameters."); } if (!map.containsKey(PARAM_END)) { throw new WarpScriptException(getName() + " Missing '" + PARAM_END + "' parameter."); } if (map.get(PARAM_END) instanceof Long) { params.put(PARAM_END, map.get(PARAM_END)); } else if (map.get(PARAM_END) instanceof String) { if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { params.put(PARAM_END, io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(map.get(PARAM_END).toString())); } else { params.put(PARAM_END, fmt.parseDateTime(map.get(PARAM_END).toString()).getMillis() * Constants.TIME_UNITS_PER_MS); } } else { throw new WarpScriptException(getName() + " Invalid format for parameter '" + PARAM_END + "'."); } if (map.containsKey(PARAM_TIMESPAN)) { params.put(PARAM_TIMESPAN, (long) map.get(PARAM_TIMESPAN)); } else if (map.containsKey(PARAM_COUNT)) { params.put(PARAM_COUNT, (long) map.get(PARAM_COUNT)); } else if (map.containsKey(PARAM_START)) { long end = (long) params.get(PARAM_END); long start; if (map.get(PARAM_START) instanceof Long) { start = (long) map.get(PARAM_START); } else { if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { start = io.warp10.script.unary.TOTIMESTAMP.parseTimestamp(map.get(PARAM_START).toString()); } else { start = fmt.parseDateTime(map.get(PARAM_START).toString()).getMillis() * Constants.TIME_UNITS_PER_MS; } } long timespan; if (start < end) { timespan = end - start; } else { timespan = start - end; end = start; } params.put(PARAM_END, end); params.put(PARAM_TIMESPAN, timespan); } else { throw new WarpScriptException(getName() + " Missing parameter '" + PARAM_TIMESPAN + "' or '" + PARAM_COUNT + "' or '" + PARAM_START + "'"); } // // Check end/timespan against MetaSet, adjust limits accordingly // if (null != metaset) { long end = (long) params.get(PARAM_END); long timespan = params.containsKey(PARAM_TIMESPAN) ? (long) params.get(PARAM_TIMESPAN) : -1; long count = params.containsKey(PARAM_COUNT) ? 
(long) params.get(PARAM_COUNT) : -1; if (metaset.isSetMaxduration()) { // Force 'end' to 'now' params.put(PARAM_END, TimeSource.getTime()); if (-1 != count && metaset.getMaxduration() >= 0) { throw new WarpScriptException(getName() + " MetaSet forbids count based requests."); } if (-1 != timespan && metaset.getMaxduration() <= 0) { throw new WarpScriptException(getName() + " MetaSet forbids duration based requests."); } if (-1 != count && count > -metaset.getMaxduration()) { count = -metaset.getMaxduration(); params.put(PARAM_COUNT, count); } if (-1 != timespan && timespan > metaset.getMaxduration()) { timespan = metaset.getMaxduration(); params.put(PARAM_TIMESPAN, timespan); } } if (metaset.isSetNotbefore()) { // forbid count based requests if (-1 != count) { throw new WarpScriptException(getName() + " MetaSet forbids count based requests."); } if (end < metaset.getNotbefore()) { throw new WarpScriptException(getName() + " MetaSet forbids time ranges before " + metaset.getNotbefore()); } // Adjust timespan so maxDuration is respected if (timespan > metaset.getMaxduration()) { timespan = metaset.getMaxduration(); params.put(PARAM_TIMESPAN, timespan); } } if (metaset.isSetNotafter() && end >= metaset.getNotafter()) { end = metaset.getNotafter(); params.put(PARAM_END, end); } } if (map.containsKey(PARAM_GEO)) { if (!(map.get(PARAM_GEO) instanceof GeoXPShape)) { throw new WarpScriptException(getName() + " Invalid '" + PARAM_GEO + "' type."); } if (!map.containsKey(PARAM_GEODIR)) { throw new WarpScriptException(getName() + " Missing '" + PARAM_GEODIR + "' parameter."); } if (!stack.getGeoDirectoryClient().knowsDirectory(map.get(PARAM_GEODIR).toString())) { throw new WarpScriptException(getName() + " Unknwon directory '" + map.get(PARAM_GEODIR) + "' for parameter '" + PARAM_GEODIR + "'."); } params.put(PARAM_GEODIR, map.get(PARAM_GEODIR)); params.put(PARAM_GEO, map.get(PARAM_GEO)); if (map.containsKey(PARAM_GEOOP)) { if (PARAM_GEOOP_IN.equals(map.get(PARAM_GEOOP))) { params.put(PARAM_GEOOP, PARAM_GEOOP_IN); } else if (PARAM_GEOOP_OUT.equals(map.get(PARAM_GEOOP))) { params.put(PARAM_GEOOP, PARAM_GEOOP_OUT); } else { throw new WarpScriptException(getName() + " Invalid value for parameter '" + PARAM_GEOOP + "'"); } } else { params.put(PARAM_GEOOP, PARAM_GEOOP_IN); } } if (map.containsKey(PARAM_TYPE)) { String type = map.get(PARAM_TYPE).toString(); if (TYPE.LONG.name().equalsIgnoreCase(type)) { params.put(PARAM_TYPE, TYPE.LONG); } else if (TYPE.DOUBLE.name().equalsIgnoreCase(type)) { params.put(PARAM_TYPE, TYPE.DOUBLE); } else if (TYPE.STRING.name().equalsIgnoreCase(type)) { params.put(PARAM_TYPE, TYPE.STRING); } else if (TYPE.BOOLEAN.name().equalsIgnoreCase(type)) { params.put(PARAM_TYPE, TYPE.BOOLEAN); } else { throw new WarpScriptException(getName() + " Invalid value for parameter '" + PARAM_TYPE + "'."); } } if (map.containsKey(PARAM_TYPEATTR)) { if (map.containsKey(PARAM_TYPE)) { throw new WarpScriptException(getName() + " Incompatible parameters '" + PARAM_TYPE + "' and '" + PARAM_TYPEATTR + "'."); } params.put(PARAM_TYPEATTR, map.get(PARAM_TYPEATTR).toString()); } if (map.containsKey(PARAM_EXTRA)) { // Check that we are not using a MetaSet if (params.containsKey(PARAM_METASET)) { throw new WarpScriptException(getName() + " Cannot specify '" + PARAM_EXTRA + "' when '" + PARAM_METASET + "' is used."); } if (!(map.get(PARAM_EXTRA) instanceof List)) { throw new WarpScriptException(getName() + " Invalid type for parameter '" + PARAM_EXTRA + "'."); } Set<String> extra = new HashSet<String>(); for 
(Object o: (List) map.get(PARAM_EXTRA)) { if (!(o instanceof String)) { throw new WarpScriptException(getName() + " Invalid type for parameter '" + PARAM_EXTRA + "'."); } extra.add(o.toString()); } params.put(PARAM_EXTRA, extra); } if (map.containsKey(PARAM_WRITE_TIMESTAMP)) { params.put(PARAM_WRITE_TIMESTAMP, Boolean.TRUE.equals(map.get(PARAM_WRITE_TIMESTAMP))); } return params; } }
Forced type of GTS decoding when decoding GTSDecoders 2..
warp10/src/main/java/io/warp10/script/functions/FETCH.java
Forced type of GTS decoding when decoding GTSDecoders 2..
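The difference between the new and old FETCH.java contents above is the lastType bookkeeping: when no type is forced, successive GTSDecoders that belong to the same Geo Time Series are now decoded with the type chosen for the first chunk, so a later chunk that happens to start with a value of another type is still merged instead of being dropped. A small self-contained sketch of that rule follows; Chunk, Type, and the method names are illustrative stand-ins, not the Warp 10 API.

// Illustrative sketch of the chunk-decoding rule introduced by the new version above:
// keep decoding chunks of the same series with the type already chosen for that series.
import java.util.ArrayList;
import java.util.List;

class TypeConsistentDecode {
    enum Type { UNDEFINED, LONG, DOUBLE, STRING, BOOLEAN }

    interface Chunk {
        String seriesId();
        Type naturalType();                 // the type this chunk would pick on its own
        List<Object> decodeAs(Type type);   // decode the chunk's values, coerced to the given type
    }

    // Decode a run of chunks, reusing the previous chunk's type while we stay on the same series.
    static List<Object> decodeAll(List<Chunk> chunks) {
        List<Object> values = new ArrayList<>();
        String lastSeries = null;
        Type lastType = Type.UNDEFINED;
        for (Chunk chunk : chunks) {
            boolean sameSeries = chunk.seriesId().equals(lastSeries);
            Type decodeType = (sameSeries && lastType != Type.UNDEFINED)
                    ? lastType                 // keep the type already chosen for this series
                    : chunk.naturalType();     // first chunk of a series: let it pick its own type
            values.addAll(chunk.decodeAs(decodeType));
            lastSeries = chunk.seriesId();
            lastType = decodeType;
        }
        return values;
    }
}

The sketch mirrors the comment in the new contents ("We need to decode using the same type as the previous decoder for the same GTS"), but it is a simplified model, not the actual merge logic.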
Java
apache-2.0
c200deb86a17779ba1246449fea814ae2f19ae3f
0
asakusafw/asakusafw,cocoatomo/asakusafw,akirakw/asakusafw,cocoatomo/asakusafw,ashigeru/asakusafw,akirakw/asakusafw,ashigeru/asakusafw,asakusafw/asakusafw
/** * Copyright 2011-2017 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.runtime.stage.temporary; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.compress.CompressionCodec; import com.asakusafw.runtime.io.ModelInput; import com.asakusafw.runtime.io.ModelOutput; import com.asakusafw.runtime.io.sequencefile.SequenceFileModelInput; import com.asakusafw.runtime.io.sequencefile.SequenceFileModelOutput; import com.asakusafw.runtime.io.sequencefile.SequenceFileUtil; /** * Access to the temporary storage. * @since 0.2.5 * @version 0.7.1 */ public final class TemporaryStorage { static final Log LOG = LogFactory.getLog(TemporaryStorage.class); private static final int OUTPUT_INIT_BUFFER_SIZE = 300 * 1024; private static final int OUTPUT_PAGE_SIZE = 256 * 1024; /** * Resolves the raw path pattern into the concrete path list. * @param conf current configuration * @param pathPattern path pattern which describes temporary storage * @return the resolved paths * @throws IOException if failed to resolve path pattern * @throws IllegalArgumentException if some parameters were {@code null} */ public static List<Path> list(Configuration conf, Path pathPattern) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (pathPattern == null) { throw new IllegalArgumentException("pathPattern must not be null"); //$NON-NLS-1$ } List<FileStatus> statusList = listStatus(conf, pathPattern); if (statusList.isEmpty()) { return Collections.emptyList(); } List<Path> results = new ArrayList<>(); for (FileStatus status : statusList) { results.add(status.getPath()); } return results; } /** * Resolves the raw path pattern into the concrete file status list. 
* @param conf current configuration * @param pathPattern path pattern which describes temporary storage * @return the resolved file status * @throws IOException if failed to resolve path pattern * @throws IllegalArgumentException if some parameters were {@code null} * @since 0.7.1 */ public static List<FileStatus> listStatus(Configuration conf, Path pathPattern) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (pathPattern == null) { throw new IllegalArgumentException("pathPattern must not be null"); //$NON-NLS-1$ } FileSystem fs = pathPattern.getFileSystem(conf); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( "listing temporary inputs: {0}", //$NON-NLS-1$ fs.makeQualified(pathPattern))); } FileStatus[] statusList = fs.globStatus(pathPattern); if (statusList == null || statusList.length == 0) { return Collections.emptyList(); } return Arrays.asList(statusList); } /** * Opens a temporary {@link ModelInput} for the specified path. * @param <V> data type * @param conf configuration * @param dataType data type * @param path source path (must not contain wildcards) * @return the opened {@link ModelInput} * @throws IOException if failed to open input * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelInput<V> openInput( Configuration conf, Class<V> dataType, Path path) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (path == null) { throw new IllegalArgumentException("path must not be null"); //$NON-NLS-1$ } FileSystem fs = path.getFileSystem(conf); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( "opening temporary input: {0}", //$NON-NLS-1$ fs.makeQualified(path))); } if (Writable.class.isAssignableFrom(dataType)) { return (ModelInput<V>) new TemporaryFileInput<>(fs.open(path), 0); } SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(fs.makeQualified(path))); return (ModelInput<V>) new SequenceFileModelInput<>(reader); } /** * Opens a temporary {@link ModelInput} for the specified path. * @param <V> data type * @param conf configuration * @param dataType data type * @param status source file status * @param input source file content * @return the opened {@link ModelInput} * @throws IOException if failed to open input * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelInput<V> openInput( Configuration conf, Class<V> dataType, FileStatus status, InputStream input) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (status == null) { throw new IllegalArgumentException("status must not be null"); //$NON-NLS-1$ } if (input == null) { throw new IllegalArgumentException("input must not be null"); //$NON-NLS-1$ } if (Writable.class.isAssignableFrom(dataType)) { return (ModelInput<V>) new TemporaryFileInput<>(input, 0); } SequenceFile.Reader reader = SequenceFileUtil.openReader(input, status, conf); return (ModelInput<V>) new SequenceFileModelInput<>(reader, input); } /** * Opens a temporary {@link ModelOutput} for the specified path. 
* @param <V> data type * @param conf configuration * @param dataType data type * @param path target path * @return the opened {@link ModelOutput} * @throws IOException if failed to open output * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelOutput<V> openOutput( Configuration conf, Class<V> dataType, Path path) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (path == null) { throw new IllegalArgumentException("path must not be null"); //$NON-NLS-1$ } FileSystem fs = path.getFileSystem(conf); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( "opening temporary output: {0}", //$NON-NLS-1$ fs.makeQualified(path))); } if (Writable.class.isAssignableFrom(dataType)) { return (ModelOutput<V>) new TemporaryFileOutput<>( fs.create(path, true), dataType.getName(), OUTPUT_INIT_BUFFER_SIZE, OUTPUT_PAGE_SIZE); } SequenceFile.Writer out = SequenceFile.createWriter( conf, SequenceFile.Writer.file(fs.makeQualified(path)), SequenceFile.Writer.keyClass(NullWritable.class), SequenceFile.Writer.valueClass(dataType)); return new SequenceFileModelOutput<>(out); } /** * Opens a temporary {@link ModelOutput} for the specified path. * @param <V> data type * @param conf configuration * @param dataType data type * @param path target path * @param compressionCodec compression codec, or null if not compressed * @return the opened {@link ModelOutput} * @throws IOException if failed to open output * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelOutput<V> openOutput( Configuration conf, Class<V> dataType, Path path, CompressionCodec compressionCodec) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (path == null) { throw new IllegalArgumentException("path must not be null"); //$NON-NLS-1$ } FileSystem fs = path.getFileSystem(conf); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( "opening temporary output: {0}", //$NON-NLS-1$ fs.makeQualified(path))); } if (Writable.class.isAssignableFrom(dataType)) { return (ModelOutput<V>) new TemporaryFileOutput<>( fs.create(path, true), dataType.getName(), OUTPUT_INIT_BUFFER_SIZE, OUTPUT_PAGE_SIZE); } SequenceFile.Writer out = newWriter(conf, fs, dataType, path, compressionCodec); return new SequenceFileModelOutput<>(out); } private static <V> SequenceFile.Writer newWriter( Configuration conf, FileSystem fs, Class<V> dataType, Path path, CompressionCodec compressionCodec) throws IOException { return SequenceFile.createWriter( conf, SequenceFile.Writer.file(fs.makeQualified(path)), SequenceFile.Writer.keyClass(NullWritable.class), SequenceFile.Writer.valueClass(dataType), SequenceFile.Writer.compression( compressionCodec == null ? CompressionType.BLOCK : CompressionType.NONE, compressionCodec)); } /** * Opens a temporary {@link ModelOutput} for the specified output. 
* @param <V> data type * @param conf configuration * @param dataType data type * @param output target output stream * @return the opened {@link ModelOutput} * @throws IOException if failed to open output * @throws IllegalArgumentException if some parameters were {@code null} */ public static <V> ModelOutput<V> openOutput( Configuration conf, Class<V> dataType, OutputStream output) throws IOException { return openOutput(conf, dataType, output, null); } /** * Opens a temporary {@link ModelOutput} for the specified output. * @param <V> data type * @param conf configuration * @param dataType data type * @param output target output stream * @param compressionCodec compression codec, or null if not compressed * @return the opened {@link ModelOutput} * @throws IOException if failed to open output * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelOutput<V> openOutput( Configuration conf, Class<V> dataType, OutputStream output, CompressionCodec compressionCodec) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (output == null) { throw new IllegalArgumentException("output must not be null"); //$NON-NLS-1$ } if (Writable.class.isAssignableFrom(dataType)) { return (ModelOutput<V>) new TemporaryFileOutput<>( output, dataType.getName(), OUTPUT_INIT_BUFFER_SIZE, OUTPUT_PAGE_SIZE); } SequenceFile.Writer out = SequenceFileUtil.openWriter( output, conf, NullWritable.class, dataType, compressionCodec); return new SequenceFileModelOutput<>(out); } private TemporaryStorage() { return; } }
core-project/asakusa-runtime/src/main/java/com/asakusafw/runtime/stage/temporary/TemporaryStorage.java
/** * Copyright 2011-2017 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.runtime.stage.temporary; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.compress.CompressionCodec; import com.asakusafw.runtime.io.ModelInput; import com.asakusafw.runtime.io.ModelOutput; import com.asakusafw.runtime.io.sequencefile.SequenceFileModelInput; import com.asakusafw.runtime.io.sequencefile.SequenceFileModelOutput; import com.asakusafw.runtime.io.sequencefile.SequenceFileUtil; /** * Access to the temporary storage. * @since 0.2.5 * @version 0.7.1 */ public final class TemporaryStorage { static final Log LOG = LogFactory.getLog(TemporaryStorage.class); private static final int OUTPUT_INIT_BUFFER_SIZE = 300 * 1024; private static final int OUTPUT_PAGE_SIZE = 256 * 1024; /** * Resolves the raw path pattern into the concrete path list. * @param conf current configuration * @param pathPattern path pattern which describes temporary storage * @return the resolved paths * @throws IOException if failed to resolve path pattern * @throws IllegalArgumentException if some parameters were {@code null} */ public static List<Path> list(Configuration conf, Path pathPattern) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (pathPattern == null) { throw new IllegalArgumentException("pathPattern must not be null"); //$NON-NLS-1$ } List<FileStatus> statusList = listStatus(conf, pathPattern); if (statusList.isEmpty()) { return Collections.emptyList(); } List<Path> results = new ArrayList<>(); for (FileStatus status : statusList) { results.add(status.getPath()); } return results; } /** * Resolves the raw path pattern into the concrete file status list. 
* @param conf current configuration * @param pathPattern path pattern which describes temporary storage * @return the resolved file status * @throws IOException if failed to resolve path pattern * @throws IllegalArgumentException if some parameters were {@code null} * @since 0.7.1 */ public static List<FileStatus> listStatus(Configuration conf, Path pathPattern) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (pathPattern == null) { throw new IllegalArgumentException("pathPattern must not be null"); //$NON-NLS-1$ } FileSystem fs = pathPattern.getFileSystem(conf); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( "Listing temporary input: {0} (fs={1})", //$NON-NLS-1$ pathPattern, fs.getUri())); } FileStatus[] statusList = fs.globStatus(pathPattern); if (statusList == null || statusList.length == 0) { return Collections.emptyList(); } return Arrays.asList(statusList); } /** * Opens a temporary {@link ModelInput} for the specified path. * @param <V> data type * @param conf configuration * @param dataType data type * @param path source path (must not contain wildcards) * @return the opened {@link ModelInput} * @throws IOException if failed to open input * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelInput<V> openInput( Configuration conf, Class<V> dataType, Path path) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (path == null) { throw new IllegalArgumentException("path must not be null"); //$NON-NLS-1$ } FileSystem fs = path.getFileSystem(conf); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( "Opening temporary input: {0} (fs={1})", //$NON-NLS-1$ path, fs.getUri())); } if (Writable.class.isAssignableFrom(dataType)) { return (ModelInput<V>) new TemporaryFileInput<>(fs.open(path), 0); } SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(fs.makeQualified(path))); return (ModelInput<V>) new SequenceFileModelInput<>(reader); } /** * Opens a temporary {@link ModelInput} for the specified path. * @param <V> data type * @param conf configuration * @param dataType data type * @param status source file status * @param input source file content * @return the opened {@link ModelInput} * @throws IOException if failed to open input * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelInput<V> openInput( Configuration conf, Class<V> dataType, FileStatus status, InputStream input) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (status == null) { throw new IllegalArgumentException("status must not be null"); //$NON-NLS-1$ } if (input == null) { throw new IllegalArgumentException("input must not be null"); //$NON-NLS-1$ } if (Writable.class.isAssignableFrom(dataType)) { return (ModelInput<V>) new TemporaryFileInput<>(input, 0); } SequenceFile.Reader reader = SequenceFileUtil.openReader(input, status, conf); return (ModelInput<V>) new SequenceFileModelInput<>(reader, input); } /** * Opens a temporary {@link ModelOutput} for the specified path. 
* @param <V> data type * @param conf configuration * @param dataType data type * @param path target path * @return the opened {@link ModelOutput} * @throws IOException if failed to open output * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelOutput<V> openOutput( Configuration conf, Class<V> dataType, Path path) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (path == null) { throw new IllegalArgumentException("path must not be null"); //$NON-NLS-1$ } FileSystem fs = path.getFileSystem(conf); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( "Opening temporary output: {0} (fs={1})", //$NON-NLS-1$ path, fs.getUri())); } if (Writable.class.isAssignableFrom(dataType)) { return (ModelOutput<V>) new TemporaryFileOutput<>( fs.create(path, true), dataType.getName(), OUTPUT_INIT_BUFFER_SIZE, OUTPUT_PAGE_SIZE); } SequenceFile.Writer out = SequenceFile.createWriter( conf, SequenceFile.Writer.file(fs.makeQualified(path)), SequenceFile.Writer.keyClass(NullWritable.class), SequenceFile.Writer.valueClass(dataType)); return new SequenceFileModelOutput<>(out); } /** * Opens a temporary {@link ModelOutput} for the specified path. * @param <V> data type * @param conf configuration * @param dataType data type * @param path target path * @param compressionCodec compression codec, or null if not compressed * @return the opened {@link ModelOutput} * @throws IOException if failed to open output * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelOutput<V> openOutput( Configuration conf, Class<V> dataType, Path path, CompressionCodec compressionCodec) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (path == null) { throw new IllegalArgumentException("path must not be null"); //$NON-NLS-1$ } FileSystem fs = path.getFileSystem(conf); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format( "Opening temporary output: {0} (fs={1})", //$NON-NLS-1$ path, fs.getUri())); } if (Writable.class.isAssignableFrom(dataType)) { return (ModelOutput<V>) new TemporaryFileOutput<>( fs.create(path, true), dataType.getName(), OUTPUT_INIT_BUFFER_SIZE, OUTPUT_PAGE_SIZE); } SequenceFile.Writer out = newWriter(conf, fs, dataType, path, compressionCodec); return new SequenceFileModelOutput<>(out); } private static <V> SequenceFile.Writer newWriter( Configuration conf, FileSystem fs, Class<V> dataType, Path path, CompressionCodec compressionCodec) throws IOException { return SequenceFile.createWriter( conf, SequenceFile.Writer.file(fs.makeQualified(path)), SequenceFile.Writer.keyClass(NullWritable.class), SequenceFile.Writer.valueClass(dataType), SequenceFile.Writer.compression( compressionCodec == null ? CompressionType.BLOCK : CompressionType.NONE, compressionCodec)); } /** * Opens a temporary {@link ModelOutput} for the specified output. 
* @param <V> data type * @param conf configuration * @param dataType data type * @param output target output stream * @return the opened {@link ModelOutput} * @throws IOException if failed to open output * @throws IllegalArgumentException if some parameters were {@code null} */ public static <V> ModelOutput<V> openOutput( Configuration conf, Class<V> dataType, OutputStream output) throws IOException { return openOutput(conf, dataType, output, null); } /** * Opens a temporary {@link ModelOutput} for the specified output. * @param <V> data type * @param conf configuration * @param dataType data type * @param output target output stream * @param compressionCodec compression codec, or null if not compressed * @return the opened {@link ModelOutput} * @throws IOException if failed to open output * @throws IllegalArgumentException if some parameters were {@code null} */ @SuppressWarnings("unchecked") public static <V> ModelOutput<V> openOutput( Configuration conf, Class<V> dataType, OutputStream output, CompressionCodec compressionCodec) throws IOException { if (conf == null) { throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$ } if (dataType == null) { throw new IllegalArgumentException("dataType must not be null"); //$NON-NLS-1$ } if (output == null) { throw new IllegalArgumentException("output must not be null"); //$NON-NLS-1$ } if (Writable.class.isAssignableFrom(dataType)) { return (ModelOutput<V>) new TemporaryFileOutput<>( output, dataType.getName(), OUTPUT_INIT_BUFFER_SIZE, OUTPUT_PAGE_SIZE); } SequenceFile.Writer out = SequenceFileUtil.openWriter( output, conf, NullWritable.class, dataType, compressionCodec); return new SequenceFileModelOutput<>(out); } private TemporaryStorage() { return; } }
Revise debug logs of `TemporaryStorage`. Clients can now obtain the actual full path of temporary files exchanged between WindGate and data-flow engines. Here is an example logback configuration for enabling these logs. ```xml <configuration> ... <logger name="com.asakusafw.runtime.stage.temporary.TemporaryStorage" level="DEBUG" /> ... </configuration> ```
core-project/asakusa-runtime/src/main/java/com/asakusafw/runtime/stage/temporary/TemporaryStorage.java
Revise debug logs of `TemporaryStorage`.
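The TemporaryStorage record above shows the public entry points openInput(Configuration, Class, Path) and openOutput(Configuration, Class, Path). A rough usage sketch follows, under stated assumptions: Hadoop's Text is used as a Writable model type, the temporary path is hypothetical, and the ModelOutput#write / ModelInput#readTo calls are assumed from the Asakusa I/O interfaces rather than quoted from this record.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;

import com.asakusafw.runtime.io.ModelInput;
import com.asakusafw.runtime.io.ModelOutput;
import com.asakusafw.runtime.stage.temporary.TemporaryStorage;

public class TemporaryStorageSketch {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path("tmp/example-temporary-file"); // hypothetical location

        // Write a few records; Text implements Writable.
        try (ModelOutput<Text> output = TemporaryStorage.openOutput(conf, Text.class, path)) {
            output.write(new Text("hello")); // assumed ModelOutput#write(T)
            output.write(new Text("world"));
        }

        // Read the records back into a reusable buffer.
        Text buffer = new Text();
        try (ModelInput<Text> input = TemporaryStorage.openInput(conf, Text.class, path)) {
            while (input.readTo(buffer)) {   // assumed ModelInput#readTo(T) returning false at end of input
                System.out.println(buffer);
            }
        }
    }
}
```

Because Text implements Writable, the branches shown in openOutput/openInput above select the framework's own temporary file format (TemporaryFileOutput/TemporaryFileInput) rather than a SequenceFile.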
Java
apache-2.0
6fed3425437ab1736244a8d77005b4fa8fa3e57b
0
spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework
/* * Copyright 2002-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.orm.jpa.persistenceunit; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.sql.DataSource; import jakarta.persistence.PersistenceException; import jakarta.persistence.SharedCacheMode; import jakarta.persistence.ValidationMode; import jakarta.persistence.spi.PersistenceUnitInfo; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.context.ResourceLoaderAware; import org.springframework.context.weaving.LoadTimeWeaverAware; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.core.io.support.ResourcePatternResolver; import org.springframework.core.io.support.ResourcePatternUtils; import org.springframework.instrument.classloading.InstrumentationLoadTimeWeaver; import org.springframework.instrument.classloading.LoadTimeWeaver; import org.springframework.jdbc.datasource.lookup.DataSourceLookup; import org.springframework.jdbc.datasource.lookup.JndiDataSourceLookup; import org.springframework.jdbc.datasource.lookup.MapDataSourceLookup; import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; import org.springframework.util.ResourceUtils; /** * Default implementation of the {@link PersistenceUnitManager} interface. * Used as internal default by * {@link org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean}. * * <p>Supports standard JPA scanning for {@code persistence.xml} files, * with configurable file locations, JDBC DataSource lookup and load-time weaving. * * <p>Builds a persistence unit based on the state of a {@link PersistenceManagedTypes}, * typically built using a {@link PersistenceManagedTypesScanner}.</p> * * <p>The default XML file location is {@code classpath*:META-INF/persistence.xml}, * scanning for all matching files in the classpath (as defined in the JPA specification). * DataSource names are by default interpreted as JNDI names, and no load time weaving * is available (which requires weaving to be turned off in the persistence provider). 
* * @author Juergen Hoeller * @author Stephane Nicoll * @since 2.0 * @see #setPersistenceXmlLocations * @see #setDataSourceLookup * @see #setLoadTimeWeaver * @see org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean#setPersistenceUnitManager */ public class DefaultPersistenceUnitManager implements PersistenceUnitManager, ResourceLoaderAware, LoadTimeWeaverAware, InitializingBean { private static final String DEFAULT_ORM_XML_RESOURCE = "META-INF/orm.xml"; private static final String PERSISTENCE_XML_FILENAME = "persistence.xml"; /** * Default location of the {@code persistence.xml} file: * "classpath*:META-INF/persistence.xml". */ public static final String DEFAULT_PERSISTENCE_XML_LOCATION = "classpath*:META-INF/" + PERSISTENCE_XML_FILENAME; /** * Default location for the persistence unit root URL: * "classpath:", indicating the root of the classpath. */ public static final String ORIGINAL_DEFAULT_PERSISTENCE_UNIT_ROOT_LOCATION = "classpath:"; /** * Default persistence unit name. */ public static final String ORIGINAL_DEFAULT_PERSISTENCE_UNIT_NAME = "default"; protected final Log logger = LogFactory.getLog(getClass()); private String[] persistenceXmlLocations = new String[] {DEFAULT_PERSISTENCE_XML_LOCATION}; @Nullable private String defaultPersistenceUnitRootLocation = ORIGINAL_DEFAULT_PERSISTENCE_UNIT_ROOT_LOCATION; @Nullable private String defaultPersistenceUnitName = ORIGINAL_DEFAULT_PERSISTENCE_UNIT_NAME; @Nullable private PersistenceManagedTypes managedTypes; @Nullable private String[] packagesToScan; @Nullable private String[] mappingResources; @Nullable private SharedCacheMode sharedCacheMode; @Nullable private ValidationMode validationMode; private DataSourceLookup dataSourceLookup = new JndiDataSourceLookup(); @Nullable private DataSource defaultDataSource; @Nullable private DataSource defaultJtaDataSource; @Nullable private PersistenceUnitPostProcessor[] persistenceUnitPostProcessors; @Nullable private LoadTimeWeaver loadTimeWeaver; private ResourcePatternResolver resourcePatternResolver = new PathMatchingResourcePatternResolver(); private final Set<String> persistenceUnitInfoNames = new HashSet<>(); private final Map<String, PersistenceUnitInfo> persistenceUnitInfos = new HashMap<>(); /** * Specify the location of the {@code persistence.xml} files to load. * These can be specified as Spring resource locations and/or location patterns. * <p>Default is "classpath*:META-INF/persistence.xml". */ public void setPersistenceXmlLocation(String persistenceXmlLocation) { this.persistenceXmlLocations = new String[] {persistenceXmlLocation}; } /** * Specify multiple locations of {@code persistence.xml} files to load. * These can be specified as Spring resource locations and/or location patterns. * <p>Default is "classpath*:META-INF/persistence.xml". * @param persistenceXmlLocations an array of Spring resource Strings * identifying the location of the {@code persistence.xml} files to read */ public void setPersistenceXmlLocations(String... persistenceXmlLocations) { this.persistenceXmlLocations = persistenceXmlLocations; } /** * Set the default persistence unit root location, to be applied * if no unit-specific persistence unit root could be determined. * <p>Default is "classpath:", that is, the root of the current classpath * (nearest root directory). To be overridden if unit-specific resolution * does not work and the classpath root is not appropriate either. 
*/ public void setDefaultPersistenceUnitRootLocation(String defaultPersistenceUnitRootLocation) { this.defaultPersistenceUnitRootLocation = defaultPersistenceUnitRootLocation; } /** * Specify the name of the default persistence unit, if any. Default is "default". * <p>Primarily applied to a scanned persistence unit without {@code persistence.xml}. * Also applicable to selecting a default unit from several persistence units available. * @see #setPackagesToScan * @see #obtainDefaultPersistenceUnitInfo */ public void setDefaultPersistenceUnitName(String defaultPersistenceUnitName) { this.defaultPersistenceUnitName = defaultPersistenceUnitName; } /** * Set the {@link PersistenceManagedTypes} to use to build the list of managed types * as an alternative to entity scanning. * @param managedTypes the managed types * @since 6.0 */ public void setManagedTypes(PersistenceManagedTypes managedTypes) { this.managedTypes = managedTypes; } /** * Set whether to use Spring-based scanning for entity classes in the classpath * instead of using JPA's standard scanning of jar files with {@code persistence.xml} * markers in them. In case of Spring-based scanning, no {@code persistence.xml} * is necessary; all you need to do is to specify base packages to search here. * <p>Default is none. Specify packages to search for autodetection of your entity * classes in the classpath. This is analogous to Spring's component-scan feature * ({@link org.springframework.context.annotation.ClassPathBeanDefinitionScanner}). * <p>Consider setting a {@link PersistenceManagedTypes} instead that allows the * scanning logic to be optimized by AOT processing. * <p>Such package scanning defines a "default persistence unit" in Spring, which * may live next to regularly defined units originating from {@code persistence.xml}. * Its name is determined by {@link #setDefaultPersistenceUnitName}: by default, * it's simply "default". * <p><b>Note: There may be limitations in comparison to regular JPA scanning.</b> * In particular, JPA providers may pick up annotated packages for provider-specific * annotations only when driven by {@code persistence.xml}. As of 4.1, Spring's * scan can detect annotated packages as well if supported by the given * {@link org.springframework.orm.jpa.JpaVendorAdapter} (e.g. for Hibernate). * <p>If no explicit {@link #setMappingResources mapping resources} have been * specified in addition to these packages, this manager looks for a default * {@code META-INF/orm.xml} file in the classpath, registering it as a mapping * resource for the default unit if the mapping file is not co-located with a * {@code persistence.xml} file (in which case we assume it is only meant to be * used with the persistence units defined there, like in standard JPA). * @see #setManagedTypes(PersistenceManagedTypes) * @see #setDefaultPersistenceUnitName * @see #setMappingResources */ public void setPackagesToScan(String... packagesToScan) { this.packagesToScan = packagesToScan; } /** * Specify one or more mapping resources (equivalent to {@code <mapping-file>} * entries in {@code persistence.xml}) for the default persistence unit. * Can be used on its own or in combination with entity scanning in the classpath, * in both cases avoiding {@code persistence.xml}. * <p>Note that mapping resources must be relative to the classpath root, * e.g. "META-INF/mappings.xml" or "com/mycompany/repository/mappings.xml", * so that they can be loaded through {@code ClassLoader.getResource}. 
* <p>If no explicit mapping resources have been specified next to * {@link #setPackagesToScan packages to scan}, this manager looks for a default * {@code META-INF/orm.xml} file in the classpath, registering it as a mapping * resource for the default unit if the mapping file is not co-located with a * {@code persistence.xml} file (in which case we assume it is only meant to be * used with the persistence units defined there, like in standard JPA). * <p>Note that specifying an empty array/list here suppresses the default * {@code META-INF/orm.xml} check. On the other hand, explicitly specifying * {@code META-INF/orm.xml} here will register that file even if it happens * to be co-located with a {@code persistence.xml} file. * @see #setDefaultPersistenceUnitName * @see #setPackagesToScan */ public void setMappingResources(String... mappingResources) { this.mappingResources = mappingResources; } /** * Specify the JPA 2.0 shared cache mode for all of this manager's persistence * units, overriding any value in {@code persistence.xml} if set. * @since 4.0 * @see jakarta.persistence.spi.PersistenceUnitInfo#getSharedCacheMode() */ public void setSharedCacheMode(SharedCacheMode sharedCacheMode) { this.sharedCacheMode = sharedCacheMode; } /** * Specify the JPA 2.0 validation mode for all of this manager's persistence * units, overriding any value in {@code persistence.xml} if set. * @since 4.0 * @see jakarta.persistence.spi.PersistenceUnitInfo#getValidationMode() */ public void setValidationMode(ValidationMode validationMode) { this.validationMode = validationMode; } /** * Specify the JDBC DataSources that the JPA persistence provider is supposed * to use for accessing the database, resolving data source names in * {@code persistence.xml} against Spring-managed DataSources. * <p>The specified Map needs to define data source names for specific DataSource * objects, matching the data source names used in {@code persistence.xml}. * If not specified, data source names will be resolved as JNDI names instead * (as defined by standard JPA). * @see org.springframework.jdbc.datasource.lookup.MapDataSourceLookup */ public void setDataSources(Map<String, DataSource> dataSources) { this.dataSourceLookup = new MapDataSourceLookup(dataSources); } /** * Specify the JDBC DataSourceLookup that provides DataSources for the * persistence provider, resolving data source names in {@code persistence.xml} * against Spring-managed DataSource instances. * <p>Default is JndiDataSourceLookup, which resolves DataSource names as * JNDI names (as defined by standard JPA). Specify a BeanFactoryDataSourceLookup * instance if you want DataSource names to be resolved against Spring bean names. * <p>Alternatively, consider passing in a map from names to DataSource instances * via the "dataSources" property. If the {@code persistence.xml} file * does not define DataSource names at all, specify a default DataSource * via the "defaultDataSource" property. * @see org.springframework.jdbc.datasource.lookup.JndiDataSourceLookup * @see org.springframework.jdbc.datasource.lookup.BeanFactoryDataSourceLookup * @see #setDataSources * @see #setDefaultDataSource */ public void setDataSourceLookup(@Nullable DataSourceLookup dataSourceLookup) { this.dataSourceLookup = (dataSourceLookup != null ? dataSourceLookup : new JndiDataSourceLookup()); } /** * Return the JDBC DataSourceLookup that provides DataSources for the * persistence provider, resolving data source names in {@code persistence.xml} * against Spring-managed DataSource instances. 
*/ @Nullable public DataSourceLookup getDataSourceLookup() { return this.dataSourceLookup; } /** * Specify the JDBC DataSource that the JPA persistence provider is supposed to use * for accessing the database if none has been specified in {@code persistence.xml}. * This variant indicates no special transaction setup, i.e. typical resource-local. * <p>In JPA speak, a DataSource passed in here will be uses as "nonJtaDataSource" * on the PersistenceUnitInfo passed to the PersistenceProvider, provided that * none has been registered before. * @see jakarta.persistence.spi.PersistenceUnitInfo#getNonJtaDataSource() */ public void setDefaultDataSource(@Nullable DataSource defaultDataSource) { this.defaultDataSource = defaultDataSource; } /** * Return the JDBC DataSource that the JPA persistence provider is supposed to use * for accessing the database if none has been specified in {@code persistence.xml}. */ @Nullable public DataSource getDefaultDataSource() { return this.defaultDataSource; } /** * Specify the JDBC DataSource that the JPA persistence provider is supposed to use * for accessing the database if none has been specified in {@code persistence.xml}. * This variant indicates that JTA is supposed to be used as transaction type. * <p>In JPA speak, a DataSource passed in here will be uses as "jtaDataSource" * on the PersistenceUnitInfo passed to the PersistenceProvider, provided that * none has been registered before. * @see jakarta.persistence.spi.PersistenceUnitInfo#getJtaDataSource() */ public void setDefaultJtaDataSource(@Nullable DataSource defaultJtaDataSource) { this.defaultJtaDataSource = defaultJtaDataSource; } /** * Return the JTA-aware DataSource that the JPA persistence provider is supposed to use * for accessing the database if none has been specified in {@code persistence.xml}. */ @Nullable public DataSource getDefaultJtaDataSource() { return this.defaultJtaDataSource; } /** * Set the PersistenceUnitPostProcessors to be applied to each * PersistenceUnitInfo that has been parsed by this manager. * <p>Such post-processors can, for example, register further entity classes and * jar files, in addition to the metadata read from {@code persistence.xml}. */ public void setPersistenceUnitPostProcessors(@Nullable PersistenceUnitPostProcessor... postProcessors) { this.persistenceUnitPostProcessors = postProcessors; } /** * Return the PersistenceUnitPostProcessors to be applied to each * PersistenceUnitInfo that has been parsed by this manager. */ @Nullable public PersistenceUnitPostProcessor[] getPersistenceUnitPostProcessors() { return this.persistenceUnitPostProcessors; } /** * Specify the Spring LoadTimeWeaver to use for class instrumentation according * to the JPA class transformer contract. * <p>It is not required to specify a LoadTimeWeaver: Most providers will be able * to provide a subset of their functionality without class instrumentation as well, * or operate with their own VM agent specified on JVM startup. Furthermore, * DefaultPersistenceUnitManager falls back to an InstrumentationLoadTimeWeaver * if Spring's agent-based instrumentation is available at runtime. * <p>In terms of Spring-provided weaving options, the most important ones are * InstrumentationLoadTimeWeaver, which requires a Spring-specific (but very general) * VM agent specified on JVM startup, and ReflectiveLoadTimeWeaver, which interacts * with an underlying ClassLoader based on specific extended methods being available * on it (for example, interacting with Spring's TomcatInstrumentableClassLoader). 
* Consider using the {@code context:load-time-weaver} XML tag for creating * such a shared LoadTimeWeaver (autodetecting the environment by default). * @see org.springframework.instrument.classloading.InstrumentationLoadTimeWeaver * @see org.springframework.instrument.classloading.ReflectiveLoadTimeWeaver */ @Override public void setLoadTimeWeaver(@Nullable LoadTimeWeaver loadTimeWeaver) { this.loadTimeWeaver = loadTimeWeaver; } /** * Return the Spring LoadTimeWeaver to use for class instrumentation according * to the JPA class transformer contract. */ @Nullable public LoadTimeWeaver getLoadTimeWeaver() { return this.loadTimeWeaver; } @Override public void setResourceLoader(ResourceLoader resourceLoader) { this.resourcePatternResolver = ResourcePatternUtils.getResourcePatternResolver(resourceLoader); } @Override public void afterPropertiesSet() { if (this.loadTimeWeaver == null && InstrumentationLoadTimeWeaver.isInstrumentationAvailable()) { this.loadTimeWeaver = new InstrumentationLoadTimeWeaver(this.resourcePatternResolver.getClassLoader()); } preparePersistenceUnitInfos(); } /** * Prepare the PersistenceUnitInfos according to the configuration * of this manager: scanning for {@code persistence.xml} files, * parsing all matching files, configuring and post-processing them. * <p>PersistenceUnitInfos cannot be obtained before this preparation * method has been invoked. * @see #obtainDefaultPersistenceUnitInfo() * @see #obtainPersistenceUnitInfo(String) */ public void preparePersistenceUnitInfos() { this.persistenceUnitInfoNames.clear(); this.persistenceUnitInfos.clear(); List<SpringPersistenceUnitInfo> puis = readPersistenceUnitInfos(); for (SpringPersistenceUnitInfo pui : puis) { if (pui.getPersistenceUnitRootUrl() == null) { pui.setPersistenceUnitRootUrl(determineDefaultPersistenceUnitRootUrl()); } if (pui.getJtaDataSource() == null && this.defaultJtaDataSource != null) { pui.setJtaDataSource(this.defaultJtaDataSource); } if (pui.getNonJtaDataSource() == null && this.defaultDataSource != null) { pui.setNonJtaDataSource(this.defaultDataSource); } if (this.sharedCacheMode != null) { pui.setSharedCacheMode(this.sharedCacheMode); } if (this.validationMode != null) { pui.setValidationMode(this.validationMode); } if (this.loadTimeWeaver != null) { pui.init(this.loadTimeWeaver); } else { pui.init(this.resourcePatternResolver.getClassLoader()); } postProcessPersistenceUnitInfo(pui); String name = pui.getPersistenceUnitName(); if (!this.persistenceUnitInfoNames.add(name) && !isPersistenceUnitOverrideAllowed()) { StringBuilder msg = new StringBuilder(); msg.append("Conflicting persistence unit definitions for name '").append(name).append("': "); msg.append(pui.getPersistenceUnitRootUrl()).append(", "); msg.append(this.persistenceUnitInfos.get(name).getPersistenceUnitRootUrl()); throw new IllegalStateException(msg.toString()); } this.persistenceUnitInfos.put(name, pui); } } /** * Read all persistence unit infos from {@code persistence.xml}, * as defined in the JPA specification. 
*/ private List<SpringPersistenceUnitInfo> readPersistenceUnitInfos() { List<SpringPersistenceUnitInfo> infos = new ArrayList<>(1); String defaultName = this.defaultPersistenceUnitName; boolean buildDefaultUnit = (this.managedTypes != null || this.packagesToScan != null || this.mappingResources != null); boolean foundDefaultUnit = false; PersistenceUnitReader reader = new PersistenceUnitReader(this.resourcePatternResolver, this.dataSourceLookup); SpringPersistenceUnitInfo[] readInfos = reader.readPersistenceUnitInfos(this.persistenceXmlLocations); for (SpringPersistenceUnitInfo readInfo : readInfos) { infos.add(readInfo); if (defaultName != null && defaultName.equals(readInfo.getPersistenceUnitName())) { foundDefaultUnit = true; } } if (buildDefaultUnit) { if (foundDefaultUnit) { if (logger.isWarnEnabled()) { logger.warn("Found explicit default persistence unit with name '" + defaultName + "' in persistence.xml - " + "overriding local default persistence unit settings ('managedTypes', 'packagesToScan' or 'mappingResources')"); } } else { infos.add(buildDefaultPersistenceUnitInfo()); } } return infos; } /** * Perform Spring-based scanning for entity classes. * @see #setPackagesToScan */ private SpringPersistenceUnitInfo buildDefaultPersistenceUnitInfo() { SpringPersistenceUnitInfo scannedUnit = new SpringPersistenceUnitInfo(); if (this.defaultPersistenceUnitName != null) { scannedUnit.setPersistenceUnitName(this.defaultPersistenceUnitName); } scannedUnit.setExcludeUnlistedClasses(true); if (this.managedTypes != null) { applyManagedTypes(scannedUnit, this.managedTypes); } else if (this.packagesToScan != null) { applyManagedTypes(scannedUnit, new PersistenceManagedTypesScanner( this.resourcePatternResolver).scan(this.packagesToScan)); } if (this.mappingResources != null) { for (String mappingFileName : this.mappingResources) { scannedUnit.addMappingFileName(mappingFileName); } } else { Resource ormXml = getOrmXmlForDefaultPersistenceUnit(); if (ormXml != null) { scannedUnit.addMappingFileName(DEFAULT_ORM_XML_RESOURCE); if (scannedUnit.getPersistenceUnitRootUrl() == null) { try { scannedUnit.setPersistenceUnitRootUrl( PersistenceUnitReader.determinePersistenceUnitRootUrl(ormXml)); } catch (IOException ex) { logger.debug("Failed to determine persistence unit root URL from orm.xml location", ex); } } } } return scannedUnit; } private void applyManagedTypes(SpringPersistenceUnitInfo scannedUnit, PersistenceManagedTypes managedTypes) { managedTypes.getManagedClassNames().forEach(scannedUnit::addManagedClassName); managedTypes.getManagedPackages().forEach(scannedUnit::addManagedPackage); URL persistenceUnitRootUrl = managedTypes.getPersistenceUnitRootUrl(); if (scannedUnit.getPersistenceUnitRootUrl() == null && persistenceUnitRootUrl != null) { scannedUnit.setPersistenceUnitRootUrl(persistenceUnitRootUrl); } } /** * Try to determine the persistence unit root URL based on the given * "defaultPersistenceUnitRootLocation". * @return the persistence unit root URL to pass to the JPA PersistenceProvider * @see #setDefaultPersistenceUnitRootLocation */ @Nullable private URL determineDefaultPersistenceUnitRootUrl() { if (this.defaultPersistenceUnitRootLocation == null) { return null; } try { URL url = this.resourcePatternResolver.getResource(this.defaultPersistenceUnitRootLocation).getURL(); return (ResourceUtils.isJarURL(url) ? 
ResourceUtils.extractJarFileURL(url) : url); } catch (IOException ex) { if (ORIGINAL_DEFAULT_PERSISTENCE_UNIT_ROOT_LOCATION.equals(this.defaultPersistenceUnitRootLocation)) { logger.debug("Unable to resolve classpath root as persistence unit root URL"); return null; } throw new PersistenceException("Unable to resolve persistence unit root URL", ex); } } /** * Determine JPA's default "META-INF/orm.xml" resource for use with Spring's default * persistence unit, if any. * <p>Checks whether a "META-INF/orm.xml" file exists in the classpath and uses it * if it is not co-located with a "META-INF/persistence.xml" file. */ @Nullable private Resource getOrmXmlForDefaultPersistenceUnit() { Resource ormXml = this.resourcePatternResolver.getResource( this.defaultPersistenceUnitRootLocation + DEFAULT_ORM_XML_RESOURCE); if (ormXml.exists()) { try { Resource persistenceXml = ormXml.createRelative(PERSISTENCE_XML_FILENAME); if (!persistenceXml.exists()) { return ormXml; } } catch (IOException ex) { // Cannot resolve relative persistence.xml file - let's assume it's not there. return ormXml; } } return null; } /** * Return the specified PersistenceUnitInfo from this manager's cache * of processed persistence units, keeping it in the cache (i.e. not * 'obtaining' it for use but rather just accessing it for post-processing). * <p>This can be used in {@link #postProcessPersistenceUnitInfo} implementations, * detecting existing persistence units of the same name and potentially merging them. * @param persistenceUnitName the name of the desired persistence unit * @return the PersistenceUnitInfo in mutable form, or {@code null} if not available */ @Nullable protected final MutablePersistenceUnitInfo getPersistenceUnitInfo(String persistenceUnitName) { PersistenceUnitInfo pui = this.persistenceUnitInfos.get(persistenceUnitName); return (MutablePersistenceUnitInfo) pui; } /** * Hook method allowing subclasses to customize each PersistenceUnitInfo. * <p>The default implementation delegates to all registered PersistenceUnitPostProcessors. * It is usually preferable to register further entity classes, jar files etc there * rather than in a subclass of this manager, to be able to reuse the post-processors. * @param pui the chosen PersistenceUnitInfo, as read from {@code persistence.xml}. * Passed in as MutablePersistenceUnitInfo. * @see #setPersistenceUnitPostProcessors */ protected void postProcessPersistenceUnitInfo(MutablePersistenceUnitInfo pui) { PersistenceUnitPostProcessor[] postProcessors = getPersistenceUnitPostProcessors(); if (postProcessors != null) { for (PersistenceUnitPostProcessor postProcessor : postProcessors) { postProcessor.postProcessPersistenceUnitInfo(pui); } } } /** * Return whether an override of a same-named persistence unit is allowed. * <p>Default is {@code false}. May be overridden to return {@code true}, * for example if {@link #postProcessPersistenceUnitInfo} is able to handle that case. 
*/ protected boolean isPersistenceUnitOverrideAllowed() { return false; } @Override public PersistenceUnitInfo obtainDefaultPersistenceUnitInfo() { if (this.persistenceUnitInfoNames.isEmpty()) { throw new IllegalStateException("No persistence units parsed from " + ObjectUtils.nullSafeToString(this.persistenceXmlLocations)); } if (this.persistenceUnitInfos.isEmpty()) { throw new IllegalStateException("All persistence units from " + ObjectUtils.nullSafeToString(this.persistenceXmlLocations) + " already obtained"); } if (this.persistenceUnitInfos.size() > 1 && this.defaultPersistenceUnitName != null) { return obtainPersistenceUnitInfo(this.defaultPersistenceUnitName); } PersistenceUnitInfo pui = this.persistenceUnitInfos.values().iterator().next(); this.persistenceUnitInfos.clear(); return pui; } @Override public PersistenceUnitInfo obtainPersistenceUnitInfo(String persistenceUnitName) { PersistenceUnitInfo pui = this.persistenceUnitInfos.remove(persistenceUnitName); if (pui == null) { if (!this.persistenceUnitInfoNames.contains(persistenceUnitName)) { throw new IllegalArgumentException( "No persistence unit with name '" + persistenceUnitName + "' found"); } else { throw new IllegalStateException( "Persistence unit with name '" + persistenceUnitName + "' already obtained"); } } return pui; } }
spring-orm/src/main/java/org/springframework/orm/jpa/persistenceunit/DefaultPersistenceUnitManager.java
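The javadoc in the DefaultPersistenceUnitManager sources above describes Spring-based entity scanning via setPackagesToScan and wiring the manager into LocalContainerEntityManagerFactoryBean#setPersistenceUnitManager. A minimal configuration sketch follows; the base package name and the availability of a DataSource bean are illustrative assumptions, and a real setup would additionally need a JPA provider (for example a JpaVendorAdapter), which is outside this record.

```java
import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.persistenceunit.DefaultPersistenceUnitManager;

@Configuration
public class JpaConfigSketch {

    @Bean
    public DefaultPersistenceUnitManager persistenceUnitManager(DataSource dataSource) {
        DefaultPersistenceUnitManager manager = new DefaultPersistenceUnitManager();
        // Spring-based scanning instead of persistence.xml; the scanned unit gets the default name "default".
        manager.setPackagesToScan("com.example.domain"); // hypothetical base package
        manager.setDefaultDataSource(dataSource);
        // As an InitializingBean, the manager calls preparePersistenceUnitInfos() in afterPropertiesSet().
        return manager;
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory(
            DefaultPersistenceUnitManager persistenceUnitManager) {
        LocalContainerEntityManagerFactoryBean emf = new LocalContainerEntityManagerFactoryBean();
        emf.setPersistenceUnitManager(persistenceUnitManager);
        // A JpaVendorAdapter / persistence provider would also be configured here in practice.
        return emf;
    }
}
```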
/* * Copyright 2002-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.orm.jpa.persistenceunit; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.sql.DataSource; import jakarta.persistence.PersistenceException; import jakarta.persistence.SharedCacheMode; import jakarta.persistence.ValidationMode; import jakarta.persistence.spi.PersistenceUnitInfo; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.context.ResourceLoaderAware; import org.springframework.context.weaving.LoadTimeWeaverAware; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.core.io.support.ResourcePatternResolver; import org.springframework.core.io.support.ResourcePatternUtils; import org.springframework.instrument.classloading.InstrumentationLoadTimeWeaver; import org.springframework.instrument.classloading.LoadTimeWeaver; import org.springframework.jdbc.datasource.lookup.DataSourceLookup; import org.springframework.jdbc.datasource.lookup.JndiDataSourceLookup; import org.springframework.jdbc.datasource.lookup.MapDataSourceLookup; import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; import org.springframework.util.ResourceUtils; /** * Default implementation of the {@link PersistenceUnitManager} interface. * Used as internal default by * {@link org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean}. * * <p>Supports standard JPA scanning for {@code persistence.xml} files, * with configurable file locations, JDBC DataSource lookup and load-time weaving. * * <p>Builds a persistence unit based on the state of a {@link PersistenceManagedTypes}, * typically built using a {@link PersistenceManagedTypesScanner}.</p> * * <p>The default XML file location is {@code classpath*:META-INF/persistence.xml}, * scanning for all matching files in the classpath (as defined in the JPA specification). * DataSource names are by default interpreted as JNDI names, and no load time weaving * is available (which requires weaving to be turned off in the persistence provider). 
* * @author Juergen Hoeller * @author Stephane Nicoll * @since 2.0 * @see #setPersistenceXmlLocations * @see #setDataSourceLookup * @see #setLoadTimeWeaver * @see org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean#setPersistenceUnitManager */ public class DefaultPersistenceUnitManager implements PersistenceUnitManager, ResourceLoaderAware, LoadTimeWeaverAware, InitializingBean { private static final String DEFAULT_ORM_XML_RESOURCE = "META-INF/orm.xml"; private static final String PERSISTENCE_XML_FILENAME = "persistence.xml"; /** * Default location of the {@code persistence.xml} file: * "classpath*:META-INF/persistence.xml". */ public static final String DEFAULT_PERSISTENCE_XML_LOCATION = "classpath*:META-INF/" + PERSISTENCE_XML_FILENAME; /** * Default location for the persistence unit root URL: * "classpath:", indicating the root of the classpath. */ public static final String ORIGINAL_DEFAULT_PERSISTENCE_UNIT_ROOT_LOCATION = "classpath:"; /** * Default persistence unit name. */ public static final String ORIGINAL_DEFAULT_PERSISTENCE_UNIT_NAME = "default"; protected final Log logger = LogFactory.getLog(getClass()); private String[] persistenceXmlLocations = new String[] {DEFAULT_PERSISTENCE_XML_LOCATION}; @Nullable private String defaultPersistenceUnitRootLocation = ORIGINAL_DEFAULT_PERSISTENCE_UNIT_ROOT_LOCATION; @Nullable private String defaultPersistenceUnitName = ORIGINAL_DEFAULT_PERSISTENCE_UNIT_NAME; @Nullable private PersistenceManagedTypes managedTypes; @Nullable private String[] packagesToScan; @Nullable private String[] mappingResources; @Nullable private SharedCacheMode sharedCacheMode; @Nullable private ValidationMode validationMode; private DataSourceLookup dataSourceLookup = new JndiDataSourceLookup(); @Nullable private DataSource defaultDataSource; @Nullable private DataSource defaultJtaDataSource; @Nullable private PersistenceUnitPostProcessor[] persistenceUnitPostProcessors; @Nullable private LoadTimeWeaver loadTimeWeaver; private ResourcePatternResolver resourcePatternResolver = new PathMatchingResourcePatternResolver(); private final Set<String> persistenceUnitInfoNames = new HashSet<>(); private final Map<String, PersistenceUnitInfo> persistenceUnitInfos = new HashMap<>(); /** * Specify the location of the {@code persistence.xml} files to load. * These can be specified as Spring resource locations and/or location patterns. * <p>Default is "classpath*:META-INF/persistence.xml". */ public void setPersistenceXmlLocation(String persistenceXmlLocation) { this.persistenceXmlLocations = new String[] {persistenceXmlLocation}; } /** * Specify multiple locations of {@code persistence.xml} files to load. * These can be specified as Spring resource locations and/or location patterns. * <p>Default is "classpath*:META-INF/persistence.xml". * @param persistenceXmlLocations an array of Spring resource Strings * identifying the location of the {@code persistence.xml} files to read */ public void setPersistenceXmlLocations(String... persistenceXmlLocations) { this.persistenceXmlLocations = persistenceXmlLocations; } /** * Set the default persistence unit root location, to be applied * if no unit-specific persistence unit root could be determined. * <p>Default is "classpath:", that is, the root of the current classpath * (nearest root directory). To be overridden if unit-specific resolution * does not work and the classpath root is not appropriate either. 
*/ public void setDefaultPersistenceUnitRootLocation(String defaultPersistenceUnitRootLocation) { this.defaultPersistenceUnitRootLocation = defaultPersistenceUnitRootLocation; } /** * Specify the name of the default persistence unit, if any. Default is "default". * <p>Primarily applied to a scanned persistence unit without {@code persistence.xml}. * Also applicable to selecting a default unit from several persistence units available. * @see #setPackagesToScan * @see #obtainDefaultPersistenceUnitInfo */ public void setDefaultPersistenceUnitName(String defaultPersistenceUnitName) { this.defaultPersistenceUnitName = defaultPersistenceUnitName; } /** * Set the {@link PersistenceManagedTypes} to use to build the list of managed types * as an alternative to entity scanning. * @param managedTypes the managed types * @since 6.0 */ public void setManagedTypes(PersistenceManagedTypes managedTypes) { this.managedTypes = managedTypes; } /** * Set whether to use Spring-based scanning for entity classes in the classpath * instead of using JPA's standard scanning of jar files with {@code persistence.xml} * markers in them. In case of Spring-based scanning, no {@code persistence.xml} * is necessary; all you need to do is to specify base packages to search here. * <p>Default is none. Specify packages to search for autodetection of your entity * classes in the classpath. This is analogous to Spring's component-scan feature * ({@link org.springframework.context.annotation.ClassPathBeanDefinitionScanner}). * <p>Consider setting a {@link PersistenceManagedTypes} instead that allows the * scanning logic to be optimized by AOT processing. * <p>Such package scanning defines a "default persistence unit" in Spring, which * may live next to regularly defined units originating from {@code persistence.xml}. * Its name is determined by {@link #setDefaultPersistenceUnitName}: by default, * it's simply "default". * <p><b>Note: There may be limitations in comparison to regular JPA scanning.</b> * In particular, JPA providers may pick up annotated packages for provider-specific * annotations only when driven by {@code persistence.xml}. As of 4.1, Spring's * scan can detect annotated packages as well if supported by the given * {@link org.springframework.orm.jpa.JpaVendorAdapter} (e.g. for Hibernate). * <p>If no explicit {@link #setMappingResources mapping resources} have been * specified in addition to these packages, this manager looks for a default * {@code META-INF/orm.xml} file in the classpath, registering it as a mapping * resource for the default unit if the mapping file is not co-located with a * {@code persistence.xml} file (in which case we assume it is only meant to be * used with the persistence units defined there, like in standard JPA). * @see #setManagedTypes(PersistenceManagedTypes) * @see #setDefaultPersistenceUnitName * @see #setMappingResources */ public void setPackagesToScan(String... packagesToScan) { this.packagesToScan = packagesToScan; } /** * Specify one or more mapping resources (equivalent to {@code <mapping-file>} * entries in {@code persistence.xml}) for the default persistence unit. * Can be used on its own or in combination with entity scanning in the classpath, * in both cases avoiding {@code persistence.xml}. * <p>Note that mapping resources must be relative to the classpath root, * e.g. "META-INF/mappings.xml" or "com/mycompany/repository/mappings.xml", * so that they can be loaded through {@code ClassLoader.getResource}. 
* <p>If no explicit mapping resources have been specified next to * {@link #setPackagesToScan packages to scan}, this manager looks for a default * {@code META-INF/orm.xml} file in the classpath, registering it as a mapping * resource for the default unit if the mapping file is not co-located with a * {@code persistence.xml} file (in which case we assume it is only meant to be * used with the persistence units defined there, like in standard JPA). * <p>Note that specifying an empty array/list here suppresses the default * {@code META-INF/orm.xml} check. On the other hand, explicitly specifying * {@code META-INF/orm.xml} here will register that file even if it happens * to be co-located with a {@code persistence.xml} file. * @see #setDefaultPersistenceUnitName * @see #setPackagesToScan */ public void setMappingResources(String... mappingResources) { this.mappingResources = mappingResources; } /** * Specify the JPA 2.0 shared cache mode for all of this manager's persistence * units, overriding any value in {@code persistence.xml} if set. * @since 4.0 * @see jakarta.persistence.spi.PersistenceUnitInfo#getSharedCacheMode() */ public void setSharedCacheMode(SharedCacheMode sharedCacheMode) { this.sharedCacheMode = sharedCacheMode; } /** * Specify the JPA 2.0 validation mode for all of this manager's persistence * units, overriding any value in {@code persistence.xml} if set. * @since 4.0 * @see jakarta.persistence.spi.PersistenceUnitInfo#getValidationMode() */ public void setValidationMode(ValidationMode validationMode) { this.validationMode = validationMode; } /** * Specify the JDBC DataSources that the JPA persistence provider is supposed * to use for accessing the database, resolving data source names in * {@code persistence.xml} against Spring-managed DataSources. * <p>The specified Map needs to define data source names for specific DataSource * objects, matching the data source names used in {@code persistence.xml}. * If not specified, data source names will be resolved as JNDI names instead * (as defined by standard JPA). * @see org.springframework.jdbc.datasource.lookup.MapDataSourceLookup */ public void setDataSources(Map<String, DataSource> dataSources) { this.dataSourceLookup = new MapDataSourceLookup(dataSources); } /** * Specify the JDBC DataSourceLookup that provides DataSources for the * persistence provider, resolving data source names in {@code persistence.xml} * against Spring-managed DataSource instances. * <p>Default is JndiDataSourceLookup, which resolves DataSource names as * JNDI names (as defined by standard JPA). Specify a BeanFactoryDataSourceLookup * instance if you want DataSource names to be resolved against Spring bean names. * <p>Alternatively, consider passing in a map from names to DataSource instances * via the "dataSources" property. If the {@code persistence.xml} file * does not define DataSource names at all, specify a default DataSource * via the "defaultDataSource" property. * @see org.springframework.jdbc.datasource.lookup.JndiDataSourceLookup * @see org.springframework.jdbc.datasource.lookup.BeanFactoryDataSourceLookup * @see #setDataSources * @see #setDefaultDataSource */ public void setDataSourceLookup(@Nullable DataSourceLookup dataSourceLookup) { this.dataSourceLookup = (dataSourceLookup != null ? dataSourceLookup : new JndiDataSourceLookup()); } /** * Return the JDBC DataSourceLookup that provides DataSources for the * persistence provider, resolving data source names in {@code persistence.xml} * against Spring-managed DataSource instances. 
*/ @Nullable public DataSourceLookup getDataSourceLookup() { return this.dataSourceLookup; } /** * Specify the JDBC DataSource that the JPA persistence provider is supposed to use * for accessing the database if none has been specified in {@code persistence.xml}. * This variant indicates no special transaction setup, i.e. typical resource-local. * <p>In JPA speak, a DataSource passed in here will be uses as "nonJtaDataSource" * on the PersistenceUnitInfo passed to the PersistenceProvider, provided that * none has been registered before. * @see jakarta.persistence.spi.PersistenceUnitInfo#getNonJtaDataSource() */ public void setDefaultDataSource(@Nullable DataSource defaultDataSource) { this.defaultDataSource = defaultDataSource; } /** * Return the JDBC DataSource that the JPA persistence provider is supposed to use * for accessing the database if none has been specified in {@code persistence.xml}. */ @Nullable public DataSource getDefaultDataSource() { return this.defaultDataSource; } /** * Specify the JDBC DataSource that the JPA persistence provider is supposed to use * for accessing the database if none has been specified in {@code persistence.xml}. * This variant indicates that JTA is supposed to be used as transaction type. * <p>In JPA speak, a DataSource passed in here will be uses as "jtaDataSource" * on the PersistenceUnitInfo passed to the PersistenceProvider, provided that * none has been registered before. * @see jakarta.persistence.spi.PersistenceUnitInfo#getJtaDataSource() */ public void setDefaultJtaDataSource(@Nullable DataSource defaultJtaDataSource) { this.defaultJtaDataSource = defaultJtaDataSource; } /** * Return the JTA-aware DataSource that the JPA persistence provider is supposed to use * for accessing the database if none has been specified in {@code persistence.xml}. */ @Nullable public DataSource getDefaultJtaDataSource() { return this.defaultJtaDataSource; } /** * Set the PersistenceUnitPostProcessors to be applied to each * PersistenceUnitInfo that has been parsed by this manager. * <p>Such post-processors can, for example, register further entity classes and * jar files, in addition to the metadata read from {@code persistence.xml}. */ public void setPersistenceUnitPostProcessors(@Nullable PersistenceUnitPostProcessor... postProcessors) { this.persistenceUnitPostProcessors = postProcessors; } /** * Return the PersistenceUnitPostProcessors to be applied to each * PersistenceUnitInfo that has been parsed by this manager. */ @Nullable public PersistenceUnitPostProcessor[] getPersistenceUnitPostProcessors() { return this.persistenceUnitPostProcessors; } /** * Specify the Spring LoadTimeWeaver to use for class instrumentation according * to the JPA class transformer contract. * <p>It is not required to specify a LoadTimeWeaver: Most providers will be able * to provide a subset of their functionality without class instrumentation as well, * or operate with their own VM agent specified on JVM startup. Furthermore, * DefaultPersistenceUnitManager falls back to an InstrumentationLoadTimeWeaver * if Spring's agent-based instrumentation is available at runtime. * <p>In terms of Spring-provided weaving options, the most important ones are * InstrumentationLoadTimeWeaver, which requires a Spring-specific (but very general) * VM agent specified on JVM startup, and ReflectiveLoadTimeWeaver, which interacts * with an underlying ClassLoader based on specific extended methods being available * on it (for example, interacting with Spring's TomcatInstrumentableClassLoader). 
* Consider using the {@code context:load-time-weaver} XML tag for creating * such a shared LoadTimeWeaver (autodetecting the environment by default). * @see org.springframework.instrument.classloading.InstrumentationLoadTimeWeaver * @see org.springframework.instrument.classloading.ReflectiveLoadTimeWeaver */ @Override public void setLoadTimeWeaver(@Nullable LoadTimeWeaver loadTimeWeaver) { this.loadTimeWeaver = loadTimeWeaver; } /** * Return the Spring LoadTimeWeaver to use for class instrumentation according * to the JPA class transformer contract. */ @Nullable public LoadTimeWeaver getLoadTimeWeaver() { return this.loadTimeWeaver; } @Override public void setResourceLoader(ResourceLoader resourceLoader) { this.resourcePatternResolver = ResourcePatternUtils.getResourcePatternResolver(resourceLoader); } @Override public void afterPropertiesSet() { if (this.loadTimeWeaver == null && InstrumentationLoadTimeWeaver.isInstrumentationAvailable()) { this.loadTimeWeaver = new InstrumentationLoadTimeWeaver(this.resourcePatternResolver.getClassLoader()); } preparePersistenceUnitInfos(); } /** * Prepare the PersistenceUnitInfos according to the configuration * of this manager: scanning for {@code persistence.xml} files, * parsing all matching files, configuring and post-processing them. * <p>PersistenceUnitInfos cannot be obtained before this preparation * method has been invoked. * @see #obtainDefaultPersistenceUnitInfo() * @see #obtainPersistenceUnitInfo(String) */ public void preparePersistenceUnitInfos() { this.persistenceUnitInfoNames.clear(); this.persistenceUnitInfos.clear(); List<SpringPersistenceUnitInfo> puis = readPersistenceUnitInfos(); for (SpringPersistenceUnitInfo pui : puis) { if (pui.getPersistenceUnitRootUrl() == null) { pui.setPersistenceUnitRootUrl(determineDefaultPersistenceUnitRootUrl()); } if (pui.getJtaDataSource() == null && this.defaultJtaDataSource != null) { pui.setJtaDataSource(this.defaultJtaDataSource); } if (pui.getNonJtaDataSource() == null && this.defaultDataSource != null) { pui.setNonJtaDataSource(this.defaultDataSource); } if (this.sharedCacheMode != null) { pui.setSharedCacheMode(this.sharedCacheMode); } if (this.validationMode != null) { pui.setValidationMode(this.validationMode); } if (this.loadTimeWeaver != null) { pui.init(this.loadTimeWeaver); } else { pui.init(this.resourcePatternResolver.getClassLoader()); } postProcessPersistenceUnitInfo(pui); String name = pui.getPersistenceUnitName(); if (!this.persistenceUnitInfoNames.add(name) && !isPersistenceUnitOverrideAllowed()) { StringBuilder msg = new StringBuilder(); msg.append("Conflicting persistence unit definitions for name '").append(name).append("': "); msg.append(pui.getPersistenceUnitRootUrl()).append(", "); msg.append(this.persistenceUnitInfos.get(name).getPersistenceUnitRootUrl()); throw new IllegalStateException(msg.toString()); } this.persistenceUnitInfos.put(name, pui); } } /** * Read all persistence unit infos from {@code persistence.xml}, * as defined in the JPA specification. 
*/ private List<SpringPersistenceUnitInfo> readPersistenceUnitInfos() { List<SpringPersistenceUnitInfo> infos = new ArrayList<>(1); String defaultName = this.defaultPersistenceUnitName; boolean buildDefaultUnit = (this.managedTypes != null || this.packagesToScan != null || this.mappingResources != null); boolean foundDefaultUnit = false; PersistenceUnitReader reader = new PersistenceUnitReader(this.resourcePatternResolver, this.dataSourceLookup); SpringPersistenceUnitInfo[] readInfos = reader.readPersistenceUnitInfos(this.persistenceXmlLocations); for (SpringPersistenceUnitInfo readInfo : readInfos) { infos.add(readInfo); if (defaultName != null && defaultName.equals(readInfo.getPersistenceUnitName())) { foundDefaultUnit = true; } } if (buildDefaultUnit) { if (foundDefaultUnit) { if (logger.isWarnEnabled()) { logger.warn("Found explicit default persistence unit with name '" + defaultName + "' in persistence.xml - " + "overriding local default persistence unit settings ('managedTypes', 'packagesToScan' or 'mappingResources')"); } } else { infos.add(buildDefaultPersistenceUnitInfo()); } } return infos; } /** * Perform Spring-based scanning for entity classes. * @see #setPackagesToScan */ private SpringPersistenceUnitInfo buildDefaultPersistenceUnitInfo() { SpringPersistenceUnitInfo scannedUnit = new SpringPersistenceUnitInfo(); if (this.defaultPersistenceUnitName != null) { scannedUnit.setPersistenceUnitName(this.defaultPersistenceUnitName); } scannedUnit.setExcludeUnlistedClasses(true); if (this.managedTypes != null) { applyManagedTypes(scannedUnit, this.managedTypes); } else if (this.packagesToScan != null) { applyManagedTypes(scannedUnit, new PersistenceManagedTypesScanner( this.resourcePatternResolver).scan(this.packagesToScan)); } if (this.mappingResources != null) { for (String mappingFileName : this.mappingResources) { scannedUnit.addMappingFileName(mappingFileName); } } else { Resource ormXml = getOrmXmlForDefaultPersistenceUnit(); if (ormXml != null) { scannedUnit.addMappingFileName(DEFAULT_ORM_XML_RESOURCE); if (scannedUnit.getPersistenceUnitRootUrl() == null) { try { scannedUnit.setPersistenceUnitRootUrl( PersistenceUnitReader.determinePersistenceUnitRootUrl(ormXml)); } catch (IOException ex) { logger.debug("Failed to determine persistence unit root URL from orm.xml location", ex); } } } } return scannedUnit; } private void applyManagedTypes(SpringPersistenceUnitInfo scannedUnit, PersistenceManagedTypes managedTypes) { managedTypes.getManagedClassNames().forEach(scannedUnit::addManagedClassName); managedTypes.getManagedPackages().forEach(scannedUnit::addManagedPackage); URL persistenceUnitRootUrl = managedTypes.getPersistenceUnitRootUrl(); if (scannedUnit.getPersistenceUnitRootUrl() == null && persistenceUnitRootUrl != null) { scannedUnit.setPersistenceUnitRootUrl(persistenceUnitRootUrl); } } /** * Try to determine the persistence unit root URL based on the given * "defaultPersistenceUnitRootLocation". * @return the persistence unit root URL to pass to the JPA PersistenceProvider * @see #setDefaultPersistenceUnitRootLocation */ @Nullable private URL determineDefaultPersistenceUnitRootUrl() { if (this.defaultPersistenceUnitRootLocation == null) { return null; } try { URL url = this.resourcePatternResolver.getResource(this.defaultPersistenceUnitRootLocation).getURL(); return (ResourceUtils.isJarURL(url) ? 
ResourceUtils.extractJarFileURL(url) : url); } catch (IOException ex) { throw new PersistenceException("Unable to resolve persistence unit root URL", ex); } } /** * Determine JPA's default "META-INF/orm.xml" resource for use with Spring's default * persistence unit, if any. * <p>Checks whether a "META-INF/orm.xml" file exists in the classpath and uses it * if it is not co-located with a "META-INF/persistence.xml" file. */ @Nullable private Resource getOrmXmlForDefaultPersistenceUnit() { Resource ormXml = this.resourcePatternResolver.getResource( this.defaultPersistenceUnitRootLocation + DEFAULT_ORM_XML_RESOURCE); if (ormXml.exists()) { try { Resource persistenceXml = ormXml.createRelative(PERSISTENCE_XML_FILENAME); if (!persistenceXml.exists()) { return ormXml; } } catch (IOException ex) { // Cannot resolve relative persistence.xml file - let's assume it's not there. return ormXml; } } return null; } /** * Return the specified PersistenceUnitInfo from this manager's cache * of processed persistence units, keeping it in the cache (i.e. not * 'obtaining' it for use but rather just accessing it for post-processing). * <p>This can be used in {@link #postProcessPersistenceUnitInfo} implementations, * detecting existing persistence units of the same name and potentially merging them. * @param persistenceUnitName the name of the desired persistence unit * @return the PersistenceUnitInfo in mutable form, or {@code null} if not available */ @Nullable protected final MutablePersistenceUnitInfo getPersistenceUnitInfo(String persistenceUnitName) { PersistenceUnitInfo pui = this.persistenceUnitInfos.get(persistenceUnitName); return (MutablePersistenceUnitInfo) pui; } /** * Hook method allowing subclasses to customize each PersistenceUnitInfo. * <p>The default implementation delegates to all registered PersistenceUnitPostProcessors. * It is usually preferable to register further entity classes, jar files etc there * rather than in a subclass of this manager, to be able to reuse the post-processors. * @param pui the chosen PersistenceUnitInfo, as read from {@code persistence.xml}. * Passed in as MutablePersistenceUnitInfo. * @see #setPersistenceUnitPostProcessors */ protected void postProcessPersistenceUnitInfo(MutablePersistenceUnitInfo pui) { PersistenceUnitPostProcessor[] postProcessors = getPersistenceUnitPostProcessors(); if (postProcessors != null) { for (PersistenceUnitPostProcessor postProcessor : postProcessors) { postProcessor.postProcessPersistenceUnitInfo(pui); } } } /** * Return whether an override of a same-named persistence unit is allowed. * <p>Default is {@code false}. May be overridden to return {@code true}, * for example if {@link #postProcessPersistenceUnitInfo} is able to handle that case. 
*/ protected boolean isPersistenceUnitOverrideAllowed() { return false; } @Override public PersistenceUnitInfo obtainDefaultPersistenceUnitInfo() { if (this.persistenceUnitInfoNames.isEmpty()) { throw new IllegalStateException("No persistence units parsed from " + ObjectUtils.nullSafeToString(this.persistenceXmlLocations)); } if (this.persistenceUnitInfos.isEmpty()) { throw new IllegalStateException("All persistence units from " + ObjectUtils.nullSafeToString(this.persistenceXmlLocations) + " already obtained"); } if (this.persistenceUnitInfos.size() > 1 && this.defaultPersistenceUnitName != null) { return obtainPersistenceUnitInfo(this.defaultPersistenceUnitName); } PersistenceUnitInfo pui = this.persistenceUnitInfos.values().iterator().next(); this.persistenceUnitInfos.clear(); return pui; } @Override public PersistenceUnitInfo obtainPersistenceUnitInfo(String persistenceUnitName) { PersistenceUnitInfo pui = this.persistenceUnitInfos.remove(persistenceUnitName); if (pui == null) { if (!this.persistenceUnitInfoNames.contains(persistenceUnitName)) { throw new IllegalArgumentException( "No persistence unit with name '" + persistenceUnitName + "' found"); } else { throw new IllegalStateException( "Persistence unit with name '" + persistenceUnitName + "' already obtained"); } } return pui; } }
Refine DefaultPersistenceUnitManager#determineDefaultPersistenceUnitRootUrl This commit refines the implementation to be more lenient when defaultPersistenceUnitRootLocation is equal to ORIGINAL_DEFAULT_PERSISTENCE_UNIT_ROOT_LOCATION and an IOException is thrown, which happens when running on a native image. Closes gh-29137
spring-orm/src/main/java/org/springframework/orm/jpa/persistenceunit/DefaultPersistenceUnitManager.java
Refine DefaultPersistenceUnitManager#determineDefaultPersistenceUnitRootUrl
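The commit message above describes the refinement only in prose; the old_contents shown here still rethrows the IOException unconditionally. As a rough, hedged sketch of what "more lenient" could look like (an assumption for illustration, not the actual change that closed gh-29137), the method could swallow the IOException and return null only when the location is still the original "classpath:" default, which is the case that fails on a native image:

@Nullable
private URL determineDefaultPersistenceUnitRootUrl() {
    if (this.defaultPersistenceUnitRootLocation == null) {
        return null;
    }
    try {
        URL url = this.resourcePatternResolver.getResource(this.defaultPersistenceUnitRootLocation).getURL();
        return (ResourceUtils.isJarURL(url) ? ResourceUtils.extractJarFileURL(url) : url);
    }
    catch (IOException ex) {
        // Assumed lenient branch: on a native image the "classpath:" root may not be
        // resolvable, so log and skip the default root URL instead of failing hard.
        if (ORIGINAL_DEFAULT_PERSISTENCE_UNIT_ROOT_LOCATION.equals(this.defaultPersistenceUnitRootLocation)) {
            logger.debug("Could not resolve default persistence unit root URL", ex);
            return null;
        }
        throw new PersistenceException("Unable to resolve persistence unit root URL", ex);
    }
}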
Java
apache-2.0
9a48b92e3119c82dacc98e341f56b896d7cc4913
0
apache/geronimo,apache/geronimo,apache/geronimo,apache/geronimo
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.geronimo.openejb.deployment; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import javax.ejb.EJB; import javax.ejb.EJBHome; import javax.ejb.EJBLocalHome; import javax.ejb.Local; import javax.ejb.Remote; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.geronimo.common.DeploymentException; import org.apache.geronimo.gbean.GBeanInfo; import org.apache.geronimo.gbean.GBeanInfoBuilder; import org.apache.geronimo.j2ee.deployment.Module; import org.apache.geronimo.j2ee.deployment.WebModule; import org.apache.geronimo.j2ee.j2eeobjectnames.NameFactory; import org.apache.geronimo.kernel.config.Configuration; import org.apache.geronimo.kernel.config.MultiParentClassLoader; import org.apache.geronimo.kernel.repository.Environment; import org.apache.geronimo.naming.deployment.AbstractNamingBuilder; import org.apache.geronimo.openejb.ClientEjbReference; import org.apache.geronimo.xbeans.javaee.EjbLocalRefType; import org.apache.geronimo.xbeans.javaee.EjbRefType; import org.apache.geronimo.xbeans.javaee.InjectionTargetType; import org.apache.openejb.OpenEJBException; import org.apache.openejb.config.AnnotationDeployer; import org.apache.openejb.config.JndiEncInfoBuilder; import org.apache.openejb.assembler.classic.EjbJarInfo; import org.apache.openejb.assembler.classic.JndiEncBuilder; import org.apache.openejb.assembler.classic.JndiEncInfo; import org.apache.openejb.core.ivm.naming.IntraVmJndiReference; import org.apache.openejb.jee.EjbLocalRef; import org.apache.openejb.jee.EjbRef; import org.apache.openejb.jee.InjectionTarget; import org.apache.openejb.jee.JndiConsumer; import org.apache.openejb.jee.SessionBean; import org.apache.xbean.finder.ClassFinder; import org.apache.xbean.finder.UrlSet; import org.apache.xmlbeans.QNameSet; import org.apache.xmlbeans.XmlObject; /** * @version $Revision: 475950 $ $Date: 2006-11-16 14:18:14 -0800 (Thu, 16 Nov 2006) $ */ public class EjbRefBuilder extends AbstractNamingBuilder { private static final Log log = LogFactory.getLog(EjbRefBuilder.class); private final QNameSet ejbRefQNameSet; private final QNameSet ejbLocalRefQNameSet; private final URI uri; public EjbRefBuilder(Environment defaultEnvironment, String[] eeNamespaces, String host, int port) throws URISyntaxException { super(defaultEnvironment); if (host != null) { uri = new URI("ejb", null, host, port, null, null, null); } else { uri = null; } ejbRefQNameSet = buildQNameSet(eeNamespaces, "ejb-ref"); ejbLocalRefQNameSet = buildQNameSet(eeNamespaces, 
"ejb-local-ref"); ejbRefQNameSet.union(ejbLocalRefQNameSet); } public QNameSet getSpecQNameSet() { return ejbRefQNameSet; } public QNameSet getPlanQNameSet() { return QNameSet.EMPTY; } protected boolean willMergeEnvironment(XmlObject specDD, XmlObject plan) { return specDD.selectChildren(ejbRefQNameSet).length > 0 || specDD.selectChildren(ejbLocalRefQNameSet).length > 0; } public void buildNaming(XmlObject specDD, XmlObject plan, Configuration localConfiguration, Configuration remoteConfiguration, Module module, Map componentContext) throws DeploymentException { JndiConsumer consumer = createJndiConsumer(specDD); processWebEjbAnnotations(module, consumer); Map<String, Object> map = null; try { EjbModuleBuilder.EarData earData = (EjbModuleBuilder.EarData) module.getRootEarContext().getGeneralData().get(EjbModuleBuilder.EarData.class); Collection<EjbJarInfo> ejbJars = Collections.emptySet(); if (earData != null) { ejbJars = earData.getEjbJars(); } JndiEncInfoBuilder jndiEncInfoBuilder = new JndiEncInfoBuilder(ejbJars); JndiEncInfo jndiEncInfo = jndiEncInfoBuilder.build(consumer, "GeronimoEnc"); JndiEncBuilder jndiEncBuilder = new JndiEncBuilder(jndiEncInfo, module.getName()); map = jndiEncBuilder.buildMap(); } catch (OpenEJBException e) { throw new DeploymentException(e); } for (Map.Entry<String, Object> entry : map.entrySet()) { String name = entry.getKey(); Object value = entry.getValue(); // work with names prefixed with java:comp/ if (name.startsWith("java:comp/")) { name = name.substring("java:comp/".length()); } // if this is a ref it will be prefixed with env/ if (name.startsWith("env/")) { if (uri != null) { value = createClientRef(value); } getJndiContextMap(componentContext).put(name, value); } } } private Object createClientRef(Object value) { IntraVmJndiReference intraVmJndiReference = (IntraVmJndiReference) value; String deploymentId = intraVmJndiReference.getJndiName(); if (deploymentId.startsWith("java:openejb/ejb/")) { deploymentId = deploymentId.substring("java:openejb/ejb/".length()); } ClientEjbReference clientRef = new ClientEjbReference(uri.toString(), deploymentId); return clientRef; } protected JndiConsumer createJndiConsumer(XmlObject specDD) throws DeploymentException { List<EjbRefType> ejbRefs = convert(specDD.selectChildren(ejbRefQNameSet), J2EE_CONVERTER, EjbRefType.class, EjbRefType.type); List<EjbLocalRefType> ejbLocalRefs = convert(specDD.selectChildren(ejbLocalRefQNameSet), J2EE_CONVERTER, EjbLocalRefType.class, EjbLocalRefType.type); // build jndi consumer JndiConsumer jndiConsumer = new SessionBean(); for (EjbRefType xmlbeansRef : ejbRefs) { // create the ejb-ref EjbRef ref = new EjbRef(); jndiConsumer.getEjbRef().add(ref); // ejb-ref-name ref.setEjbRefName(getStringValue(xmlbeansRef.getEjbRefName())); // ejb-ref-type String refType = getStringValue(xmlbeansRef.getEjbRefType()); if ("SESSION".equalsIgnoreCase(refType)) { ref.setEjbRefType(org.apache.openejb.jee.EjbRefType.SESSION); } else if ("ENTITY".equalsIgnoreCase(refType)) { ref.setEjbRefType(org.apache.openejb.jee.EjbRefType.ENTITY); } // home ref.setHome(getStringValue(xmlbeansRef.getHome())); // remote ref.setRemote(getStringValue(xmlbeansRef.getRemote())); // ejb-link ref.setEjbLink(getStringValue(xmlbeansRef.getEjbLink())); // mapped-name ref.setMappedName(getStringValue(xmlbeansRef.getMappedName())); // injection-targets if (xmlbeansRef.getInjectionTargetArray() != null) { for (InjectionTargetType injectionTargetType : xmlbeansRef.getInjectionTargetArray()) { InjectionTarget injectionTarget = new 
InjectionTarget(); injectionTarget.setInjectionTargetClass(getStringValue(injectionTargetType.getInjectionTargetClass())); injectionTarget.setInjectionTargetName(getStringValue(injectionTargetType.getInjectionTargetName())); ref.getInjectionTarget().add(injectionTarget); } } } for (EjbLocalRefType xmlbeansRef : ejbLocalRefs) { // create the ejb-ref EjbLocalRef ref = new EjbLocalRef(); jndiConsumer.getEjbLocalRef().add(ref); // ejb-ref-name ref.setEjbRefName(getStringValue(xmlbeansRef.getEjbRefName())); // ejb-ref-type String refType = getStringValue(xmlbeansRef.getEjbRefType()); if ("SESSION".equalsIgnoreCase(refType)) { ref.setEjbRefType(org.apache.openejb.jee.EjbRefType.SESSION); } else if ("ENTITY".equalsIgnoreCase(refType)) { ref.setEjbRefType(org.apache.openejb.jee.EjbRefType.ENTITY); } // home ref.setLocalHome(getStringValue(xmlbeansRef.getLocalHome())); // remote ref.setLocal(getStringValue(xmlbeansRef.getLocal())); // ejb-link ref.setEjbLink(getStringValue(xmlbeansRef.getEjbLink())); // mapped-name ref.setMappedName(getStringValue(xmlbeansRef.getMappedName())); // injection-targets if (xmlbeansRef.getInjectionTargetArray() != null) { for (InjectionTargetType injectionTargetType : xmlbeansRef.getInjectionTargetArray()) { InjectionTarget injectionTarget = new InjectionTarget(); injectionTarget.setInjectionTargetClass(getStringValue(injectionTargetType.getInjectionTargetClass())); injectionTarget.setInjectionTargetName(getStringValue(injectionTargetType.getInjectionTargetName())); ref.getInjectionTarget().add(injectionTarget); } } } return jndiConsumer; } private void processWebEjbAnnotations(Module module, JndiConsumer consumer) throws DeploymentException { if (module instanceof WebModule) { try { ClassLoader classLoader = module.getEarContext().getClassLoader(); UrlSet urlSet = new UrlSet(classLoader); if (classLoader instanceof MultiParentClassLoader) { MultiParentClassLoader multiParentClassLoader = (MultiParentClassLoader) classLoader; for (ClassLoader parent : multiParentClassLoader.getParents()) { if (parent != null) { urlSet = urlSet.exclude(parent); } } } else { ClassLoader parent = classLoader.getParent(); if (parent != null) { urlSet = urlSet.exclude(parent); } } ClassFinder finder = new ClassFinder(classLoader, urlSet.getUrls()); for (Field field : finder.findAnnotatedFields(EJB.class)) { EJB ejb = field.getAnnotation(EJB.class); AnnotationDeployer.Member member = new AnnotationDeployer.FieldMember(field); buildEjbRef(consumer, ejb, member); } for (Method method : finder.findAnnotatedMethods(EJB.class)) { EJB ejb = method.getAnnotation(EJB.class); AnnotationDeployer.Member member = new AnnotationDeployer.MethodMember(method); buildEjbRef(consumer, ejb, member); } } catch (IOException e) { // ignored... we tried log.warn("Unable to process @EJB annotations for web module" + module.getName(), e); } } } private void buildEjbRef(JndiConsumer consumer, EJB ejb, AnnotationDeployer.Member member) { EjbRef ejbRef = new EjbRef(); // This is how we deal with the fact that we don't know // whether to use an EjbLocalRef or EjbRef (remote). // We flag it uknown and let the linking code take care of // figuring out what to do with it. 
ejbRef.setRefType(EjbRef.Type.UNKNOWN); if (member != null) { // Set the member name where this will be injected InjectionTarget target = new InjectionTarget(); target.setInjectionTargetClass(member.getDeclaringClass().getName()); target.setInjectionTargetName(member.getName()); ejbRef.getInjectionTarget().add(target); } Class interfce = ejb.beanInterface(); if (interfce.equals(Object.class)) { interfce = (member == null) ? null : member.getType(); } if (interfce != null && !interfce.equals(Object.class)) { if (EJBHome.class.isAssignableFrom(interfce)) { ejbRef.setHome(interfce.getName()); Method[] methods = interfce.getMethods(); for (Method method : methods) { if (method.getName().startsWith("create")) { ejbRef.setRemote(method.getReturnType().getName()); break; } } ejbRef.setRefType(EjbRef.Type.REMOTE); } else if (EJBLocalHome.class.isAssignableFrom(interfce)) { ejbRef.setHome(interfce.getName()); Method[] methods = interfce.getMethods(); for (Method method : methods) { if (method.getName().startsWith("create")) { ejbRef.setRemote(method.getReturnType().getName()); break; } } ejbRef.setRefType(EjbRef.Type.LOCAL); } else { ejbRef.setRemote(interfce.getName()); if (interfce.getAnnotation(Local.class) != null) { ejbRef.setRefType(EjbRef.Type.LOCAL); } else if (interfce.getAnnotation(Remote.class) != null) { ejbRef.setRefType(EjbRef.Type.REMOTE); } } } // Get the ejb-ref-name String refName = ejb.name(); if (refName.equals("")) { refName = (member == null) ? null : member.getDeclaringClass().getName() + "/" + member.getName(); } ejbRef.setEjbRefName(refName); // Set the ejb-link, if any String ejbName = ejb.beanName(); if (ejbName.equals("")) { ejbName = null; } ejbRef.setEjbLink(ejbName); // Set the mappedName, if any String mappedName = ejb.mappedName(); if (mappedName.equals("")) { mappedName = null; } ejbRef.setMappedName(mappedName); switch (ejbRef.getRefType()) { case UNKNOWN: case REMOTE: consumer.getEjbRef().add(ejbRef); break; case LOCAL: consumer.getEjbLocalRef().add(new EjbLocalRef(ejbRef)); break; } } public static final GBeanInfo GBEAN_INFO; static { GBeanInfoBuilder infoBuilder = GBeanInfoBuilder.createStatic(EjbRefBuilder.class, NameFactory.MODULE_BUILDER); infoBuilder.addAttribute("eeNamespaces", String[].class, true, true); infoBuilder.addAttribute("defaultEnvironment", Environment.class, true, true); infoBuilder.addAttribute("host", String.class, true); infoBuilder.addAttribute("port", int.class, true); infoBuilder.setConstructor(new String[]{"defaultEnvironment", "eeNamespaces", "host", "port"}); GBEAN_INFO = infoBuilder.getBeanInfo(); } public static GBeanInfo getGBeanInfo() { return GBEAN_INFO; } }
modules/geronimo-openejb-builder/src/main/java/org/apache/geronimo/openejb/deployment/EjbRefBuilder.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.geronimo.openejb.deployment; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import javax.ejb.EJB; import javax.ejb.EJBHome; import javax.ejb.EJBLocalHome; import javax.ejb.Local; import javax.ejb.Remote; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.geronimo.common.DeploymentException; import org.apache.geronimo.gbean.GBeanInfo; import org.apache.geronimo.gbean.GBeanInfoBuilder; import org.apache.geronimo.j2ee.deployment.Module; import org.apache.geronimo.j2ee.deployment.WebModule; import org.apache.geronimo.j2ee.j2eeobjectnames.NameFactory; import org.apache.geronimo.kernel.config.Configuration; import org.apache.geronimo.kernel.config.MultiParentClassLoader; import org.apache.geronimo.kernel.repository.Environment; import org.apache.geronimo.naming.deployment.AbstractNamingBuilder; import org.apache.geronimo.openejb.ClientEjbReference; import org.apache.geronimo.xbeans.javaee.EjbLocalRefType; import org.apache.geronimo.xbeans.javaee.EjbRefType; import org.apache.geronimo.xbeans.javaee.InjectionTargetType; import org.apache.openejb.OpenEJBException; import org.apache.openejb.config.AnnotationDeployer; import org.apache.openejb.config.JndiEncInfoBuilder; import org.apache.openejb.assembler.classic.EjbJarInfo; import org.apache.openejb.assembler.classic.JndiEncBuilder; import org.apache.openejb.assembler.classic.JndiEncInfo; import org.apache.openejb.core.ivm.naming.IntraVmJndiReference; import org.apache.openejb.jee.EjbLocalRef; import org.apache.openejb.jee.EjbRef; import org.apache.openejb.jee.InjectionTarget; import org.apache.openejb.jee.JndiConsumer; import org.apache.openejb.jee.SessionBean; import org.apache.xbean.finder.ClassFinder; import org.apache.xbean.finder.UrlSet; import org.apache.xmlbeans.QNameSet; import org.apache.xmlbeans.XmlObject; /** * @version $Revision: 475950 $ $Date: 2006-11-16 14:18:14 -0800 (Thu, 16 Nov 2006) $ */ public class EjbRefBuilder extends AbstractNamingBuilder { private static final Log log = LogFactory.getLog(EjbRefBuilder.class); private final QNameSet ejbRefQNameSet; private final QNameSet ejbLocalRefQNameSet; private final URI uri; public EjbRefBuilder(Environment defaultEnvironment, String[] eeNamespaces, String host, int port) throws URISyntaxException { super(defaultEnvironment); if (host != null) { uri = new URI("ejb", null, host, port, null, null, null); } else { uri = null; } ejbRefQNameSet = buildQNameSet(eeNamespaces, "ejb-ref"); ejbLocalRefQNameSet = buildQNameSet(eeNamespaces, 
"ejb-local-ref"); ejbRefQNameSet.union(ejbLocalRefQNameSet); } public QNameSet getSpecQNameSet() { return ejbRefQNameSet; } public QNameSet getPlanQNameSet() { return QNameSet.EMPTY; } protected boolean willMergeEnvironment(XmlObject specDD, XmlObject plan) { return specDD.selectChildren(ejbRefQNameSet).length > 0 || specDD.selectChildren(ejbLocalRefQNameSet).length > 0; } public void buildNaming(XmlObject specDD, XmlObject plan, Configuration localConfiguration, Configuration remoteConfiguration, Module module, Map componentContext) throws DeploymentException { JndiConsumer consumer = createJndiConsumer(specDD); processWebEjbAnnotations(module, consumer); Map<String, Object> map = null; try { EjbModuleBuilder.EarData earData = (EjbModuleBuilder.EarData) module.getRootEarContext().getGeneralData().get(EjbModuleBuilder.EarData.class); Collection<EjbJarInfo> ejbJars = Collections.emptySet(); if (earData != null) { ejbJars = earData.getEjbJars(); } JndiEncInfoBuilder jndiEncInfoBuilder = new JndiEncInfoBuilder(ejbJars); JndiEncInfo jndiEncInfo = jndiEncInfoBuilder.build(consumer, "GeronimoEnc"); JndiEncBuilder jndiEncBuilder = new JndiEncBuilder(jndiEncInfo); map = jndiEncBuilder.buildMap(); } catch (OpenEJBException e) { throw new DeploymentException(e); } for (Map.Entry<String, Object> entry : map.entrySet()) { String name = entry.getKey(); Object value = entry.getValue(); // work with names prefixed with java:comp/ if (name.startsWith("java:comp/")) { name = name.substring("java:comp/".length()); } // if this is a ref it will be prefixed with env/ if (name.startsWith("env/")) { if (uri != null) { value = createClientRef(value); } getJndiContextMap(componentContext).put(name, value); } } } private Object createClientRef(Object value) { IntraVmJndiReference intraVmJndiReference = (IntraVmJndiReference) value; String deploymentId = intraVmJndiReference.getJndiName(); if (deploymentId.startsWith("java:openejb/ejb/")) { deploymentId = deploymentId.substring("java:openejb/ejb/".length()); } ClientEjbReference clientRef = new ClientEjbReference(uri.toString(), deploymentId); return clientRef; } protected JndiConsumer createJndiConsumer(XmlObject specDD) throws DeploymentException { List<EjbRefType> ejbRefs = convert(specDD.selectChildren(ejbRefQNameSet), J2EE_CONVERTER, EjbRefType.class, EjbRefType.type); List<EjbLocalRefType> ejbLocalRefs = convert(specDD.selectChildren(ejbLocalRefQNameSet), J2EE_CONVERTER, EjbLocalRefType.class, EjbLocalRefType.type); // build jndi consumer JndiConsumer jndiConsumer = new SessionBean(); for (EjbRefType xmlbeansRef : ejbRefs) { // create the ejb-ref EjbRef ref = new EjbRef(); jndiConsumer.getEjbRef().add(ref); // ejb-ref-name ref.setEjbRefName(getStringValue(xmlbeansRef.getEjbRefName())); // ejb-ref-type String refType = getStringValue(xmlbeansRef.getEjbRefType()); if ("SESSION".equalsIgnoreCase(refType)) { ref.setEjbRefType(org.apache.openejb.jee.EjbRefType.SESSION); } else if ("ENTITY".equalsIgnoreCase(refType)) { ref.setEjbRefType(org.apache.openejb.jee.EjbRefType.ENTITY); } // home ref.setHome(getStringValue(xmlbeansRef.getHome())); // remote ref.setRemote(getStringValue(xmlbeansRef.getRemote())); // ejb-link ref.setEjbLink(getStringValue(xmlbeansRef.getEjbLink())); // mapped-name ref.setMappedName(getStringValue(xmlbeansRef.getMappedName())); // injection-targets if (xmlbeansRef.getInjectionTargetArray() != null) { for (InjectionTargetType injectionTargetType : xmlbeansRef.getInjectionTargetArray()) { InjectionTarget injectionTarget = new InjectionTarget(); 
injectionTarget.setInjectionTargetClass(getStringValue(injectionTargetType.getInjectionTargetClass())); injectionTarget.setInjectionTargetName(getStringValue(injectionTargetType.getInjectionTargetName())); ref.getInjectionTarget().add(injectionTarget); } } } for (EjbLocalRefType xmlbeansRef : ejbLocalRefs) { // create the ejb-ref EjbLocalRef ref = new EjbLocalRef(); jndiConsumer.getEjbLocalRef().add(ref); // ejb-ref-name ref.setEjbRefName(getStringValue(xmlbeansRef.getEjbRefName())); // ejb-ref-type String refType = getStringValue(xmlbeansRef.getEjbRefType()); if ("SESSION".equalsIgnoreCase(refType)) { ref.setEjbRefType(org.apache.openejb.jee.EjbRefType.SESSION); } else if ("ENTITY".equalsIgnoreCase(refType)) { ref.setEjbRefType(org.apache.openejb.jee.EjbRefType.ENTITY); } // home ref.setLocalHome(getStringValue(xmlbeansRef.getLocalHome())); // remote ref.setLocal(getStringValue(xmlbeansRef.getLocal())); // ejb-link ref.setEjbLink(getStringValue(xmlbeansRef.getEjbLink())); // mapped-name ref.setMappedName(getStringValue(xmlbeansRef.getMappedName())); // injection-targets if (xmlbeansRef.getInjectionTargetArray() != null) { for (InjectionTargetType injectionTargetType : xmlbeansRef.getInjectionTargetArray()) { InjectionTarget injectionTarget = new InjectionTarget(); injectionTarget.setInjectionTargetClass(getStringValue(injectionTargetType.getInjectionTargetClass())); injectionTarget.setInjectionTargetName(getStringValue(injectionTargetType.getInjectionTargetName())); ref.getInjectionTarget().add(injectionTarget); } } } return jndiConsumer; } private void processWebEjbAnnotations(Module module, JndiConsumer consumer) throws DeploymentException { if (module instanceof WebModule) { try { ClassLoader classLoader = module.getEarContext().getClassLoader(); UrlSet urlSet = new UrlSet(classLoader); if (classLoader instanceof MultiParentClassLoader) { MultiParentClassLoader multiParentClassLoader = (MultiParentClassLoader) classLoader; for (ClassLoader parent : multiParentClassLoader.getParents()) { if (parent != null) { urlSet = urlSet.exclude(parent); } } } else { ClassLoader parent = classLoader.getParent(); if (parent != null) { urlSet = urlSet.exclude(parent); } } ClassFinder finder = new ClassFinder(classLoader, urlSet.getUrls()); for (Field field : finder.findAnnotatedFields(EJB.class)) { EJB ejb = field.getAnnotation(EJB.class); AnnotationDeployer.Member member = new AnnotationDeployer.FieldMember(field); buildEjbRef(consumer, ejb, member); } for (Method method : finder.findAnnotatedMethods(EJB.class)) { EJB ejb = method.getAnnotation(EJB.class); AnnotationDeployer.Member member = new AnnotationDeployer.MethodMember(method); buildEjbRef(consumer, ejb, member); } } catch (IOException e) { // ignored... we tried log.warn("Unable to process @EJB annotations for web module" + module.getName(), e); } } } private void buildEjbRef(JndiConsumer consumer, EJB ejb, AnnotationDeployer.Member member) { EjbRef ejbRef = new EjbRef(); // This is how we deal with the fact that we don't know // whether to use an EjbLocalRef or EjbRef (remote). // We flag it uknown and let the linking code take care of // figuring out what to do with it. 
ejbRef.setRefType(EjbRef.Type.UNKNOWN); if (member != null) { // Set the member name where this will be injected InjectionTarget target = new InjectionTarget(); target.setInjectionTargetClass(member.getDeclaringClass().getName()); target.setInjectionTargetName(member.getName()); ejbRef.getInjectionTarget().add(target); } Class interfce = ejb.beanInterface(); if (interfce.equals(Object.class)) { interfce = (member == null) ? null : member.getType(); } if (interfce != null && !interfce.equals(Object.class)) { if (EJBHome.class.isAssignableFrom(interfce)) { ejbRef.setHome(interfce.getName()); Method[] methods = interfce.getMethods(); for (Method method : methods) { if (method.getName().startsWith("create")) { ejbRef.setRemote(method.getReturnType().getName()); break; } } ejbRef.setRefType(EjbRef.Type.REMOTE); } else if (EJBLocalHome.class.isAssignableFrom(interfce)) { ejbRef.setHome(interfce.getName()); Method[] methods = interfce.getMethods(); for (Method method : methods) { if (method.getName().startsWith("create")) { ejbRef.setRemote(method.getReturnType().getName()); break; } } ejbRef.setRefType(EjbRef.Type.LOCAL); } else { ejbRef.setRemote(interfce.getName()); if (interfce.getAnnotation(Local.class) != null) { ejbRef.setRefType(EjbRef.Type.LOCAL); } else if (interfce.getAnnotation(Remote.class) != null) { ejbRef.setRefType(EjbRef.Type.REMOTE); } } } // Get the ejb-ref-name String refName = ejb.name(); if (refName.equals("")) { refName = (member == null) ? null : member.getDeclaringClass().getName() + "/" + member.getName(); } ejbRef.setEjbRefName(refName); // Set the ejb-link, if any String ejbName = ejb.beanName(); if (ejbName.equals("")) { ejbName = null; } ejbRef.setEjbLink(ejbName); // Set the mappedName, if any String mappedName = ejb.mappedName(); if (mappedName.equals("")) { mappedName = null; } ejbRef.setMappedName(mappedName); switch (ejbRef.getRefType()) { case UNKNOWN: case REMOTE: consumer.getEjbRef().add(ejbRef); break; case LOCAL: consumer.getEjbLocalRef().add(new EjbLocalRef(ejbRef)); break; } } public static final GBeanInfo GBEAN_INFO; static { GBeanInfoBuilder infoBuilder = GBeanInfoBuilder.createStatic(EjbRefBuilder.class, NameFactory.MODULE_BUILDER); infoBuilder.addAttribute("eeNamespaces", String[].class, true, true); infoBuilder.addAttribute("defaultEnvironment", Environment.class, true, true); infoBuilder.addAttribute("host", String.class, true); infoBuilder.addAttribute("port", int.class, true); infoBuilder.setConstructor(new String[]{"defaultEnvironment", "eeNamespaces", "host", "port"}); GBEAN_INFO = infoBuilder.getBeanInfo(); } public static GBeanInfo getGBeanInfo() { return GBEAN_INFO; } }
Added missing moduleId to JndiEncBuilder constructor. git-svn-id: 0d16bf2c240b8111500ec482b35765e5042f5526@505047 13f79535-47bb-0310-9956-ffa450edef68
modules/geronimo-openejb-builder/src/main/java/org/apache/geronimo/openejb/deployment/EjbRefBuilder.java
Added missing moduleId to JndiEncBuilder constructor.
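For the record above, the functional difference between new_contents and old_contents is the extra moduleId argument handed to the JndiEncBuilder constructor in buildNaming. A condensed sketch, reusing only names that already appear in the code above (the surrounding try/catch and OpenEJBException handling are elided):

// old_contents built the JNDI ENC map without a module id:
//     JndiEncBuilder jndiEncBuilder = new JndiEncBuilder(jndiEncInfo);
// new_contents passes the Geronimo module name as the missing moduleId:
JndiEncBuilder jndiEncBuilder = new JndiEncBuilder(jndiEncInfo, module.getName());
Map<String, Object> map = jndiEncBuilder.buildMap();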
Java
apache-2.0
372867d77d2c35aef07dbfb22fe2ee69b12c556b
0
Orange-OpenSource/cf-java-client,cloudfoundry/cf-java-client,cloudfoundry/cf-java-client,orange-cloudfoundry/cf-java-client,cloudfoundry/cf-java-client,alexander071/cf-java-client,orange-cloudfoundry/cf-java-client,alexander071/cf-java-client,Orange-OpenSource/cf-java-client
/* * Copyright 2013-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cloudfoundry.client.spring.util; import org.cloudfoundry.client.RequestValidationException; import org.cloudfoundry.client.Validatable; import org.cloudfoundry.client.ValidationResult; import org.cloudfoundry.client.spring.v2.CloudFoundryExceptionBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.RequestEntity; import org.springframework.web.client.HttpStatusCodeException; import org.springframework.web.client.RestOperations; import org.springframework.web.util.UriComponentsBuilder; import reactor.Publishers; import reactor.rx.Stream; import reactor.rx.Streams; import java.net.URI; import java.util.function.Consumer; import java.util.function.Supplier; import static org.springframework.http.HttpMethod.PATCH; import static org.springframework.http.HttpMethod.PUT; public abstract class AbstractSpringOperations { private final Logger logger = LoggerFactory.getLogger(this.getClass()); protected final RestOperations restOperations; protected final URI root; protected AbstractSpringOperations(RestOperations restOperations, URI root) { this.restOperations = restOperations; this.root = root; } protected final <T> Stream<T> get(Validatable request, Class<T> responseType, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("GET {}", uri); return this.restOperations.getForObject(uri, responseType); }); } protected final Stream<Void> delete(Validatable request, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("DELETE {}", uri); this.restOperations.delete(uri); return null; }); } protected final <T> Stream<T> exchange(Validatable request, Supplier<T> exchange) { return Streams.wrap(Publishers.create(subscriber -> { if (request != null) { ValidationResult validationResult = request.isValid(); if (validationResult.getStatus() == ValidationResult.Status.INVALID) { subscriber.onError(new RequestValidationException(validationResult)); return; } } try { subscriber.onNext(exchange.get()); subscriber.onComplete(); } catch (HttpStatusCodeException e) { subscriber.onError(CloudFoundryExceptionBuilder.build(e)); } catch (Exception e) { subscriber.onError(e); } })); } protected final <T> Stream<T> patch(Validatable request, Class<T> responseType, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("PATCH {}", uri); return this.restOperations.exchange(new RequestEntity<>(request, PATCH, uri), 
responseType).getBody(); }); } protected final <T> Stream<T> post(Validatable request, Class<T> responseType, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("POST {}", uri); return this.restOperations.postForObject(uri, request, responseType); }); } protected final <T> Stream<T> put(Validatable request, Class<T> responseType, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("PUT {}", uri); return this.restOperations.exchange(new RequestEntity<>(request, PUT, uri), responseType).getBody(); }); } }
cloudfoundry-client-spring/src/main/java/org/cloudfoundry/client/spring/util/AbstractSpringOperations.java
/* * Copyright 2013-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cloudfoundry.client.spring.util; import org.cloudfoundry.client.RequestValidationException; import org.cloudfoundry.client.Validatable; import org.cloudfoundry.client.ValidationResult; import org.cloudfoundry.client.spring.v2.CloudFoundryExceptionBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.RequestEntity; import org.springframework.web.client.HttpStatusCodeException; import org.springframework.web.client.RestOperations; import org.springframework.web.util.UriComponentsBuilder; import reactor.Publishers; import reactor.rx.Stream; import reactor.rx.Streams; import java.net.URI; import java.util.function.Consumer; import java.util.function.Supplier; import static org.springframework.http.HttpMethod.PATCH; import static org.springframework.http.HttpMethod.PUT; public abstract class AbstractSpringOperations { private final Logger logger = LoggerFactory.getLogger(this.getClass()); protected final RestOperations restOperations; protected final URI root; protected AbstractSpringOperations(RestOperations restOperations, URI root) { this.restOperations = restOperations; this.root = root; } protected final <T> Stream<T> get(Validatable request, Class<T> responseType, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("GET {}", uri); return this.restOperations.getForObject(uri, responseType); }); } protected final Stream<Void> delete(Validatable request, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("DELETE {}", uri); this.restOperations.delete(uri); return null; }); } protected final <T> Stream<T> exchange(Validatable request, Supplier<T> exchange) { return Streams.wrap(Publishers.create(subscriber -> { if (request != null) { ValidationResult validationResult = request.isValid(); if (validationResult.getStatus() == ValidationResult.Status.INVALID) { subscriber.onError(new RequestValidationException(validationResult)); return; } } try { subscriber.onNext(exchange.get()); subscriber.onComplete(); } catch (HttpStatusCodeException e) { subscriber.onError(CloudFoundryExceptionBuilder.build(e)); } catch (Exception e) { subscriber.onError(e); } })); } protected final <T> Stream<T> patch(Validatable request, Class<T> responseType, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("PATCH {}", uri); return this.restOperations.exchange(new RequestEntity<>(request, PATCH, uri), 
responseType).getBody(); }); } protected final <T> Stream<T> post(Validatable request, Class<T> responseType, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("POST {}", uri); return this.restOperations.postForObject(uri, request, responseType); }); } protected final <T> Stream<T> put(Validatable request, Class<T> responseType, Consumer<UriComponentsBuilder> builderCallback) { return exchange(request, () -> { UriComponentsBuilder builder = UriComponentsBuilder.fromUri(this.root); builderCallback.accept(builder); URI uri = builder.build().toUri(); this.logger.debug("PUT {}", uri); if(Void.class == responseType) { this.restOperations.put(uri, request); return null; } else { return this.restOperations.exchange(new RequestEntity<>(request, PUT, uri), responseType).getBody(); } }); } }
Map Route Reverting changes to AbstractSpringOperations [#101421076]
cloudfoundry-client-spring/src/main/java/org/cloudfoundry/client/spring/util/AbstractSpringOperations.java
Map Route
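The revert noted above swaps put() back from the Void-aware variant in old_contents to the plain exchange-based variant in new_contents. A condensed comparison, using only calls that already appear in the code above; the two helper method names are hypothetical and exist only to label the variants:

// new_contents (kept after the revert): every PUT goes through exchange(),
// even when no response body is expected.
protected final <T> T putViaExchange(Validatable request, Class<T> responseType, URI uri) {
    return this.restOperations.exchange(new RequestEntity<>(request, PUT, uri), responseType).getBody();
}

// old_contents (removed by the revert): Void responses short-circuited to a plain put().
protected final <T> T putWithVoidShortCircuit(Validatable request, Class<T> responseType, URI uri) {
    if (Void.class == responseType) {
        this.restOperations.put(uri, request);
        return null;
    }
    return this.restOperations.exchange(new RequestEntity<>(request, PUT, uri), responseType).getBody();
}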
Java
apache-2.0
6a6bc0a4213f721c531a0d74633c4cbe16055b6e
0
patriziobruno/mktsurveyservice-java,patriziobruno/mktsurveyservice-java
/* * Copyright 2016 eul0860. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.dstc.mkts.rest.auth; import java.net.URI; import java.net.URISyntaxException; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import net.dstc.mkts.api.AuthManager; import org.apache.oltu.oauth2.as.issuer.MD5Generator; import org.apache.oltu.oauth2.as.issuer.OAuthIssuer; import org.apache.oltu.oauth2.as.issuer.OAuthIssuerImpl; import org.apache.oltu.oauth2.as.request.OAuthAuthzRequest; import org.apache.oltu.oauth2.as.request.OAuthTokenRequest; import org.apache.oltu.oauth2.as.response.OAuthASResponse; import org.apache.oltu.oauth2.common.OAuth; import org.apache.oltu.oauth2.common.error.OAuthError; import org.apache.oltu.oauth2.common.exception.OAuthProblemException; import org.apache.oltu.oauth2.common.exception.OAuthSystemException; import org.apache.oltu.oauth2.common.message.OAuthResponse; import org.apache.oltu.oauth2.common.message.types.GrantType; import org.apache.oltu.oauth2.common.message.types.ResponseType; import org.apache.oltu.oauth2.common.utils.OAuthUtils; /** * * @author Patrizio Bruno <[email protected]> */ @Path("oauth2") public class AuthEndpoint { @Inject private AuthManager authManager; @GET @Path("authorize") public Response authorize(@Context HttpServletRequest request) throws URISyntaxException, OAuthSystemException, OAuthProblemException { try { OAuthAuthzRequest oauthRequest = new OAuthAuthzRequest(request); OAuthIssuerImpl oauthIssuerImpl = new OAuthIssuerImpl( new MD5Generator()); //build response according to response_type String responseType = oauthRequest.getParam( OAuth.OAUTH_RESPONSE_TYPE); OAuthASResponse.OAuthAuthorizationResponseBuilder builder = OAuthASResponse.authorizationResponse(request, HttpServletResponse.SC_FOUND); if (responseType.equals(ResponseType.CODE.toString())) { final String authorizationCode = oauthIssuerImpl. authorizationCode(); authManager.addAuthCode(authorizationCode); builder.setCode(authorizationCode); } if (responseType.equals(ResponseType.TOKEN.toString())) { final String accessToken = oauthIssuerImpl.accessToken(); authManager.addToken(accessToken); builder.setAccessToken(accessToken); builder.setExpiresIn(3600l); } String redirectURI = oauthRequest.getParam(OAuth.OAUTH_REDIRECT_URI); final OAuthResponse response = builder.location(redirectURI). buildQueryMessage(); URI url = new URI(response.getLocationUri()); return Response.status(response.getResponseStatus()).location(url). 
build(); } catch (OAuthProblemException e) { final Response.ResponseBuilder responseBuilder = Response.status( HttpServletResponse.SC_FOUND); String redirectUri = e.getRedirectUri(); if (OAuthUtils.isEmpty(redirectUri)) { throw new WebApplicationException( responseBuilder.entity( "OAuth callback url needs to be provided by client!!!"). build()); } final OAuthResponse response = OAuthASResponse.errorResponse( HttpServletResponse.SC_FOUND) .error(e).location(redirectUri).buildQueryMessage(); final URI location = new URI(response.getLocationUri()); return responseBuilder.location(location).build(); } } public static final String INVALID_CLIENT_DESCRIPTION = "Client authentication failed (e.g., unknown client, no client authentication included, or unsupported authentication method)."; @Path("token") @POST @Consumes("application/x-www-form-urlencoded") @Produces("application/json") public Response token(@Context HttpServletRequest request) throws OAuthSystemException { try { OAuthTokenRequest oauthRequest = new OAuthTokenRequest(request); OAuthIssuer oauthIssuerImpl = new OAuthIssuerImpl(new MD5Generator()); // check if clientid is valid if (!checkClientId(oauthRequest.getClientId())) { return buildInvalidClientIdResponse(); } // check if client_secret is valid if (!checkClientSecret(oauthRequest.getClientSecret())) { return buildInvalidClientSecretResponse(); } // do checking for different grant types if (oauthRequest.getParam(OAuth.OAUTH_GRANT_TYPE).equals(GrantType.AUTHORIZATION_CODE.toString())) { if (!authManager.isValidAuthCode(oauthRequest.getParam(OAuth.OAUTH_CODE))) { return buildBadAuthCodeResponse(); } } else if (oauthRequest.getParam(OAuth.OAUTH_GRANT_TYPE).equals(GrantType.PASSWORD.toString())) { if (!checkUserPass(oauthRequest.getUsername(), oauthRequest.getPassword())) { return buildInvalidUserPassResponse(); } } else if (oauthRequest.getParam(OAuth.OAUTH_GRANT_TYPE).equals(GrantType.REFRESH_TOKEN.toString())) { // refresh token is not supported in this implementation return buildInvalidUserPassResponse(); } final String accessToken = oauthIssuerImpl.accessToken(); authManager.addToken(accessToken); OAuthResponse response = OAuthASResponse .tokenResponse(HttpServletResponse.SC_OK) .setAccessToken(accessToken) .setExpiresIn("3600") .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } catch (OAuthProblemException e) { OAuthResponse res = OAuthASResponse.errorResponse(HttpServletResponse.SC_BAD_REQUEST).error(e) .buildJSONMessage(); return Response.status(res.getResponseStatus()).entity(res.getBody()).build(); } } private Response buildInvalidClientIdResponse() throws OAuthSystemException { OAuthResponse response = OAuthASResponse.errorResponse(HttpServletResponse.SC_BAD_REQUEST) .setError(OAuthError.TokenResponse.INVALID_CLIENT) .setErrorDescription(INVALID_CLIENT_DESCRIPTION) .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } private Response buildInvalidClientSecretResponse() throws OAuthSystemException { OAuthResponse response = OAuthASResponse.errorResponse(HttpServletResponse.SC_UNAUTHORIZED) .setError(OAuthError.TokenResponse.UNAUTHORIZED_CLIENT).setErrorDescription(INVALID_CLIENT_DESCRIPTION) .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } private Response buildBadAuthCodeResponse() throws OAuthSystemException { OAuthResponse response = OAuthASResponse 
.errorResponse(HttpServletResponse.SC_BAD_REQUEST) .setError(OAuthError.TokenResponse.INVALID_GRANT) .setErrorDescription("invalid authorization code") .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } private Response buildInvalidUserPassResponse() throws OAuthSystemException { OAuthResponse response = OAuthASResponse .errorResponse(HttpServletResponse.SC_BAD_REQUEST) .setError(OAuthError.TokenResponse.INVALID_GRANT) .setErrorDescription("invalid username or password") .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } private boolean checkClientId(String clientId) { return "test".equals(clientId); } private boolean checkClientSecret(String secret) { return "test".equals(secret); } private boolean checkUserPass(String user, String pass) { return "test".equals(pass) && "test".equals(user); } }
src/main/java/net/dstc/mkts/rest/auth/AuthEndpoint.java
/* * Copyright 2016 eul0860. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.dstc.mkts.rest.auth; import java.net.URI; import java.net.URISyntaxException; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import net.dstc.mkts.api.AuthManager; import org.apache.oltu.oauth2.as.issuer.MD5Generator; import org.apache.oltu.oauth2.as.issuer.OAuthIssuer; import org.apache.oltu.oauth2.as.issuer.OAuthIssuerImpl; import org.apache.oltu.oauth2.as.request.OAuthAuthzRequest; import org.apache.oltu.oauth2.as.request.OAuthTokenRequest; import org.apache.oltu.oauth2.as.response.OAuthASResponse; import org.apache.oltu.oauth2.common.OAuth; import org.apache.oltu.oauth2.common.error.OAuthError; import org.apache.oltu.oauth2.common.exception.OAuthProblemException; import org.apache.oltu.oauth2.common.exception.OAuthSystemException; import org.apache.oltu.oauth2.common.message.OAuthResponse; import org.apache.oltu.oauth2.common.message.types.GrantType; import org.apache.oltu.oauth2.common.message.types.ResponseType; import org.apache.oltu.oauth2.common.utils.OAuthUtils; /** * * @author Patrizio Bruno <[email protected]> */ @Path("oauth2") public class AuthEndpoint { @Inject private AuthManager authManager; @GET @Path("authorize") public Response authorize(@Context HttpServletRequest request) throws URISyntaxException, OAuthSystemException, OAuthProblemException { try { OAuthAuthzRequest oauthRequest = new OAuthAuthzRequest(request); OAuthIssuerImpl oauthIssuerImpl = new OAuthIssuerImpl( new MD5Generator()); //build response according to response_type String responseType = oauthRequest.getParam( OAuth.OAUTH_RESPONSE_TYPE); OAuthASResponse.OAuthAuthorizationResponseBuilder builder = OAuthASResponse.authorizationResponse(request, HttpServletResponse.SC_FOUND); if (responseType.equals(ResponseType.CODE.toString())) { final String authorizationCode = oauthIssuerImpl. authorizationCode(); authManager.addAuthCode(authorizationCode); builder.setCode(authorizationCode); } if (responseType.equals(ResponseType.TOKEN.toString())) { final String accessToken = oauthIssuerImpl.accessToken(); authManager.addToken(accessToken); builder.setAccessToken(accessToken); builder.setExpiresIn(3600l); } String redirectURI = oauthRequest.getParam(OAuth.OAUTH_REDIRECT_URI); final OAuthResponse response = builder.location(redirectURI). buildQueryMessage(); URI url = new URI(response.getLocationUri()); return Response.status(response.getResponseStatus()).location(url). 
build(); } catch (OAuthProblemException e) { final Response.ResponseBuilder responseBuilder = Response.status( HttpServletResponse.SC_FOUND); String redirectUri = e.getRedirectUri(); if (OAuthUtils.isEmpty(redirectUri)) { throw new WebApplicationException( responseBuilder.entity( "OAuth callback url needs to be provided by client!!!"). build()); } final OAuthResponse response = OAuthASResponse.errorResponse( HttpServletResponse.SC_FOUND) .error(e).location(redirectUri).buildQueryMessage(); final URI location = new URI(response.getLocationUri()); return responseBuilder.location(location).build(); } } public static final String INVALID_CLIENT_DESCRIPTION = "Client authentication failed (e.g., unknown client, no client authentication included, or unsupported authentication method)."; @Path("token") @GET @Consumes("application/x-www-form-urlencoded") @Produces("application/json") public Response token(@Context HttpServletRequest request) throws OAuthSystemException { try { OAuthTokenRequest oauthRequest = new OAuthTokenRequest(request); OAuthIssuer oauthIssuerImpl = new OAuthIssuerImpl(new MD5Generator()); // check if clientid is valid if (!checkClientId(oauthRequest.getClientId())) { return buildInvalidClientIdResponse(); } // check if client_secret is valid if (!checkClientSecret(oauthRequest.getClientSecret())) { return buildInvalidClientSecretResponse(); } // do checking for different grant types if (oauthRequest.getParam(OAuth.OAUTH_GRANT_TYPE).equals(GrantType.AUTHORIZATION_CODE.toString())) { if (!authManager.isValidAuthCode(oauthRequest.getParam(OAuth.OAUTH_CODE))) { return buildBadAuthCodeResponse(); } } else if (oauthRequest.getParam(OAuth.OAUTH_GRANT_TYPE).equals(GrantType.PASSWORD.toString())) { if (!checkUserPass(oauthRequest.getUsername(), oauthRequest.getPassword())) { return buildInvalidUserPassResponse(); } } else if (oauthRequest.getParam(OAuth.OAUTH_GRANT_TYPE).equals(GrantType.REFRESH_TOKEN.toString())) { // refresh token is not supported in this implementation buildInvalidUserPassResponse(); } final String accessToken = oauthIssuerImpl.accessToken(); authManager.addToken(accessToken); OAuthResponse response = OAuthASResponse .tokenResponse(HttpServletResponse.SC_OK) .setAccessToken(accessToken) .setExpiresIn("3600") .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } catch (OAuthProblemException e) { OAuthResponse res = OAuthASResponse.errorResponse(HttpServletResponse.SC_BAD_REQUEST).error(e) .buildJSONMessage(); return Response.status(res.getResponseStatus()).entity(res.getBody()).build(); } } private Response buildInvalidClientIdResponse() throws OAuthSystemException { OAuthResponse response = OAuthASResponse.errorResponse(HttpServletResponse.SC_BAD_REQUEST) .setError(OAuthError.TokenResponse.INVALID_CLIENT) .setErrorDescription(INVALID_CLIENT_DESCRIPTION) .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } private Response buildInvalidClientSecretResponse() throws OAuthSystemException { OAuthResponse response = OAuthASResponse.errorResponse(HttpServletResponse.SC_UNAUTHORIZED) .setError(OAuthError.TokenResponse.UNAUTHORIZED_CLIENT).setErrorDescription(INVALID_CLIENT_DESCRIPTION) .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } private Response buildBadAuthCodeResponse() throws OAuthSystemException { OAuthResponse response = OAuthASResponse .errorResponse(HttpServletResponse.SC_BAD_REQUEST) 
.setError(OAuthError.TokenResponse.INVALID_GRANT) .setErrorDescription("invalid authorization code") .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } private Response buildInvalidUserPassResponse() throws OAuthSystemException { OAuthResponse response = OAuthASResponse .errorResponse(HttpServletResponse.SC_BAD_REQUEST) .setError(OAuthError.TokenResponse.INVALID_GRANT) .setErrorDescription("invalid username or password") .buildJSONMessage(); return Response.status(response.getResponseStatus()).entity(response.getBody()).build(); } private boolean checkClientId(String clientId) { return true; } private boolean checkClientSecret(String secret) { return true; } private boolean checkUserPass(String user, String pass) { return "test".equals(pass) && "test".equals(user); } }
fix refresh_token handling
src/main/java/net/dstc/mkts/rest/auth/AuthEndpoint.java
fix refresh_token handling
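For readers scanning the AuthEndpoint record above: the "fix refresh_token handling" change does two things visible in the diff. The token endpoint's @GET annotation becomes @POST, and the refresh_token branch now returns the invalid-grant error response instead of building it and falling through to issue an access token (the client id/secret checks also switch from always-true stubs to comparisons against "test"). The sketch below shows the corrected control flow only; the class, enum, and string results are illustrative stand-ins and are not part of the original file.

    // Illustrative sketch of the corrected grant-type dispatch: every branch RETURNS
    // a response, so an unsupported refresh_token request can no longer fall through
    // and receive an access token. All names here are hypothetical.
    public class GrantDispatchSketch {

        enum Grant { AUTHORIZATION_CODE, PASSWORD, REFRESH_TOKEN }

        static String handleToken(Grant grant, boolean credentialsOk) {
            switch (grant) {
                case AUTHORIZATION_CODE:
                case PASSWORD:
                    if (!credentialsOk) {
                        return "400 invalid_grant";       // early return on bad credentials
                    }
                    return "200 access_token=...";        // issue a token only on success
                case REFRESH_TOKEN:
                default:
                    // the original bug: this error was built but not returned, so
                    // execution fell through and a token was issued anyway
                    return "400 invalid_grant (refresh_token not supported)";
            }
        }

        public static void main(String[] args) {
            System.out.println(handleToken(Grant.REFRESH_TOKEN, true));
        }
    }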
Java
apache-2.0
094b6aa2169215892a88355401cb2646d41a388e
0
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
package io.quarkus.rest.runtime.providers.serialisers; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.lang.annotation.Annotation; import java.lang.reflect.Type; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.ext.MessageBodyReader; import javax.ws.rs.ext.MessageBodyWriter; public class ReaderBodyHandler implements MessageBodyWriter<Reader>, MessageBodyReader<Reader> { public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) { return type.equals(Reader.class); } public Reader readFrom(Class<Reader> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException { return new InputStreamReader(entityStream, MessageReaderUtil.charsetFromMediaType(mediaType)); } public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) { return true; } public long getSize(Reader inputStream, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) { return -1; } public void writeTo(Reader inputStream, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException { try { int c; while ((c = inputStream.read()) != -1) { entityStream.write(c); } } finally { try { inputStream.close(); } catch (IOException e) { // Drop the exception so we don't mask real IO errors } } } }
extensions/quarkus-rest/runtime/src/main/java/io/quarkus/rest/runtime/providers/serialisers/ReaderBodyHandler.java
package io.quarkus.rest.runtime.providers.serialisers; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.lang.annotation.Annotation; import java.lang.reflect.Type; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.ext.MessageBodyReader; import javax.ws.rs.ext.MessageBodyWriter; public class ReaderBodyHandler implements MessageBodyWriter<Reader>, MessageBodyReader<Reader> { public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) { return type.equals(Reader.class); } public Reader readFrom(Class<Reader> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException { String charset = mediaType.getParameters().get("charset"); if (charset == null) return new InputStreamReader(entityStream); else return new InputStreamReader(entityStream, charset); } public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) { return true; } public long getSize(Reader inputStream, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) { return -1; } public void writeTo(Reader inputStream, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException { try { int c; while ((c = inputStream.read()) != -1) { entityStream.write(c); } } finally { try { inputStream.close(); } catch (IOException e) { // Drop the exception so we don't mask real IO errors } } } }
Fix potential NPE
extensions/quarkus-rest/runtime/src/main/java/io/quarkus/rest/runtime/providers/serialisers/ReaderBodyHandler.java
Fix potential NPE
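The "Fix potential NPE" record above replaces an inline charset lookup on the MediaType with a call to MessageReaderUtil.charsetFromMediaType. That helper's body is not included in the record, so the sketch below is only an assumption of what a null-safe lookup of this kind could look like, defaulting to UTF-8 when the media type or its charset parameter is absent; the class name and the fallback choice are hypothetical.

    import java.nio.charset.StandardCharsets;
    import javax.ws.rs.core.MediaType;

    // Hypothetical null-safe charset lookup, sketching what a helper such as
    // MessageReaderUtil.charsetFromMediaType might do: never dereference a null
    // media type, and fall back to UTF-8 when no charset parameter is present.
    public final class CharsetSketch {

        static String charsetFromMediaType(MediaType mediaType) {
            if (mediaType == null) {
                return StandardCharsets.UTF_8.name();
            }
            String charset = mediaType.getParameters().get("charset");
            return charset != null ? charset : StandardCharsets.UTF_8.name();
        }

        public static void main(String[] args) {
            System.out.println(charsetFromMediaType(null));                      // UTF-8
            System.out.println(charsetFromMediaType(MediaType.TEXT_PLAIN_TYPE)); // UTF-8 (no charset param)
        }
    }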
Java
apache-2.0
e8416d340147b0c14398e74e36b782d5a329e996
0
ravihansa3000/stratos,lasinducharith/stratos,gayangunarathne/stratos,lasinducharith/stratos,gayangunarathne/stratos,gayangunarathne/stratos,pkdevbox/stratos,hsbhathiya/stratos,ravihansa3000/stratos,asankasanjaya/stratos,hsbhathiya/stratos,dinithis/stratos,agentmilindu/stratos,pkdevbox/stratos,dinithis/stratos,apache/stratos,Thanu/stratos,pubudu538/stratos,ravihansa3000/stratos,pubudu538/stratos,agentmilindu/stratos,ravihansa3000/stratos,Thanu/stratos,hsbhathiya/stratos,agentmilindu/stratos,gayangunarathne/stratos,dinithis/stratos,pubudu538/stratos,apache/stratos,Thanu/stratos,Thanu/stratos,asankasanjaya/stratos,asankasanjaya/stratos,anuruddhal/stratos,dinithis/stratos,pkdevbox/stratos,dinithis/stratos,hsbhathiya/stratos,pubudu538/stratos,pubudu538/stratos,pkdevbox/stratos,ravihansa3000/stratos,lasinducharith/stratos,lasinducharith/stratos,gayangunarathne/stratos,pkdevbox/stratos,agentmilindu/stratos,agentmilindu/stratos,anuruddhal/stratos,hsbhathiya/stratos,dinithis/stratos,agentmilindu/stratos,asankasanjaya/stratos,hsbhathiya/stratos,pubudu538/stratos,Thanu/stratos,asankasanjaya/stratos,gayangunarathne/stratos,pkdevbox/stratos,Thanu/stratos,apache/stratos,dinithis/stratos,apache/stratos,Thanu/stratos,anuruddhal/stratos,lasinducharith/stratos,anuruddhal/stratos,pkdevbox/stratos,anuruddhal/stratos,agentmilindu/stratos,ravihansa3000/stratos,hsbhathiya/stratos,asankasanjaya/stratos,lasinducharith/stratos,anuruddhal/stratos,ravihansa3000/stratos,pubudu538/stratos,apache/stratos,asankasanjaya/stratos,lasinducharith/stratos,anuruddhal/stratos,apache/stratos,apache/stratos,gayangunarathne/stratos
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.stratos.autoscaler.api; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.stratos.autoscaler.AutoscalerContext; import org.apache.stratos.autoscaler.NetworkPartitionLbHolder; import org.apache.stratos.autoscaler.applications.parser.ApplicationParser; import org.apache.stratos.autoscaler.applications.parser.DefaultApplicationParser; import org.apache.stratos.autoscaler.applications.pojo.ApplicationContext; import org.apache.stratos.autoscaler.applications.topic.ApplicationBuilder; import org.apache.stratos.autoscaler.client.CloudControllerClient; import org.apache.stratos.autoscaler.exception.*; import org.apache.stratos.autoscaler.interfaces.AutoScalerServiceInterface; import org.apache.stratos.autoscaler.kubernetes.KubernetesManager; import org.apache.stratos.autoscaler.monitor.cluster.AbstractClusterMonitor; import org.apache.stratos.autoscaler.partition.PartitionGroup; import org.apache.stratos.autoscaler.partition.PartitionManager; import org.apache.stratos.autoscaler.pojo.Dependencies; import org.apache.stratos.autoscaler.pojo.ServiceGroup; import org.apache.stratos.autoscaler.policy.PolicyManager; import org.apache.stratos.autoscaler.policy.model.AutoscalePolicy; import org.apache.stratos.autoscaler.policy.model.DeploymentPolicy; import org.apache.stratos.autoscaler.registry.RegistryManager; import org.apache.stratos.cloud.controller.stub.deployment.partition.Partition; import org.apache.stratos.cloud.controller.stub.pojo.Properties; import org.apache.stratos.cloud.controller.stub.pojo.Property; import org.apache.stratos.common.kubernetes.KubernetesGroup; import org.apache.stratos.common.kubernetes.KubernetesHost; import org.apache.stratos.common.kubernetes.KubernetesMaster; import org.apache.stratos.messaging.domain.applications.Application; import org.apache.stratos.metadata.client.defaults.DefaultMetaDataServiceClient; import org.apache.stratos.metadata.client.defaults.MetaDataServiceClient; import org.apache.stratos.metadata.client.exception.MetaDataServiceClientException; import org.wso2.carbon.registry.api.RegistryException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Map; /** * Auto Scaler Service API is responsible getting Partitions and Policies. 
*/ public class AutoScalerServiceImpl implements AutoScalerServiceInterface { private static final Log log = LogFactory.getLog(AutoScalerServiceImpl.class); PartitionManager partitionManager = PartitionManager.getInstance(); KubernetesManager kubernetesManager = KubernetesManager.getInstance(); public Partition[] getAllAvailablePartitions() { return partitionManager.getAllPartitions(); } public DeploymentPolicy[] getAllDeploymentPolicies() { return PolicyManager.getInstance().getDeploymentPolicyList(); } public AutoscalePolicy[] getAllAutoScalingPolicy() { return PolicyManager.getInstance().getAutoscalePolicyList(); } @Override public DeploymentPolicy[] getValidDeploymentPoliciesforCartridge(String cartridgeType) { ArrayList<DeploymentPolicy> validPolicies = new ArrayList<DeploymentPolicy>(); for (DeploymentPolicy deploymentPolicy : this.getAllDeploymentPolicies()) { try { // call CC API CloudControllerClient.getInstance().validateDeploymentPolicy(cartridgeType, deploymentPolicy); // if this deployment policy is valid for this cartridge, add it. validPolicies.add(deploymentPolicy); } catch (PartitionValidationException ignoredException) { // if this policy doesn't valid for the given cartridge, add a debug log. if (log.isDebugEnabled()) { log.debug("Deployment policy [id] " + deploymentPolicy.getId() + " is not valid for Cartridge [type] " + cartridgeType, ignoredException); } } } return validPolicies.toArray(new DeploymentPolicy[0]); } @Override public boolean addPartition(Partition partition) throws InvalidPartitionException { return partitionManager.addNewPartition(partition); } @Override public boolean addDeploymentPolicy(DeploymentPolicy deploymentPolicy) throws InvalidPolicyException { return PolicyManager.getInstance().deployDeploymentPolicy(deploymentPolicy); } @Override public boolean updateDeploymentPolicy(DeploymentPolicy deploymentPolicy) throws InvalidPolicyException { return PolicyManager.getInstance().updateDeploymentPolicy(deploymentPolicy); } @Override public boolean addAutoScalingPolicy(AutoscalePolicy autoscalePolicy) throws InvalidPolicyException { return PolicyManager.getInstance().deployAutoscalePolicy(autoscalePolicy); } @Override public boolean updateAutoScalingPolicy(AutoscalePolicy autoscalePolicy) throws InvalidPolicyException { return PolicyManager.getInstance().updateAutoscalePolicy(autoscalePolicy); } @Override public Partition getPartition(String partitionId) { return partitionManager.getPartitionById(partitionId); } @Override public DeploymentPolicy getDeploymentPolicy(String deploymentPolicyId) { return PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId); } @Override public AutoscalePolicy getAutoscalingPolicy(String autoscalingPolicyId) { return PolicyManager.getInstance().getAutoscalePolicy(autoscalingPolicyId); } @Override public PartitionGroup[] getPartitionGroups(String deploymentPolicyId) { return PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups(); } public Partition[] getPartitionsOfDeploymentPolicy(String deploymentPolicyId) { DeploymentPolicy depPol = this.getDeploymentPolicy(deploymentPolicyId); if (null == depPol) { return null; } return depPol.getAllPartitions(); } @Override public KubernetesGroup[] getAllKubernetesGroups() { return kubernetesManager.getKubernetesGroups(); } @Override public KubernetesGroup getKubernetesGroup(String kubernetesGroupId) throws NonExistingKubernetesGroupException { return kubernetesManager.getKubernetesGroup(kubernetesGroupId); } @Override public 
KubernetesMaster getMasterForKubernetesGroup(String kubernetesGroupId) throws NonExistingKubernetesGroupException { return kubernetesManager.getKubernetesMasterInGroup(kubernetesGroupId); } @Override public KubernetesHost[] getHostsForKubernetesGroup(String kubernetesGroupId) throws NonExistingKubernetesGroupException { return kubernetesManager.getKubernetesHostsInGroup(kubernetesGroupId); } @Override public boolean addKubernetesGroup(KubernetesGroup kubernetesGroup) throws InvalidKubernetesGroupException { return kubernetesManager.addNewKubernetesGroup(kubernetesGroup); } @Override public boolean addKubernetesHost(String groupId, KubernetesHost kubernetesHost) throws InvalidKubernetesHostException, NonExistingKubernetesGroupException { return kubernetesManager.addNewKubernetesHost(groupId, kubernetesHost); } @Override public boolean removeKubernetesGroup(String groupId) throws NonExistingKubernetesGroupException { return kubernetesManager.removeKubernetesGroup(groupId); } @Override public boolean removeKubernetesHost(String hostId) throws NonExistingKubernetesHostException { return kubernetesManager.removeKubernetesHost(hostId); } @Override public boolean updateKubernetesMaster(KubernetesMaster kubernetesMaster) throws InvalidKubernetesMasterException, NonExistingKubernetesMasterException { return kubernetesManager.updateKubernetesMaster(kubernetesMaster); } @Override public boolean updateKubernetesHost(KubernetesHost kubernetesHost) throws InvalidKubernetesHostException, NonExistingKubernetesHostException { return kubernetesManager.updateKubernetesHost(kubernetesHost); } @Override public Partition[] getPartitionsOfGroup(String deploymentPolicyId, String groupId) { DeploymentPolicy depPol = this.getDeploymentPolicy(deploymentPolicyId); if (null == depPol) { return null; } PartitionGroup group = depPol.getPartitionGroup(groupId); if (group == null) { return null; } return group.getPartitions(); } public void checkLBExistenceAgainstPolicy(String lbClusterId, String deploymentPolicyId) throws NonExistingLBException { boolean exist = false; for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (nwPartitionLbHolder.isLBExist(lbClusterId)) { exist = true; break; } } if (!exist) { String msg = "LB with [cluster id] " + lbClusterId + " does not exist in any network partition of [Deployment Policy] " + deploymentPolicyId; log.error(msg); throw new NonExistingLBException(msg); } } public boolean checkDefaultLBExistenceAgainstPolicy(String deploymentPolicyId) { for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (!nwPartitionLbHolder.isDefaultLBExist()) { if (log.isDebugEnabled()) { log.debug("Default LB does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyId); } return false; } } return true; } public String getDefaultLBClusterId(String deploymentPolicyName) { if (log.isDebugEnabled()) { log.debug("Default LB Cluster Id for Deployment Policy [" + deploymentPolicyName + "] "); } for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyName).getPartitionGroups()) { NetworkPartitionLbHolder 
nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (nwPartitionLbHolder.isDefaultLBExist()) { if (log.isDebugEnabled()) { log.debug("Default LB does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyName); } return nwPartitionLbHolder.getDefaultLbClusterId(); } } return null; } @Override public void deployApplicationDefinition(ApplicationContext applicationContext) throws ApplicationDefinitionException { ApplicationParser applicationParser = new DefaultApplicationParser(); Application application = applicationParser.parse(applicationContext); publishMetadata(applicationParser, application.getUniqueIdentifier()); ApplicationBuilder.handleApplicationCreated(application, applicationParser.getApplicationClusterContexts()); } @Override public void unDeployApplicationDefinition(String applicationId, int tenantId, String tenantDomain) throws ApplicationDefinitionException { ApplicationBuilder.handleApplicationUndeployed(applicationId); } public boolean checkServiceLBExistenceAgainstPolicy(String serviceName, String deploymentPolicyId) { for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (!nwPartitionLbHolder.isServiceLBExist(serviceName)) { if (log.isDebugEnabled()) { log.debug("Service LB [service name] " + serviceName + " does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyId); } return false; } } return true; } public String getServiceLBClusterId(String serviceType, String deploymentPolicyName) { for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyName).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (nwPartitionLbHolder.isServiceLBExist(serviceType)) { if (log.isDebugEnabled()) { log.debug("Service LB [service name] " + serviceType + " does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyName); } return nwPartitionLbHolder.getLBClusterIdOfService(serviceType); } } return null; } public boolean checkClusterLBExistenceAgainstPolicy(String clusterId, String deploymentPolicyId) { for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (!nwPartitionLbHolder.isClusterLBExist(clusterId)) { if (log.isDebugEnabled()) { log.debug("Cluster LB [cluster id] " + clusterId + " does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyId); } return false; } } return true; } public void updateClusterMonitor(String clusterId, Properties properties) throws InvalidArgumentException { if (log.isDebugEnabled()) { log.debug(String.format("Updating Cluster monitor [Cluster id] %s ", clusterId)); } AutoscalerContext asCtx = AutoscalerContext.getInstance(); AbstractClusterMonitor monitor = asCtx.getClusterMonitor(clusterId); if (monitor != null) { monitor.handleDynamicUpdates(properties); } else { log.debug(String.format("Updating Cluster 
monitor failed: Cluster monitor [Cluster id] %s not found.", clusterId)); } } public void deployServiceGroup(ServiceGroup servicegroup) throws InvalidServiceGroupException { if (servicegroup == null || StringUtils.isEmpty(servicegroup.getName())) { String msg = "Service group can not be null service name can not be empty."; log.error(msg); throw new IllegalArgumentException(msg); } String name = servicegroup.getName(); if (RegistryManager.getInstance().serviceGroupExist(name)) { throw new InvalidServiceGroupException("Service group with the name " + name + " already exist."); } if (log.isDebugEnabled()) { log.debug(MessageFormat.format("Deploying service group {0}", servicegroup.getName())); } String[] subGroups = servicegroup.getCartridges(); if (log.isDebugEnabled()) { log.debug("SubGroups" + subGroups); if (subGroups != null) { log.debug("subGroups:size" + subGroups.length); } else { log.debug("subGroups: are null"); } } Dependencies dependencies = servicegroup.getDependencies(); if (log.isDebugEnabled()) { log.debug("Dependencies" + dependencies); } if (dependencies != null) { String[] startupOrders = dependencies.getStartupOrders(); if (log.isDebugEnabled()) { log.debug("StartupOrders " + startupOrders); if (startupOrders != null) { log.debug("StartupOrder:size " + startupOrders.length); } else { log.debug("StartupOrder: is null"); } } String[] scalingOrders = dependencies.getScalingOrders(); if (log.isDebugEnabled()) { log.debug("ScalingOrders " + scalingOrders); if (scalingOrders != null) { log.debug("ScalingOrder:size " + scalingOrders.length); } else { log.debug("ScalingOrder: is null"); } } } RegistryManager.getInstance().persistServiceGroup(servicegroup); } public ServiceGroup getServiceGroup(String name) { if (StringUtils.isEmpty(name)) { return null; } try { return RegistryManager.getInstance().getServiceGroup(name); } catch (Exception e) { throw new AutoScalerException("Error occurred while retrieving service group", e); } } public ServiceGroup[] getServiceGroups() throws AutoScalerException { return RegistryManager.getInstance().getServiceGroups(); } public boolean serviceGroupExist(String serviceName) { return false; } public void undeployServiceGroup(String name) throws AutoScalerException { try { RegistryManager.getInstance().removeServiceGroup(name); } catch (RegistryException e) { throw new AutoScalerException("Error occurred while removing the service groups", e); } } private void publishMetadata(ApplicationParser applicationParser, String appId) { MetaDataServiceClient metaDataServiceClien = null; try { metaDataServiceClien = new DefaultMetaDataServiceClient(); for (Map.Entry<String, Properties> entry : applicationParser.getAliasToProperties().entrySet()) { String alias = entry.getKey(); Properties properties = entry.getValue(); if (properties != null) { for (Property property : properties.getProperties()) { metaDataServiceClien.addPropertyToCluster(appId, alias, property.getName(), property.getValue()); } } } } catch (MetaDataServiceClientException e) { log.error("Could not publish to metadata service ", e); } } }
components/org.apache.stratos.autoscaler/src/main/java/org/apache/stratos/autoscaler/api/AutoScalerServiceImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.stratos.autoscaler.api; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.stratos.autoscaler.AutoscalerContext; import org.apache.stratos.autoscaler.NetworkPartitionLbHolder; import org.apache.stratos.autoscaler.applications.parser.ApplicationParser; import org.apache.stratos.autoscaler.applications.parser.DefaultApplicationParser; import org.apache.stratos.autoscaler.applications.pojo.ApplicationContext; import org.apache.stratos.autoscaler.applications.topic.ApplicationBuilder; import org.apache.stratos.autoscaler.client.CloudControllerClient; import org.apache.stratos.autoscaler.exception.*; import org.apache.stratos.autoscaler.interfaces.AutoScalerServiceInterface; import org.apache.stratos.autoscaler.kubernetes.KubernetesManager; import org.apache.stratos.autoscaler.monitor.cluster.AbstractClusterMonitor; import org.apache.stratos.autoscaler.partition.PartitionGroup; import org.apache.stratos.autoscaler.partition.PartitionManager; import org.apache.stratos.autoscaler.pojo.Dependencies; import org.apache.stratos.autoscaler.pojo.ServiceGroup; import org.apache.stratos.autoscaler.policy.PolicyManager; import org.apache.stratos.autoscaler.policy.model.AutoscalePolicy; import org.apache.stratos.autoscaler.policy.model.DeploymentPolicy; import org.apache.stratos.autoscaler.registry.RegistryManager; import org.apache.stratos.cloud.controller.stub.deployment.partition.Partition; import org.apache.stratos.cloud.controller.stub.pojo.Properties; import org.apache.stratos.cloud.controller.stub.pojo.Property; import org.apache.stratos.common.kubernetes.KubernetesGroup; import org.apache.stratos.common.kubernetes.KubernetesHost; import org.apache.stratos.common.kubernetes.KubernetesMaster; import org.apache.stratos.messaging.domain.applications.Application; import org.apache.stratos.metadata.client.defaults.DefaultMetaDataServiceClient; import org.apache.stratos.metadata.client.defaults.MetaDataServiceClient; import org.apache.stratos.metadata.client.exception.MetaDataServiceClientException; import org.wso2.carbon.registry.api.RegistryException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Map; /** * Auto Scaler Service API is responsible getting Partitions and Policies. 
*/ public class AutoScalerServiceImpl implements AutoScalerServiceInterface { private static final Log log = LogFactory.getLog(AutoScalerServiceImpl.class); PartitionManager partitionManager = PartitionManager.getInstance(); KubernetesManager kubernetesManager = KubernetesManager.getInstance(); public Partition[] getAllAvailablePartitions() { return partitionManager.getAllPartitions(); } public DeploymentPolicy[] getAllDeploymentPolicies() { return PolicyManager.getInstance().getDeploymentPolicyList(); } public AutoscalePolicy[] getAllAutoScalingPolicy() { return PolicyManager.getInstance().getAutoscalePolicyList(); } @Override public DeploymentPolicy[] getValidDeploymentPoliciesforCartridge(String cartridgeType) { ArrayList<DeploymentPolicy> validPolicies = new ArrayList<DeploymentPolicy>(); for (DeploymentPolicy deploymentPolicy : this.getAllDeploymentPolicies()) { try { // call CC API CloudControllerClient.getInstance().validateDeploymentPolicy(cartridgeType, deploymentPolicy); // if this deployment policy is valid for this cartridge, add it. validPolicies.add(deploymentPolicy); } catch (PartitionValidationException ignoredException) { // if this policy doesn't valid for the given cartridge, add a debug log. if (log.isDebugEnabled()) { log.debug("Deployment policy [id] " + deploymentPolicy.getId() + " is not valid for Cartridge [type] " + cartridgeType, ignoredException); } } } return validPolicies.toArray(new DeploymentPolicy[0]); } @Override public boolean addPartition(Partition partition) throws InvalidPartitionException { return partitionManager.addNewPartition(partition); } @Override public boolean addDeploymentPolicy(DeploymentPolicy deploymentPolicy) throws InvalidPolicyException { return PolicyManager.getInstance().deployDeploymentPolicy(deploymentPolicy); } @Override public boolean updateDeploymentPolicy(DeploymentPolicy deploymentPolicy) throws InvalidPolicyException { return PolicyManager.getInstance().updateDeploymentPolicy(deploymentPolicy); } @Override public boolean addAutoScalingPolicy(AutoscalePolicy autoscalePolicy) throws InvalidPolicyException { return PolicyManager.getInstance().deployAutoscalePolicy(autoscalePolicy); } @Override public boolean updateAutoScalingPolicy(AutoscalePolicy autoscalePolicy) throws InvalidPolicyException { return PolicyManager.getInstance().updateAutoscalePolicy(autoscalePolicy); } @Override public Partition getPartition(String partitionId) { return partitionManager.getPartitionById(partitionId); } @Override public DeploymentPolicy getDeploymentPolicy(String deploymentPolicyId) { return PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId); } @Override public AutoscalePolicy getAutoscalingPolicy(String autoscalingPolicyId) { return PolicyManager.getInstance().getAutoscalePolicy(autoscalingPolicyId); } @Override public PartitionGroup[] getPartitionGroups(String deploymentPolicyId) { return PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups(); } public Partition[] getPartitionsOfDeploymentPolicy(String deploymentPolicyId) { DeploymentPolicy depPol = this.getDeploymentPolicy(deploymentPolicyId); if (null == depPol) { return null; } return depPol.getAllPartitions(); } @Override public KubernetesGroup[] getAllKubernetesGroups() { return kubernetesManager.getKubernetesGroups(); } @Override public KubernetesGroup getKubernetesGroup(String kubernetesGroupId) throws NonExistingKubernetesGroupException { return kubernetesManager.getKubernetesGroup(kubernetesGroupId); } @Override public 
KubernetesMaster getMasterForKubernetesGroup(String kubernetesGroupId) throws NonExistingKubernetesGroupException { return kubernetesManager.getKubernetesMasterInGroup(kubernetesGroupId); } @Override public KubernetesHost[] getHostsForKubernetesGroup(String kubernetesGroupId) throws NonExistingKubernetesGroupException { return kubernetesManager.getKubernetesHostsInGroup(kubernetesGroupId); } @Override public boolean addKubernetesGroup(KubernetesGroup kubernetesGroup) throws InvalidKubernetesGroupException { return kubernetesManager.addNewKubernetesGroup(kubernetesGroup); } @Override public boolean addKubernetesHost(String groupId, KubernetesHost kubernetesHost) throws InvalidKubernetesHostException, NonExistingKubernetesGroupException { return kubernetesManager.addNewKubernetesHost(groupId, kubernetesHost); } @Override public boolean removeKubernetesGroup(String groupId) throws NonExistingKubernetesGroupException { return kubernetesManager.removeKubernetesGroup(groupId); } @Override public boolean removeKubernetesHost(String hostId) throws NonExistingKubernetesHostException { return kubernetesManager.removeKubernetesHost(hostId); } @Override public boolean updateKubernetesMaster(KubernetesMaster kubernetesMaster) throws InvalidKubernetesMasterException, NonExistingKubernetesMasterException { return kubernetesManager.updateKubernetesMaster(kubernetesMaster); } @Override public boolean updateKubernetesHost(KubernetesHost kubernetesHost) throws InvalidKubernetesHostException, NonExistingKubernetesHostException { return kubernetesManager.updateKubernetesHost(kubernetesHost); } @Override public Partition[] getPartitionsOfGroup(String deploymentPolicyId, String groupId) { DeploymentPolicy depPol = this.getDeploymentPolicy(deploymentPolicyId); if (null == depPol) { return null; } PartitionGroup group = depPol.getPartitionGroup(groupId); if (group == null) { return null; } return group.getPartitions(); } public void checkLBExistenceAgainstPolicy(String lbClusterId, String deploymentPolicyId) throws NonExistingLBException { boolean exist = false; for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (nwPartitionLbHolder.isLBExist(lbClusterId)) { exist = true; break; } } if (!exist) { String msg = "LB with [cluster id] " + lbClusterId + " does not exist in any network partition of [Deployment Policy] " + deploymentPolicyId; log.error(msg); throw new NonExistingLBException(msg); } } public boolean checkDefaultLBExistenceAgainstPolicy(String deploymentPolicyId) { for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (!nwPartitionLbHolder.isDefaultLBExist()) { if (log.isDebugEnabled()) { log.debug("Default LB does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyId); } return false; } } return true; } public String getDefaultLBClusterId(String deploymentPolicyName) { if (log.isDebugEnabled()) { log.debug("Default LB Cluster Id for Deployment Policy [" + deploymentPolicyName + "] "); } for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyName).getPartitionGroups()) { NetworkPartitionLbHolder 
nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (nwPartitionLbHolder.isDefaultLBExist()) { if (log.isDebugEnabled()) { log.debug("Default LB does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyName); } return nwPartitionLbHolder.getDefaultLbClusterId(); } } return null; } @Override public void deployApplicationDefinition(ApplicationContext applicationContext) throws ApplicationDefinitionException { ApplicationParser applicationParser = new DefaultApplicationParser(); Application application = applicationParser.parse(applicationContext); publishMetadata(applicationParser, application.getUniqueIdentifier()); ApplicationBuilder.handleApplicationCreated(application, applicationParser.getApplicationClusterContexts()); } @Override public void unDeployApplicationDefinition(String applicationId, int tenantId, String tenantDomain) throws ApplicationDefinitionException { ApplicationBuilder.handleApplicationUndeployed(applicationId); } public boolean checkServiceLBExistenceAgainstPolicy(String serviceName, String deploymentPolicyId) { for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (!nwPartitionLbHolder.isServiceLBExist(serviceName)) { if (log.isDebugEnabled()) { log.debug("Service LB [service name] " + serviceName + " does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyId); } return false; } } return true; } public String getServiceLBClusterId(String serviceType, String deploymentPolicyName) { for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyName).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (nwPartitionLbHolder.isServiceLBExist(serviceType)) { if (log.isDebugEnabled()) { log.debug("Service LB [service name] " + serviceType + " does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyName); } return nwPartitionLbHolder.getLBClusterIdOfService(serviceType); } } return null; } public boolean checkClusterLBExistenceAgainstPolicy(String clusterId, String deploymentPolicyId) { for (PartitionGroup partitionGroup : PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyId).getPartitionGroups()) { NetworkPartitionLbHolder nwPartitionLbHolder = partitionManager.getNetworkPartitionLbHolder(partitionGroup.getId()); if (!nwPartitionLbHolder.isClusterLBExist(clusterId)) { if (log.isDebugEnabled()) { log.debug("Cluster LB [cluster id] " + clusterId + " does not exist in [network partition] " + nwPartitionLbHolder.getNetworkPartitionId() + " of [Deployment Policy] " + deploymentPolicyId); } return false; } } return true; } public void updateClusterMonitor(String clusterId, Properties properties) throws InvalidArgumentException { if (log.isDebugEnabled()) { log.debug(String.format("Updating Cluster monitor [Cluster id] %s ", clusterId)); } AutoscalerContext asCtx = AutoscalerContext.getInstance(); AbstractClusterMonitor monitor = asCtx.getClusterMonitor(clusterId); if (monitor != null) { monitor.handleDynamicUpdates(properties); } else { log.debug(String.format("Updating Cluster 
monitor failed: Cluster monitor [Cluster id] %s not found.", clusterId)); } } public void deployServiceGroup(ServiceGroup servicegroup) throws InvalidServiceGroupException { if (servicegroup == null || StringUtils.isEmpty(servicegroup.getName())) { String msg = "Service group can not be null service name can not be empty."; log.error(msg); throw new IllegalArgumentException(msg); } String name = servicegroup.getName(); if (RegistryManager.getInstance().serviceGroupExist(name)) { throw new InvalidServiceGroupException("Service group with the name " + name + " already exist."); } if (log.isDebugEnabled()) { log.debug(MessageFormat.format("Deploying service group {0}", servicegroup.getName())); } String[] subGroups = servicegroup.getCartridges(); if (log.isDebugEnabled()) { log.debug("SubGroups" + subGroups); if (subGroups != null) { log.debug("subGroups:size" + subGroups.length); } else { log.debug("subGroups: are null"); } } Dependencies dependencies = servicegroup.getDependencies(); if (log.isDebugEnabled()) { log.debug("Dependencies" + dependencies); } if (dependencies != null) { String[] startupOrders = dependencies.getStartupOrders(); if (log.isDebugEnabled()) { log.debug("StartupOrders " + startupOrders); if (startupOrders != null) { log.debug("StartupOrder:size " + startupOrders.length); } else { log.debug("StartupOrder: is null"); } } String[] scalingOrders = dependencies.getScalingOrders(); if (log.isDebugEnabled()) { log.debug("ScalingOrders " + scalingOrders); if (startupOrders != null) { log.debug("ScalingOrder:size " + scalingOrders.length); } else { log.debug("ScalingOrder: is null"); } } } RegistryManager.getInstance().persistServiceGroup(servicegroup); } public ServiceGroup getServiceGroup(String name) { if (StringUtils.isEmpty(name)) { return null; } try { return RegistryManager.getInstance().getServiceGroup(name); } catch (Exception e) { throw new AutoScalerException("Error occurred while retrieving service group", e); } } public ServiceGroup[] getServiceGroups() throws AutoScalerException { return RegistryManager.getInstance().getServiceGroups(); } public boolean serviceGroupExist(String serviceName) { return false; } public void undeployServiceGroup(String name) throws AutoScalerException { try { RegistryManager.getInstance().removeServiceGroup(name); } catch (RegistryException e) { throw new AutoScalerException("Error occurred while removing the service groups", e); } } private void publishMetadata(ApplicationParser applicationParser, String appId) { MetaDataServiceClient metaDataServiceClien = null; try { metaDataServiceClien = new DefaultMetaDataServiceClient(); for (Map.Entry<String, Properties> entry : applicationParser.getAliasToProperties().entrySet()) { String alias = entry.getKey(); Properties properties = entry.getValue(); if (properties != null) { for (Property property : properties.getProperties()) { metaDataServiceClien.addPropertyToCluster(appId, alias, property.getName(), property.getValue()); } } } } catch (MetaDataServiceClientException e) { log.error("Could not publish to metadata service ", e); } } }
adding check for scalingOrders != null when DEBUG is turned on
components/org.apache.stratos.autoscaler/src/main/java/org/apache/stratos/autoscaler/api/AutoScalerServiceImpl.java
adding check for scalingOrders != null when DEBUG is turned on
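The stratos record above fixes a copy-paste guard in debug logging: the old code tested startupOrders != null but then read scalingOrders.length, which throws a NullPointerException whenever DEBUG logging is enabled and only scalingOrders is null. A compact, self-contained illustration of the corrected guard follows; the logging calls are simplified to System.out and the class name is made up for the example.

    // Minimal illustration of the guard fixed above: each debug block must
    // null-check the same array it dereferences. Variable names mirror the record.
    public class NullGuardSketch {

        static void logOrders(String[] startupOrders, String[] scalingOrders) {
            if (startupOrders != null) {
                System.out.println("StartupOrder:size " + startupOrders.length);
            } else {
                System.out.println("StartupOrder: is null");
            }
            // before the fix this block tested startupOrders but read scalingOrders.length
            if (scalingOrders != null) {
                System.out.println("ScalingOrder:size " + scalingOrders.length);
            } else {
                System.out.println("ScalingOrder: is null");
            }
        }

        public static void main(String[] args) {
            logOrders(new String[] {"a", "b"}, null); // would have crashed before the fix
        }
    }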
Java
apache-2.0
12379419bf2214a98e1d4d14e3b0ba6c9909b50c
0
pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus
/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.common; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.namespace.Namespace; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.MissingResourceException; import java.util.Properties; import java.util.Set; /** * A Central Properties class that keeps track of all the properties used by Pegasus. All other * classes access the methods in this class to get the value of the property. It access the * CommonProperties class to read the property file. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision$ * @see edu.isi.pegasus.common.util.CommonProperties */ public class PegasusProperties implements Cloneable { /** the name of the property to disable invoke functionality */ public static final String DISABLE_INVOKE_PROPERTY = "pegasus.gridstart.invoke.disable"; public static final String PEGASUS_KICKSTART_STAT_PROPERTY = "pegasus.gridstart.kickstart.stat"; public static final String PEGASUS_WORKER_NODE_EXECUTION_PROPERTY = "pegasus.execute.*.filesystem.local"; public static final String PEGASUS_TRANSFER_WORKER_PACKAGE_PROPERTY = "pegasus.transfer.worker.package"; public static final String PEGASUS_TRANSFER_WORKER_PACKAGE_STRICT_PROPERTY = "pegasus.transfer.worker.package.strict"; public static final String PEGASUS_TRANSFER_WORKER_PACKAGE_AUTODOWNLOAD_PROPERTY = "pegasus.transfer.worker.package.autodownload"; public static final String PEGASUS_TRANSFER_ARGUMENTS_KEY = "pegasus.transfer.arguments"; public static final String PEGASUS_TRANSFER_LITE_ARGUMENTS_KEY = "pegasus.transfer.lite.arguments"; public static final String PEGASUS_TRANSFORMATION_CATALOG_PROPERTY = "pegasus.catalog.transformation"; public static final String PEGASUS_TRANSFORMATION_CATALOG_FILE_PROPERTY = "pegasus.catalog.transformation.file"; public static final String PEGASUS_REPLICA_CATALOG_PROPERTY = "pegasus.catalog.replica"; public static final String PEGASUS_REPLICA_CATALOG_FILE_PROPERTY = "pegasus.catalog.replica.file"; public static final String PEGASUS_SITE_CATALOG_PROPERTY = "pegasus.catalog.site"; public static final String PEGASUS_SITE_CATALOG_FILE_PROPERTY = "pegasus.catalog.site.file"; public static final String PEGASUS_LOG_METRICS_PROPERTY = "pegasus.log.metrics"; public static final String PEGASUS_LOG_METRICS_PROPERTY_FILE = "pegasus.log.metrics.file"; public static final String PEGASUS_APP_METRICS_PREFIX = "pegasus.metrics.app"; /** The property key for pegasus mode. 
*/ public static final String PEGASUS_MODE_PROPERTY_KEY = "pegasus.mode"; public static final String PEGASUS_INTEGRITY_CHECKING_KEY = "pegasus.integrity.checking"; // Replica Catalog Constants public static final String DEFAULT_RC_COLLECTION = "GriphynData"; public static final String DEFAULT_RLI_URL = null; public static final String DEFAULT_RLS_QUERY_MODE = "bulk"; public static final String DEFAULT_RLS_EXIT_MODE = "error"; // public static final String DEFAULT_REPLICA_MODE = "rls"; public static final String DEFAULT_RLS_QUERY_ATTRIB = "false"; public static final String DEFAULT_LRC_IGNORE_URL = null; public static final String DEFAULT_RLS_TIMEOUT = "30"; public static final String DEFAULT_EXEC_DIR = ""; public static final String DEFAULT_STORAGE_DIR = ""; public static final String DEFAULT_CONDOR_BIN_DIR = ""; public static final String DEFAULT_CONDOR_CONFIG_DIR = ""; public static final String CONDOR_KICKSTART = "kickstart-condor"; // transfer constants public static final String DEFAULT_STAGING_DELIMITER = "-"; public static final String DEFAULT_TRANSFER_PROCESSES = "4"; public static final String DEFAULT_TRANSFER_STREAMS = "1"; // grid start constants public static final String DEFAULT_INVOKE_LENGTH = "4000"; // site selector constants public static final String DEFAULT_SITE_SELECTOR = "Random"; public static final String DEFAULT_SITE_SELECTOR_TIMEOUT = "300"; public static final String DEFAULT_SITE_SELECTOR_KEEP = "onerror"; /// some simulator constants that are used public static final String DEFAULT_DATA_MULTIPLICATION_FACTOR = "1"; public static final String DEFAULT_COMP_MULTIPLICATION_FACTOR = "1"; public static final String DEFAULT_COMP_ERROR_PERCENTAGE = "0"; public static final String DEFAULT_COMP_VARIANCE_PERCENTAGE = "0"; // collapsing constants public static final String DEFAULT_JOB_AGGREGATOR = "SeqExec"; // some tranformation catalog constants public static final String DEFAULT_TC_MAPPER_MODE = "All"; public static final String DEFAULT_TX_SELECTOR_MODE = "Random"; // logging constants public static final String DEFAULT_LOGGING_FILE = "stdout"; /** Default properties that applies priorities to all kinds of transfer jobs. */ public static final String ALL_TRANSFER_PRIORITY_PROPERTY_KEY = "pegasus.transfer.*.priority"; /** The property key designated the root workflow uuid. */ public static final String ROOT_WORKFLOW_UUID_PROPERTY_KEY = "pegasus.workflow.root.uuid"; /** The default value to be assigned for dagman.maxpre . */ public static final String DEFAULT_DAGMAN_MAX_PRE_VALUE = "1"; /** Various modes pegasus can be run in. */ public static enum PEGASUS_MODE { production, development, tutorial }; /** An enum defining The dial for cleanup algorithm */ public enum CLEANUP_SCOPE { fullahead, deferred }; /** An enum defining the dial for integrity checking */ public enum INTEGRITY_DIAL { none, nosymlink, full }; /** The default DAXCallback that is loaded, if none is specified by the user. */ private static final String DEFAULT_DAX_CALLBACK = "DAX2Graph"; /** The value of the PEGASUS_HOME environment variable. */ private String mPegasusHome; /** The object holding all the properties pertaining to the VDS system. */ private CommonProperties mProps; /** The default transfer priority that needs to be applied to the transfer jobs. */ private String mDefaultTransferPriority; /** The set containing the deprecated properties specified by the user. */ private Set mDeprecatedProperties; /** The pointer to the properties file that is written out in the submit directory. 
*/ private String mPropsInSubmitDir; /** Profiles that are specified in the properties */ private Profiles mProfiles; private static Map<Profiles.NAMESPACES, String> mNamepsaceToPropertiesPrefix; public Map<Profiles.NAMESPACES, String> namespaceToPropertiesPrefix() { if (mNamepsaceToPropertiesPrefix == null) { mNamepsaceToPropertiesPrefix = new HashMap<Profiles.NAMESPACES, String>(); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.condor, "condor"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.dagman, "dagman"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.globus, "globus"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.env, "env"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.hints, "hints"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.pegasus, "pegasus"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.selector, "selector"); } return mNamepsaceToPropertiesPrefix; } /** * Returns an instance to this properties object. * * @return a handle to the Properties class. */ public static PegasusProperties getInstance() { return getInstance(null); } /** * Returns an instance to this properties object. * * @param confProperties the path to conf properties, that supersede the loading of properties * from $PEGASUS_HOME/.pegasusrc * @return a handle to the Properties class. */ public static PegasusProperties getInstance(String confProperties) { return nonSingletonInstance(confProperties); } /** * To get a reference to the the object. The properties file that is loaded is from the path * specified in the argument. This is *not implemented* as singleton. However the invocation of * this does modify the internally held singleton object. * * @param confProperties the path to conf properties, that supersede the loading of properties * from $PEGASUS_HOME/.pegasusrc * @return a handle to the Properties class. */ protected static PegasusProperties nonSingletonInstance(String confProperties) { return new PegasusProperties(confProperties); } /** * To get a reference to the the object. The properties file that is loaded is from the path * specified in the argument. * * <p>This is *not implemented* as singleton. However the invocation of this does modify the * internally held singleton object. * * @return a handle to the Properties class. */ public static PegasusProperties nonSingletonInstance() { // return nonSingletonInstance( CommonProperties.PROPERTY_FILENAME ); return nonSingletonInstance(null); } /** * The constructor that constructs the default paths to the various configuration files, and * populates the singleton instance as required. If the properties file passed is null, then the * singleton instance is invoked, else the non singleton instance is invoked. * * @param confProperties the path to conf properties, that supersede the loading of properties * from $PEGASUS_HOME/.pegasusrc */ private PegasusProperties(String confProperties) { // mLogger = LogManager.getInstance(); mDeprecatedProperties = new HashSet(5); initializePropertyFile(confProperties); mDefaultTransferPriority = getDefaultTransferPriority(); } /** * Retrieves profiles from the properties * * @return profiles object. 
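 *
 * <p>For example (the property value below is made up), a property such as
 * {@code env.PEGASUS_HOME = /opt/pegasus} matches the {@code env} prefix and surfaces as a
 * profile in the env namespace:
 * <pre>{@code
 * PegasusProperties props = PegasusProperties.nonSingletonInstance();
 * Namespace env = props.getProfiles(Profiles.NAMESPACES.env); // would contain PEGASUS_HOME
 * }</pre>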
*/ public Profiles retrieveProfilesFromProperties() { // retrieve all the profiles that are specified in // the properties if (mProfiles == null) { mProfiles = retrieveProfilesFromProperties(mProps); // System.out.println( mProfiles ); } return mProfiles; } /** * Retrieves profiles from the properties * * @param properties the common properties so far * @return profiles object. */ protected Profiles retrieveProfilesFromProperties(CommonProperties properties) { Profiles profiles = new Profiles(); // retrieve some matching properties first // traverse through all the enum keys for (Profiles.NAMESPACES n : Profiles.NAMESPACES.values()) { Properties p = properties.matchingSubset(namespaceToPropertiesPrefix().get(n), false); for (Map.Entry<Object, Object> entry : p.entrySet()) { profiles.addProfile(n, (String) entry.getKey(), (String) entry.getValue()); } } return profiles; } /** * Returns the clone of the object. * * @return the clone */ public Object clone() { PegasusProperties props; try { // this will do a shallow clone for all member variables // that is fine for the string variables props = (PegasusProperties) super.clone(); // clone the CommonProperties props.mProfiles = (this.mProfiles == null) ? null : (Profiles) this.mProfiles.clone(); props.mProps = (this.mProps == null) ? null : (CommonProperties) this.mProps.clone(); } catch (CloneNotSupportedException e) { // somewhere in the hierarchy chain clone is not implemented throw new RuntimeException( "Clone not implemented in the base class of " + this.getClass().getName(), e); } return props; } /** * Accessor to the bin directory of the Pegasus install * * @return the "bin" directory of the VDS runtime system. */ public File getBinDir() { return mProps.getBinDir(); } /** * Accessor to the schema directory of the Pegasus install * * @return the schema directory of the VDS runtime system. */ public File getSchemaDir() { return mProps.getSchemaDir(); } /** * Accessor to the shared state directory of the Pegasus install * * @return the shared state directory of the VDS runtime system. */ public File getSharedDir() { return mProps.getSharedStateDir(); } /** * Returns all the profiles relevant to a particular namespace * * @param ns the namespace corresponding to which you need the profiles * @return the <code>Namespace</code> object holding the profiles */ public Namespace getProfiles(Profiles.NAMESPACES ns) { return this.retrieveProfilesFromProperties().get(ns); } /** * Returns the default path to the condor kickstart. Currently the path defaults to * $PEGASUS_HOME/bin/kickstart-condor. * * @return default path to kickstart condor. */ public String getDefaultPathToCondorKickstart() { StringBuffer sb = new StringBuffer(50); sb.append(mPegasusHome); sb.append(File.separator); sb.append("bin"); sb.append(File.separator); sb.append(CONDOR_KICKSTART); return sb.toString(); } /** * Gets the handle to the properties file. The singleton instance is invoked if the properties * file is null (partly due to the way CommonProperties is implemented), else the non singleton * is invoked. * * @param confProperties the path to conf properties, that supersede the loading of properties * from $PEGASUS_HOME/.pegasusrc */ private void initializePropertyFile(String confProperties) { try { /* mProps = ( confProperties == null ) ? //invoke the singleton instance CommonProperties.instance() : //invoke the non singleton instance CommonProperties.nonSingletonInstance( confProperties ); */ // we always load non singleton instance?
// Karan April 27, 2011 mProps = CommonProperties.nonSingletonInstance(confProperties); } catch (IOException e) { System.err.println("unable to read property file: " + e.getMessage()); System.exit(1); } catch (MissingResourceException e) { System.err.println("A required property is missing: " + e.getMessage()); System.exit(1); } } /** * It allows you to get any property from the property file without going through the * corresponding accessor function in this class. For coding and clarity purposes, the function * should be used judiciously, and the accessor function should be used as far as possible. * * @param key the property whose value is desired. * @return String */ public String getProperty(String key) { return mProps.getProperty(key); } /** * Returns the CommonProperties that this object encapsulates. Use only when absolutely * necessary. Use accessor methods wherever possible. * * @return CommonProperties */ public CommonProperties getVDSProperties() { return this.mProps; } /** * Accessor: Overwrite any properties from within the program. * * @param key is the key to look up * @param value is the new property value to place in the system. * @return the old value, or null if it didn't exist before. */ public Object setProperty(String key, String value) { return mProps.setProperty(key, value); } /** * Extracts a specific property key subset from the known properties. The prefix may be removed * from the keys in the resulting dictionary, or it may be kept. In the latter case, exact * matches on the prefix will also be copied into the resulting dictionary. * * @param prefix is the key prefix to filter the properties by. * @param keepPrefix if true, the key prefix is kept in the resulting dictionary. As * side-effect, a key that matches the prefix exactly will also be copied. If false, the * resulting dictionary's keys are shortened by the prefix. An exact prefix match will not * be copied, as it would result in an empty string key. * @return a property dictionary matching the filter key. May be an empty dictionary, if no * prefix matches were found. * @see #getProperty( String ) is used to assemble matches */ public Properties matchingSubset(String prefix, boolean keepPrefix) { return mProps.matchingSubset(prefix, keepPrefix); } /** * Remaps property keys matching a particular prefix to the new prefix, and returns the remapped * properties in a new Properties object. * * @param prefix the prefix whose matching keys are to be remapped * @param remapToPrefix the prefix to remap the matching keys to * @return the remapped properties */ public Properties remap(String prefix, String remapToPrefix) { Properties output = this.matchingSubset(prefix, true); Properties result = new Properties(); if (!output.isEmpty()) { // remap the properties for (String outputProperty : output.stringPropertyNames()) { String key = outputProperty.replace(prefix, remapToPrefix); String value = output.getProperty(outputProperty); result.setProperty(key, value); } } return result; } /** * Returns the properties matching a particular prefix as a list of sorted name value pairs, * where name is the full name of the matching property (including the prefix) and value is its * value in the properties file. * * @param prefix the prefix for the property names. * @param system boolean indicating whether to match only System properties or all including the * ones in the property file. * @return list of <code>NameValue</code> objects corresponding to the matched properties sorted * by keys. null if no matching property is found.
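 *
 * <p>A small sketch, given a {@code PegasusProperties} handle {@code props} (the property names
 * shown are assumptions, not defaults):
 * <pre>{@code
 * // with, say, pegasus.transfer.stagein.priority=100 loaded from the properties file
 * List matches = props.getMatchingProperties("pegasus.transfer.", false);
 * // -> NameValue pairs such as (pegasus.transfer.stagein.priority, 100), sorted by key, or null
 * }</pre>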
*/ public List getMatchingProperties(String prefix, boolean system) { // sanity check if (prefix == null) { return null; } Properties p = (system) ? System.getProperties() : matchingSubset(prefix, true); java.util.Enumeration e = p.propertyNames(); List l = (e.hasMoreElements()) ? new java.util.ArrayList() : null; while (e.hasMoreElements()) { String key = (String) e.nextElement(); NameValue nv = new NameValue(key, p.getProperty(key)); l.add(nv); } Collections.sort(l); return (l.isEmpty()) ? null : l; } /** * Accessor to $PEGASUS_HOME/etc. The files in this directory have a low change frequency, are * effectively read-only, they reside on a per-machine basis, and they are valid usually for a * single user. * * @return the "etc" directory of the VDS runtime system. */ public File getSysConfDir() { return mProps.getSysConfDir(); } /** * Removes a property from the soft state. * * @param key the key * @return the corresponding value if key exits, else null */ public String removeProperty(String key) { return mProps.removeProperty(key); } // PROPERTIES RELATED TO SCHEMAS /** * Returns the location of the schema for the DAX. * * <p>Referred to by the "pegasus.schema.dax" property. * * @return location to the DAX schema. */ public String getDAXSchemaLocation() { return this.getDAXSchemaLocation(null); } /** * Returns the location of the schema for the DAX. * * <p>Referred to by the "pegasus.schema.dax" property. * * @param defaultLocation the default location to the schema. * @return location to the DAX schema specified in the properties file, else the default * location if no value specified. */ public String getDAXSchemaLocation(String defaultLocation) { return mProps.getProperty("pegasus.schema.dax", defaultLocation); } /** * Returns the location of the schema for the PDAX. * * <p>Referred to by the "pegasus.schema.pdax" property * * @param defaultLocation the default location to the schema. * @return location to the PDAX schema specified in the properties file, else the default * location if no value specified. */ public String getPDAXSchemaLocation(String defaultLocation) { return mProps.getProperty("pegasus.schema.pdax", defaultLocation); } // DIRECTORY CREATION PROPERTIES /** * Returns the name of the class that the user wants, to insert the create directory jobs in the * graph in case of creating random directories. * * <p>Referred to by the "pegasus.dir.create.strategy" property. * * @return the create dir classname if specified in the properties file, else Minimal. */ public String getCreateDirClass() { return getProperty("pegasus.dir.create.strategy", "pegasus.dir.create", "Minimal"); } /** * Returns the name of the class that the user wants, to render the directory creation jobs. It * dictates what mechanism is used to create the directory for a workflow. * * <p>Referred to by the "pegasus.dir.create.impl" property. * * @return the create dir classname if specified in the properties file, else * DefaultImplementation. */ public String getCreateDirImplementation() { return mProps.getProperty("pegasus.dir.create.impl", "DefaultImplementation"); } /** * It specifies whether to use the extended timestamp format for generation of timestamps that * are used to create the random directory name, and for the classads generation. * * <p>Referred to by the "pegasus.dir.timestamp.extended" property. * * @return the value specified in the properties file if valid boolean, else false. 
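 *
 * <p>Illustrative only, given a {@code PegasusProperties} handle {@code props}: with
 * {@code pegasus.dir.timestamp.extended = true} set in the properties file this accessor returns
 * {@code true}; a missing or non boolean value falls back to {@code false}.
 * <pre>{@code
 * boolean extended = props.useExtendedTimeStamp();
 * }</pre>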
*/ public boolean useExtendedTimeStamp() { return Boolean.parse(mProps.getProperty("pegasus.dir.timestamp.extended"), false); } /** * Returns a boolean indicating whether to use timestamp for directory name creation or not. * * <p>Referred to by "pegasus.dir.useTimestamp" property. * * @return the boolean value specified in the properties files, else false. */ public boolean useTimestampForDirectoryStructure() { return Boolean.parse(mProps.getProperty("pegasus.dir.useTimestamp"), false); } /** * Returns the execution directory suffix or absolute specified that is appended/replaced to the * exec-mount-point specified in the pool catalog for the various pools. * * <p>Referred to by the "pegasus.dir.exec" property * * @return the value specified in the properties file, else the default suffix. * @see #DEFAULT_EXEC_DIR */ public String getExecDirectory() { return mProps.getProperty("pegasus.dir.exec", DEFAULT_EXEC_DIR); } /** * Returns the the path to the logs directory on the submit host. This is the directory where * the condor logs for the workflows are created. The logs directory should be on the local * filesystem else condor may complain * * <p>Referred to by the "pegasus.dir.submit.logs" property * * @return the value in the properties file, else null */ public String getSubmitLogsDirectory() { return mProps.getProperty("pegasus.dir.submit.logs"); } /** * Returns a boolean indicating whether the submit directory for the sub workflows should * include the label of the sub workflow or not. * * <p>Referred to by the "pegasus.dir.submit.subwf.labelbased" property * * @return the value in the properties file, else false */ public boolean labelBasedSubmitDirectoryForSubWorkflows() { return Boolean.parse(mProps.getProperty("pegasus.dir.submit.subwf.labelbased"), false); } /** * Returns the storage directory suffix or absolute specified that is appended/replaced to the * storage-mount-point specified in the pool catalog for the various pools. * * <p>Referred to by the "pegasus.dir.storage" property. * * @return the value specified in the properties file, else the default suffix. * @see #DEFAULT_STORAGE_DIR */ public String getStorageDirectory() { return mProps.getProperty("pegasus.dir.storage", DEFAULT_STORAGE_DIR); } /** * Returns a boolean indicating whether to have a deep storage directory structure or not while * staging out data to the output site. * * <p>Referred to by the "pegasus.dir.storage.deep" property. * * @return the boolean value specified in the properties files, else false. */ public boolean useDeepStorageDirectoryStructure() { return Boolean.parse(mProps.getProperty("pegasus.dir.storage.deep"), false); } // PROPERTIES RELATED TO CLEANUP /** * Returns the name of the Strategy class that the user wants, to insert the cleanup jobs in the * graph. * * <p>Referred to by the "pegasus.file.cleanup.strategy" property. * * @return the create dir classname if specified in the properties file, else InPlace. */ public String getCleanupStrategy() { return mProps.getProperty("pegasus.file.cleanup.strategy", "InPlace"); } /** * Returns the name of the class that the user wants, to render the cleanup jobs. It dictates * what mechanism is used to remove the files on a remote system. * * <p>Referred to by the "pegasus.file.cleanup.impl" property. * * @return the cleanup implementation classname if specified in the properties file, else * Cleanup. 
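 *
 * <p>A short sketch, given a {@code PegasusProperties} handle {@code props} (the override value
 * is hypothetical):
 * <pre>{@code
 * // defaults to "Cleanup" unless pegasus.file.cleanup.impl is set, e.g. to "RM"
 * String impl = props.getCleanupImplementation();
 * }</pre>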
*/ public String getCleanupImplementation() { return mProps.getProperty("pegasus.file.cleanup.impl", "Cleanup"); } /** * Returns the maximum number of clean up jobs created per level of the workflow in case of * InPlace cleanup. * * <p>Referred to by the "pegasus.file.cleanup.clusters.num" property * * @return the value in the property file , else null */ public String getMaximumCleanupJobsPerLevel() { return mProps.getProperty("pegasus.file.cleanup.clusters.num"); } /** * Returns the fraction of cleanup jobs clustered into a single clustered cleanup job. * * <p>Referred to by the "pegasus.file.cleanup.clusters.size" property * * @return the value in the property file , else null */ public String getClusterSizeCleanupJobsPerLevel() { return mProps.getProperty("pegasus.file.cleanup.clusters.size"); } /** * Returns the maximum available space per site. * * <p>Referred to by the "pegasus.file.cleanup.constraint.maxspace" property * * @return the value in the property file , else null */ public String getCleanupConstraintMaxSpace() { return mProps.getProperty("pegasus.file.cleanup.constraint.maxspace"); } /** * Returns the scope for file cleanup. It is used to trigger cleanup in case of deferred * planning. The vaild property values accepted are - fullahead - deferred * * <p>Referred to by the property "pegasus.file.cleanup.scope" * * @return the value in property file if specified, else fullahead */ public CLEANUP_SCOPE getCleanupScope() { CLEANUP_SCOPE scope = CLEANUP_SCOPE.fullahead; String value = mProps.getProperty("pegasus.file.cleanup.scope"); if (value == null) { return scope; } // try to assign a cleanup value try { scope = CLEANUP_SCOPE.valueOf(value); } catch (IllegalArgumentException iae) { // ignore do nothing. } return scope; } // PROPERTIES RELATED TO THE TRANSFORMATION CATALOG /** * Returns the mode to be used for accessing the Transformation Catalog. * * <p>Referred to by the "pegasus.catalog.transformation" property. * * @return the value specified in properties file */ public String getTCMode() { return mProps.getProperty(PegasusProperties.PEGASUS_TRANSFORMATION_CATALOG_PROPERTY); } /** * Returns the location of the transformation catalog. * * <p>Referred to by "pegasus.catalog.transformation.file" property. * * @return the value specified in the properties file null */ public String getTCPath() { return mProps.getProperty(PegasusProperties.PEGASUS_TRANSFORMATION_CATALOG_FILE_PROPERTY); } /** * Returns the mode for loading the transformation mapper that sits in front of the * transformation catalog. * * <p>Referred to by the "pegasus.catalog.transformation.mapper" property. * * @return the value specified in the properties file, else default tc mapper mode. * @see #DEFAULT_TC_MAPPER_MODE */ public String getTCMapperMode() { return mProps.getProperty("pegasus.catalog.transformation.mapper", DEFAULT_TC_MAPPER_MODE); } // REPLICA CATALOG PROPERTIES /** * Returns the replica mode. It identifies the ReplicaMechanism being used by Pegasus to * determine logical file locations. * * <p>Referred to by the "pegasus.catalog.replica" property. * * @return the replica mode, that is used to load the appropriate implementing class if property * is specified, else null */ public String getReplicaMode() { return mProps.getProperty(PEGASUS_REPLICA_CATALOG_PROPERTY); } /** * Returns the properties required for connecting to replica catalog to be used for registering * outputs. If not specified, then input replica catalog is used. 
* * <p>Referred to by the "pegasus.catalog.replica.output" property. * * @return the replica mode, that is used to load the appropriate implementing class if property * is specified, else null */ public String getOutputReplicaProperites() { return mProps.getProperty(PEGASUS_REPLICA_CATALOG_PROPERTY); } /** * Returns the url to the RLI of the RLS. * * <p>Referred to by the "pegasus.rls.url" property. * * @return the value specified in properties file, else DEFAULT_RLI_URL. * @see #DEFAULT_RLI_URL */ public String getRLIURL() { return mProps.getProperty("pegasus.catalog.replica.url", DEFAULT_RLI_URL); } /** * It returns the timeout value in seconds after which to timeout in case of no activity from * the RLS. * * <p>Referred to by the "pegasus.rc.rls.timeout" property. * * @return the timeout value if specified else, DEFAULT_RLS_TIMEOUT. * @see #DEFAULT_RLS_TIMEOUT */ public int getRLSTimeout() { String prop = mProps.getProperty("pegasus.catalog.replica.rls.timeout", DEFAULT_RLS_TIMEOUT); int val; try { val = Integer.parseInt(prop); } catch (Exception e) { return Integer.parseInt(DEFAULT_RLS_TIMEOUT); } return val; } // PROPERTIES RELATED TO SITE CATALOG /** * Returns the mode to be used for accessing the pool information. * * <p>Referred to by the "pegasus.catalog.site" property. * * @return the site catalog implementor, that is used to load the appropriate implementing class * if the property is specified, else NULL */ public String getSiteCatalogImplementor() { return mProps.getProperty(PegasusProperties.PEGASUS_SITE_CATALOG_PROPERTY); } /** * Returns the location of the schema for the DAX. * * <p>Referred to by the "pegasus.schema.sc" property. * * @return the location of pool schema if specified in properties file, else null. */ public String getPoolSchemaLocation() { return this.getPoolSchemaLocation(null); } /** * Returns the location of the schema for the site catalog file. * * <p>Referred to by the "pegasus.schema.sc" property * * @param defaultLocation the default location where the schema should be if no other location * is specified. * @return the location specified by the property, else defaultLocation. */ public String getPoolSchemaLocation(String defaultLocation) { return mProps.getProperty("pegasus.schema.sc", defaultLocation); } // PROVENANCE CATALOG PROPERTIES /** * Returns the provenance store to use to log the refiner actions. * * <p>Referred to by the "pegasus.catalog.provenance.refinement" property. * * @return the value set in the properties, else null if not set. */ public String getRefinementProvenanceStore() { return mProps.getProperty("pegasus.catalog.provenance.refinement"); } // TRANSFER MECHANISM PROPERTIES /** * Returns the transfer implementation that is to be used for constructing the transfer jobs. * * <p>Referred to by the "pegasus.transfer.*.impl" property. * * @return the transfer implementation */ public String getTransferImplementation() { return getTransferImplementation("pegasus.transfer.*.impl"); } /** * Returns the sls transfer implementation that is to be used for constructing the transfer * jobs. * * <p>Referred to by the "pegasus.transfer.lite.*.impl" property. * * @return the transfer implementation */ /* PM-810 done away. public String getSLSTransferImplementation(){ return getTransferImplementation( "pegasus.transfer.lite.*.impl" ); } */ /** * Returns the transfer implementation. * * @param property property name. 
* @return the transfer implementation, else the one specified by "pegasus.transfer.*.impl", */ public String getTransferImplementation(String property) { return mProps.getProperty(property, getDefaultTransferImplementation()); } /** * Returns a boolean indicating whether to stage sls files via Pegasus First Level Staging or * let Condor do it. * * <p>Referred to by the property "pegasus.transfer.stage.lite.file" * * @return boolean value mentioned in the properties or else the default value which is true. */ public boolean stageSLSFilesViaFirstLevelStaging() { return Boolean.parse(mProps.getProperty("pegasus.transfer.stage.lite.file"), false); } /** * Returns the default list of third party sites. * * <p>Referred to by the "pegasus.transfer.*.thirdparty.sites" property. * * @return the value specified in the properties file, else null. */ private String getDefaultThirdPartySites() { return mProps.getProperty("pegasus.transfer.*.thirdparty.sites"); } /** * Returns the default transfer implementation to be picked up for constructing transfer jobs. * * <p>Referred to by the "pegasus.transfer.*.impl" property. * * @return the value specified in the properties file, else null. */ private String getDefaultTransferImplementation() { return mProps.getProperty("pegasus.transfer.*.impl"); } /** * Returns a boolean indicating whether to bypass first level staging of inputs. Useful in case * of PegasusLite setup * * <p>Referred to by the "pegasus.transfer.bypass.input.staging" property. * * @return boolean value specified , else false */ public boolean bypassFirstLevelStagingForInputs() { return Boolean.parse(mProps.getProperty("pegasus.transfer.bypass.input.staging"), false); } /** * Returns the default priority for the transfer jobs if specified in the properties file. * * @return the value specified in the properties file, else null if non integer value or no * value specified. */ private String getDefaultTransferPriority() { String prop = mProps.getProperty(this.ALL_TRANSFER_PRIORITY_PROPERTY_KEY); int val = -1; try { val = Integer.parseInt(prop); } catch (Exception e) { return null; } return Integer.toString(val); } /** * Returns the base source URL where pointing to the directory where the worker package * executables for pegasus releases are kept. * * <p>Referred to by the "pegasus.transfer.setup.source.base.url * * @return the value in the property file, else null */ public String getBaseSourceURLForSetupTransfers() { return mProps.getProperty("pegasus.transfer.setup.source.base.url"); } /** * Returns the transfer refiner that is to be used for adding in the transfer jobs in the * workflow * * <p>Referred to by the "pegasus.transfer.refiner" property. * * @return the transfer refiner, else null */ public String getTransferRefiner() { return mProps.getProperty("pegasus.transfer.refiner"); } /** * Returns whether to introduce quotes around url's before handing to g-u-c and condor. * * <p>Referred to by "pegasus.transfer.single.quote" property. * * @return boolean value specified in the properties file, else true in case of non boolean * value being specified or property not being set. */ public boolean quoteTransferURL() { return Boolean.parse(mProps.getProperty("pegasus.transfer.single.quote"), true); } /** * It returns the number of processes of g-u-c that the transfer script needs to spawn to do the * transfers. This is applicable only in the case where the transfer executable has the * capability of spawning processes. 
It should not be confused with the number of streams that * each process opens. By default it is set to 4. In case a non integer value is specified in * the properties file it returns the default value. * * <p>Referred to by "pegasus.transfer.throttle.processes" property. * * @return the number of processes specified in properties file, else DEFAULT_TRANSFER_PROCESSES * @see #DEFAULT_TRANSFER_PROCESSES */ public String getNumOfTransferProcesses() { String prop = mProps.getProperty( "pegasus.transfer.throttle.processes", DEFAULT_TRANSFER_PROCESSES); int val = -1; try { val = Integer.parseInt(prop); } catch (Exception e) { return DEFAULT_TRANSFER_PROCESSES; } return Integer.toString(val); } /** * It returns the number of streams that each transfer process uses to do the ftp transfer. By * default it is set to 1.In case a non integer value is specified in the properties file it * returns the default value. * * <p>Referred to by "pegasus.transfer.throttle.streams" property. * * @return the number of streams specified in the properties file, else * DEFAULT_TRANSFER_STREAMS. * @see #DEFAULT_TRANSFER_STREAMS */ public String getNumOfTransferStreams() { String prop = mProps.getProperty("pegasus.transfer.throttle.streams", DEFAULT_TRANSFER_STREAMS); int val = -1; try { val = Integer.parseInt(prop); } catch (Exception e) { return DEFAULT_TRANSFER_STREAMS; } return Integer.toString(val); } /** * It specifies whether the underlying transfer mechanism being used should use the force option * if available to transfer the files. * * <p>Referred to by "pegasus.transfer.force" property. * * @return boolean value specified in the properties file,else false in case of non boolean * value being specified or property not being set. */ public boolean useForceInTransfer() { return Boolean.parse(mProps.getProperty("pegasus.transfer.force"), false); } /** * It returns whether the use of symbolic links in case where the source and destination files * happen to be on the same file system. * * <p>Referred to by "pegasus.transfer.links" property. * * @return boolean value specified in the properties file, else false in case of non boolean * value being specified or property not being set. */ public boolean getUseOfSymbolicLinks() { String value = mProps.getProperty("pegasus.transfer.links"); return Boolean.parse(value, false); } /** * Returns the comma separated list of third party sites, specified in the properties. * * @param property property name. * @return the comma separated list of sites. */ public String getThirdPartySites(String property) { String value = mProps.getProperty(property); return value; } /** * Returns the comma separated list of third party sites for which the third party transfers are * executed on the remote sites. * * @param property property name. * @return the comma separated list of sites. */ public String getThirdPartySitesRemote(String property) { return mProps.getProperty(property); } /** * Returns the delimiter to be used for constructing the staged executable name, during transfer * of executables to remote sites. * * <p>Referred to by the "pegasus.transfer.staging.delimiter" property. * * @return the value specified in the properties file, else DEFAULT_STAGING_DELIMITER * @see #DEFAULT_STAGING_DELIMITER */ public String getStagingDelimiter() { return mProps.getProperty("pegasus.transfer.staging.delimiter", DEFAULT_STAGING_DELIMITER); } /** * Returns the list of sites for which the chmod job creation has to be disabled for executable * staging. 
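 *
 * <p>Illustrative only (the site names are made up), given a {@code PegasusProperties} handle
 * {@code props}; note that the implementation below reads the key
 * {@code pegasus.transfer.disable.chmod.sites}:
 * <pre>{@code
 * String sites = props.getChmodDisabledSites(); // e.g. "siteA,siteB", or null if unset
 * }</pre>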
* * <p>Referred to by the "pegasus.transfer.disable.chmod" property. * * @return a comma separated list of site names. */ public String getChmodDisabledSites() { return mProps.getProperty("pegasus.transfer.disable.chmod.sites"); } /** * It specifies if the worker package needs to be staged to the remote site or not. * * <p>Referred to by "pegasus.transfer.worker.package" property. * * @return boolean value specified in the properties file,else false in case of non boolean * value being specified or property not being set. */ public boolean transferWorkerPackage() { return Boolean.parse(mProps.getProperty(PEGASUS_TRANSFER_WORKER_PACKAGE_PROPERTY), false); } /** * A Boolean property to indicate whether to enforce strict checks against provided worker * package for jobs in PegasusLite mode. if a job comes with worker package and it does not * match fully with worker node architecture , it will revert to Pegasus download website. * Default value is true. * * <p>Referred to by "pegasus.transfer.worker.package.strict" property. * * @return boolean value specified in the properties file,else true in case of non boolean value * being specified or property not being set. */ public boolean enforceStrictChecksForWorkerPackage() { return Boolean.parse( mProps.getProperty(PEGASUS_TRANSFER_WORKER_PACKAGE_STRICT_PROPERTY), true); } /** * A Boolean property to indicate whether a pegasus lite job is allowed to download from Pegasus * website. * * <p>Referred to by "pegasus.transfer.worker.package.autodownload" property. * * @return boolean value specified in the properties file,else true in case of non boolean value * being specified or property not being set. */ public boolean allowDownloadOfWorkerPackageFromPegasusWebsite() { return Boolean.parse( mProps.getProperty(PEGASUS_TRANSFER_WORKER_PACKAGE_AUTODOWNLOAD_PROPERTY), true); } /** * Returns the arguments with which the transfer executable needs to be invoked. * * <p>Referred to by "pegasus.transfer.arguments" property. * * @return the arguments specified in the properties file, else null if property is not * specified. */ public String getTransferArguments() { return mProps.getProperty(PEGASUS_TRANSFER_ARGUMENTS_KEY); } /** * Returns the extra arguments with which the transfer executable used in PegasusLite needs to * be invoked. * * <p>Referred to by "pegasus.transfer.lite.arguments" property. * * @return the arguments specified in the properties file, else null if property is not * specified. * @see #PEGASUS_TRANSFER_LITE_ARGUMENTS_KEY */ public String getSLSTransferArguments() { return mProps.getProperty(PEGASUS_TRANSFER_LITE_ARGUMENTS_KEY); } /** * Returns the priority to be set for the stage in transfer job. * * <p>Referred to by "pegasus.transfer.stagein.priority" property if set, else by * "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the properties, else null. */ public String getTransferStageInPriority() { return getTransferPriority("pegasus.transfer.stagein.priority"); } /** * Returns the priority to be set for the stage out transfer job. * * <p>Referred to by "pegasus.transfer.stageout.priority" property if set, else by * "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the properties, else null. */ public String getTransferStageOutPriority() { return getTransferPriority("pegasus.transfer.stageout.priority"); } /** * Returns the priority to be set for the interpool transfer job. 
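 *
 * <p>A small sketch, given a {@code PegasusProperties} handle {@code props} (the priority value
 * is an assumed example):
 * <pre>{@code
 * // with pegasus.transfer.inter.priority = 100 (or pegasus.transfer.*.priority as fallback)
 * String priority = props.getTransferInterPriority(); // "100", or null if neither is a valid integer
 * }</pre>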
* * <p>Referred to by "pegasus.transfer.inter.priority" property if set, else by * "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the properties, else null. */ public String getTransferInterPriority() { return getTransferPriority("pegasus.transfer.inter.priority"); } /** * Returns the transfer priority. * * @param property property name. * @return the priority as String if a valid integer specified in the properties as value to * property, else null. */ private String getTransferPriority(String property) { String value = mProps.getProperty(property, mDefaultTransferPriority); int val = -1; try { val = Integer.parseInt(value); } catch (Exception e) { } // if value in properties file is corrupted // again use the default transfer priority return (val < 0) ? mDefaultTransferPriority : Integer.toString(val); } // REPLICA SELECTOR FUNCTIONS /** * Returns the mode for loading the transformation selector that selects amongst the various * candidate transformation catalog entry objects. * * <p>Referred to by the "pegasus.selector.transformation" property. * * @return the value specified in the properties file, else default transformation selector. * @see #DEFAULT_TC_MAPPER_MODE */ public String getTXSelectorMode() { return mProps.getProperty("pegasus.selector.transformation", DEFAULT_TX_SELECTOR_MODE); } /** * Returns the name of the selector to be used for selection amongst the various replicas of a * single lfn. * * <p>Referred to by the "pegasus.selector.replica" property. * * @return the name of the selector if the property is specified, else null */ public String getReplicaSelector() { return mProps.getProperty("pegasus.selector.replica"); } /** * Returns a comma separated list of sites, that are restricted in terms of data movement from * the site. * * <p>Referred to by the "pegasus.rc.restricted.sites" property. * * @return comma separated list of sites. */ // public String getRestrictedSites(){ // return mProps.getProperty("pegasus.rc.restricted.sites",""); // } /** * Returns a comma separated list of sites, from which to prefer data transfers for all sites. * * <p>Referred to by the "pegasus.selector.replica.*.prefer.stagein.sites" property. * * @return comma separated list of sites. */ public String getAllPreferredSites() { return mProps.getProperty("pegasus.selector.replica.*.prefer.stagein.sites", ""); } /** * Returns a comma separated list of sites, from which to ignore data transfers for all sites. * Replaces the old pegasus.rc.restricted.sites property. * * <p>Referred to by the "pegasus.selector.ignore.*.prefer.stagein.sites" property. * * @return comma separated list of sites. */ public String getAllIgnoredSites() { return mProps.getProperty("pegasus.selector.replica.*.ignore.stagein.sites", ""); } // SITE SELECTOR PROPERTIES /** * Returns the class name of the site selector, that needs to be invoked to do the site * selection. * * <p>Referred to by the "pegasus.selector.site" property. * * @return the classname corresponding to the site selector that needs to be invoked if * specified in the properties file, else the default selector specified by * DEFAULT_SITE_SELECTOR. * @see #DEFAULT_SITE_SELECTOR */ public String getSiteSelectorMode() { return mProps.getProperty("pegasus.selector.site", DEFAULT_SITE_SELECTOR); } /** * Returns the path to the external site selector that needs to be called out to make the * decision of site selection. * * <p>Referred to by the "pegasus.selector.site.path" property. 
* * @return the path to the external site selector if specified in the properties file, else * null. */ public String getSiteSelectorPath() { return mProps.getProperty("pegasus.selector.site.path"); } /** * It returns the timeout value in seconds after which to timeout in case of no activity from * the external site selector. * * <p>Referred to by the "pegasus.selector.site.timeout" property. * * @return the timeout value if specified else, DEFAULT_SITE_SELECTOR_TIMEOUT. * @see #DEFAULT_SITE_SELECTOR_TIMEOUT */ public int getSiteSelectorTimeout() { String prop = mProps.getProperty("pegasus.selector.site.timeout", DEFAULT_SITE_SELECTOR_TIMEOUT); int val; try { val = Integer.parseInt(prop); } catch (Exception e) { return Integer.parseInt(DEFAULT_SITE_SELECTOR_TIMEOUT); } return val; } /** * Returns a value designating whether we need to keep the temporary files that are passed to * the external site selectors. The check for the valid tristate value should be done at the * calling function end. This just passes on the value user specified in the properties file. * * <p>Referred to by the "pegasus.selector.site.keep.tmp" property. * * @return the value of the property is specified, else DEFAULT_SITE_SELECTOR_KEEP * @see #DEFAULT_SITE_SELECTOR_KEEP */ public String getSiteSelectorKeep() { return mProps.getProperty("pegasus.selector.site.keep.tmp", DEFAULT_SITE_SELECTOR_KEEP); } // PROPERTIES RELATED TO KICKSTART AND EXITCODE /** * Returns the GRIDSTART that is to be used to launch the jobs on the grid. * * <p>Referred to by the "pegasus.gridstart" property. * * @return the value specified in the property file, else null */ public String getGridStart() { return mProps.getProperty("pegasus.gridstart"); } /** * Returns a boolean indicating whether kickstart should set x bit on staged executables before * launching them. * * <p>Referred to by the "pegasus.gridstart.kickstart.set.xbit" property. * * @return the value specified in the property file, else false */ public boolean setXBitWithKickstart() { return Boolean.parse(mProps.getProperty("pegasus.gridstart.kickstart.set.xbit"), false); } /** * Return a boolean indicating whether to turn the stat option for kickstart on or not. By * default it is turned on. * * <p>Referred to by the "pegasus.gridstart.kickstart.stat" property. * * @return value specified in the property file, else null. */ public String doStatWithKickstart() { return mProps.getProperty(PEGASUS_KICKSTART_STAT_PROPERTY); } /** * Return a boolean indicating whether to generate the LOF files for the jobs or not. This is * used to generate LOF files, but not trigger the stat option * * <p>Referred to by the "pegasus.gridstart.kickstart.generate.loft" property. * * @return the boolean value specified in the property file, else false if not specified or non * boolean specified. */ public boolean generateLOFFiles() { return Boolean.parse(mProps.getProperty("pegasus.gridstart.generate.lof"), false); } /** * Returns a boolean indicating whether to use invoke in kickstart always or not. * * <p>Referred to by the "pegasus.gridstart.invoke.always" property. * * @return the boolean value specified in the property file, else false if not specified or non * boolean specified. */ public boolean useInvokeInGridStart() { return Boolean.parse(mProps.getProperty("pegasus.gridstart.invoke.always"), false); } /** * Returns a boolean indicating whether to disable use of invoke or not. * * <p>Referred to by the "pegasus.gridstart.invoke.disable" property. 
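 *
 * <p>Illustrative only, given a {@code PegasusProperties} handle {@code props}:
 * <pre>{@code
 * // true only if pegasus.gridstart.invoke.disable is set to a true boolean value
 * boolean disabled = props.disableInvokeInGridStart();
 * }</pre>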
* * @return the boolean value specified in the property file, else false if not specified or non * boolean specified. */ public boolean disableInvokeInGridStart() { return Boolean.parse(mProps.getProperty(PegasusProperties.DISABLE_INVOKE_PROPERTY), false); } /** * Returns the trigger value for invoking an application through kickstart using kickstart. If * the arguments value being constructed in the condor submit file is more than this value, then * invoke is used to pass the arguments to the remote end. Helps in bypassing the Condor 4K * limit. * * <p>Referred to by "pegasus.gridstart.invoke.length" property. * * @return the long value specified in the properties files, else DEFAULT_INVOKE_LENGTH * @see #DEFAULT_INVOKE_LENGTH */ public long getGridStartInvokeLength() { long value = Long.parseLong(this.DEFAULT_INVOKE_LENGTH); String st = mProps.getProperty("pegasus.gridstart.invoke.length", this.DEFAULT_INVOKE_LENGTH); try { value = Long.parseLong(st); } catch (Exception e) { // ignore malformed values from // the property file } return value; } /** * Returns a boolean indicating whehter to pass extra options to kickstart or not. The extra * options have appeared only in VDS version 1.4.2 (like -L and -T). * * <p>Referred to by "pegasus.gridstart.label" property. * * @return the boolean value specified in the property file, else true if not specified or non * boolean specified. */ public boolean generateKickstartExtraOptions() { return Boolean.parse(mProps.getProperty("pegasus.gridstart.label"), true); } /** * Returns the mode adding the postscripts for the jobs. At present takes in only two values all * or none default being none. * * <p>Referred to by the "pegasus.exitcode.scope" property. * * @return the mode specified by the property, else DEFAULT_POSTSCRIPT_MODE * @see #DEFAULT_POSTSCRIPT_MODE */ /* public String getPOSTScriptScope() { return mProps.getProperty( "pegasus.exitcode.dial", DEFAULT_POSTSCRIPT_MODE ); } */ /** * Returns the postscript to use with the jobs in the workflow. They maybe overriden by values * specified in the profiles. * * <p>Referred to by the "pegasus.exitcode.impl" property. * * @return the postscript to use for the workflow, else null if not specified in the properties. */ /* public String getPOSTScript(){ return mProps.getProperty( "pegasus.exitcode.impl" ); } */ /** * Returns the path to the exitcode executable to be used. * * <p>Referred to by the "pegasus.exitcode.path.[value]" property, where [value] is replaced by * the value passed an input to this function. * * @param value the short name of the postscript whose path we want. * @return the path to the postscript if specified in properties file. */ /* public String getPOSTScriptPath( String value ){ value = ( value == null ) ? "*" : value; StringBuffer key = new StringBuffer(); key.append( "pegasus.exitcode.path." ).append( value ); return mProps.getProperty( key.toString() ); } */ /** * Returns the argument string containing the arguments by which exitcode is invoked. * * <p>Referred to by the "pegasus.exitcode.arguments" property. * * @return String containing the arguments,else empty string. */ /* public String getPOSTScriptArguments() { return mProps.getProperty( "pegasus.exitcode.arguments", ""); } */ /** * Returns a boolean indicating whether to turn debug on or not for exitcode. By default false * is returned. * * <p>Referred to by the "pegasus.exitcode.debug" property. * * @return boolean value. 
*/ public boolean setPostSCRIPTDebugON() { return Boolean.parse(mProps.getProperty("pegasus.exitcode.debug"), false); } /** * Returns the argument string containing the arguments by which prescript is invoked. * * <p>Referred to by the "pegasus.prescript.arguments" property. * * @return String containing the arguments. null if not specified. */ /* public String getPrescriptArguments() { return mProps.getProperty( "pegasus.prescript.arguments","" ); } */ // PROPERTIES RELATED TO REMOTE SCHEDULERS /** * Returns the project names that need to be appended to the RSL String while creating the * submit files. Referred to by pegasus.remote.projects property. If present, Pegasus ends up * inserting an RSL string (project = value) in the submit file. * * @return a comma separated list of key value pairs if property specified, else null. */ // public String getRemoteSchedulerProjects() { // return mProps.getProperty( "pegasus.remote.scheduler.projects" ); // } /** * Returns the queue names that need to be appended to the RSL String while creating the submit * files. Referred to by the pegasus.remote.queues property. If present, Pegasus ends up * inserting an RSL string (project = value) in the submit file. * * @return a comma separated list of key value pairs if property specified, else null. */ // public String getRemoteSchedulerQueues() { // return mProps.getProperty( "pegasus.remote.scheduler.queues" ); // } /** * Returns the maxwalltimes for the various pools that need to be appended to the RSL String * while creating the submit files. Referred to by the pegasus.scheduler.remote.queues property. * If present, Pegasus ends up inserting an RSL string (project = value) in the submit file. * * @return a comma separated list of key value pairs if property specified, else null. */ // public String getRemoteSchedulerMaxWallTimes() { // return mProps.getProperty( "pegasus.remote.scheduler.min.maxwalltime" ); // } /** * Returns the minimum walltimes that need to be enforced. * * <p>Referred to by "pegasus.scheduler.remote.min.[key]" property. * * @param key the appropriate globus RSL key. Generally are maxtime|maxwalltime|maxcputime * @return the integer value as specified, -1 in case of no value being specified. */ // public int getMinimumRemoteSchedulerTime( String key ){ // StringBuffer property = new StringBuffer(); // property.append( "pegasus.remote.scheduler.min." ).append( key ); // // int val = -1; // // try { // val = Integer.parseInt( mProps.getProperty( property.toString() ) ); // } catch ( Exception e ) { // } // return val; // } // PROPERTIES RELATED TO CONDOR /** * Completely disable placing a symlink for Condor common log (indiscriminately). * * <p>Starting 4.2.1 this defaults to "false" . * * <p>Referred to by the "pegasus.condor.logs.symlink" property. * * @return value specified by the property. Defaults to false. */ public boolean symlinkCommonLog() { return Boolean.parse(mProps.getProperty("pegasus.condor.logs.symlink"), false); } /** * Whether Pegasus should associate condor concurrency limits or not * * <p>Referred to by the "pegasus.condor.concurrency.limits" property. * * @return value specified by the property. Defaults to false. */ public boolean associateCondorConcurrencyLimits() { return Boolean.parse(mProps.getProperty("pegasus.condor.concurrency.limits"), false); } /** * Returns a boolean indicating whether we want to Condor Quote the arguments of the job or not. * * <p>Referred to by the "pegasus.condor.arguments.quote" property. 
* * @return boolean */ public boolean useCondorQuotingForArguments() { return Boolean.parse(mProps.getProperty("pegasus.condor.arguments.quote"), true); } /** * Returns the number of times Condor should retry running a job in case of failure. The retry * ends up reinvoking the prescript, that can change the site selection decision in case of * failure. * * <p>Referred to by the "pegasus.dagman.retry" property. * * @return an int denoting the number of times to retry. null if not specified or invalid entry. */ /* public String getCondorRetryValue() { String prop = mProps.getProperty( "pegasus.dagman.retry" ); int val = -1; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { return null; } return Integer.toString( val ); } */ /** * Tells whether to stream condor output or not. By default it is true , meaning condor streams * the output from the remote hosts back to the submit hosts, instead of staging it. This helps * in saving filedescriptors at the jobmanager end. * * <p>If it is set to false, output is not streamed back. The line "stream_output = false" * should be added in the submit files for kickstart jobs. * * <p>Referred to by the "pegasus.condor.output.stream" property. * * @return the boolean value specified by the property, else false in case of invalid value or * property not being specified. */ /* public boolean streamCondorOutput() { return Boolean.parse(mProps.getProperty( "pegasus.condor.output.stream"), false ); } */ /** * Tells whether to stream condor error or not. By default it is true , meaning condor streams * the error from the remote hosts back to the submit hosts instead of staging it in. This helps * in saving filedescriptors at the jobmanager end. * * <p>Referred to by the "pegasus.condor.error.stream" property. * * <p>If it is set to false, output is not streamed back. The line "stream_output = false" * should be added in the submit files for kickstart jobs. * * @return the boolean value specified by the property, else false in case of invalid value or * property not being specified. */ /* public boolean streamCondorError() { return Boolean.parse(mProps.getProperty( "pegasus.condor.error.stream"), false ); } */ // PROPERTIES RELATED TO STORK /** * Returns the credential name to be used for the stork transfer jobs. * * <p>Referred to by the "pegasus.transfer.stork.cred" property. * * @return the credential name if specified by the property, else null. */ public String getCredName() { return mProps.getProperty("pegasus.transfer.stork.cred"); } // SOME LOGGING PROPERTIES /** * Returns the log manager to use. * * <p>Referred to by the "pegasus.log.manager" property. * * @return the value in the properties file, else Default */ public String getLogManager() { return mProps.getProperty("pegasus.log.manager", "Default"); } /** * Returns the log formatter to use. * * <p>Referred to by the "pegasus.log.formatter" property. * * @return the value in the properties file, else Simple */ public String getLogFormatter() { return mProps.getProperty("pegasus.log.formatter", "Simple"); } /** * Returns the http url for log4j properties for windward project. * * <p>Referred to by the "log4j.configuration" property. * * @return the value in the properties file, else null */ public String getHttpLog4jURL() { // return mProps.getProperty( "pegasus.log.windward.log4j.http.url" ); return mProps.getProperty("log4j.configuration"); } /** * Returns the file to which all the logging needs to be directed to. * * <p>Referred to by the "pegasus.log.*" property. 
* * @return the value of the property that is specified, else null */ public String getLoggingFile() { return mProps.getProperty("pegasus.log.*"); } /** * Returns the location of the local log file where you want the messages to be logged. Not used * for the moment. * * <p>Referred to by the "pegasus.log4j.log" property. * * @return the value specified in the property file,else null. */ public String getLog4JLogFile() { return mProps.getProperty("pegasus.log4j.log"); } /** * Returns a boolean indicating whether to write out the planner metrics or not. * * <p>Referred to by the "pegasus.log.metrics" property. * * @return boolean in the properties, else true */ public boolean writeOutMetrics() { return Boolean.parse( mProps.getProperty(PegasusProperties.PEGASUS_LOG_METRICS_PROPERTY), true) && (this.getMetricsLogFile() != null); } /** * Returns the path to the file that is used to be logging metrics * * <p>Referred to by the "pegasus.log.metrics.file" property. * * @return path to the metrics file if specified, else rundir/pegasus.metrics */ public String getMetricsLogFile() { String file = mProps.getProperty(PegasusProperties.PEGASUS_LOG_METRICS_PROPERTY_FILE); return file; } /** * Returns a boolean indicating whether to log JVM memory usage or not. * * <p>Referred to by the "pegasus.log.memory.usage" property. * * @return boolean value specified in properties else false. */ public boolean logMemoryUsage() { return Boolean.parse(mProps.getProperty("pegasus.log.memory.usage"), false); } // SOME MISCELLANEOUS PROPERTIES /** * Returns a boolean indicating whether we assign job priorities or not to the jobs * * <p>Referred to by the "pegasus.job.priority.assign" property. * * @return boolean value specified in properties else true. */ public boolean assignDefaultJobPriorities() { return Boolean.parse(mProps.getProperty("pegasus.job.priority.assign"), true); } /** * Returns a boolean indicating whether we create registration jobs or not. * * <p>Referred to by the "pegasus.register" property. * * @return boolean value specified in properties else true. */ public boolean createRegistrationJobs() { return Boolean.parse(mProps.getProperty("pegasus.register"), true); } /** * Returns a boolean indicating whether to register a deep LFN or not. * * <p>Referred to by the "pegasus.register.deep" property. * * @return boolean value specified in properties else true. */ public boolean registerDeepLFN() { return Boolean.parse(mProps.getProperty("pegasus.register.deep"), true); } /** * Returns a boolean indicating whether to have jobs executing on worker node tmp or not. * * <p>Referred to by the "pegasus.execute.*.filesystem.local" property. * * @return boolean value in the properties file, else false if not specified or an invalid value * specified. */ public boolean executeOnWorkerNode() { return Boolean.parse( mProps.getProperty(PegasusProperties.PEGASUS_WORKER_NODE_EXECUTION_PROPERTY), false); } /** * Returns a boolean indicating whether to treat the entries in the cache files as a replica * catalog or not. * * @return boolean */ public boolean treatCacheAsRC() { return Boolean.parse(mProps.getProperty("pegasus.catalog.replica.cache.asrc"), false); } /** * Returns a boolean indicating whether to treat the file locations in the DAX as a replica * catalog or not. * * <p>Referred to by the "pegasus.catalog.replica.dax.asrc" property. * * @return boolean value in the properties file, else false if not specified or an invalid value * specified. 
*/ public boolean treatDAXLocationsAsRC() { return Boolean.parse(mProps.getProperty("pegasus.catalog.replica.dax.asrc"), false); } /** * Returns a boolean indicating whether to preserve line breaks. * * <p>Referred to by the "pegasus.parser.dax.preserve.linebreaks" property. * * @return boolean value in the properties file, else false if not specified or an invalid value * specified. */ public boolean preserveParserLineBreaks() { return Boolean.parse(mProps.getProperty("pegasus.parser.dax.preserve.linebreaks"), false); } /** * Returns a boolean indicating whether to automatically add edges as a result of underlying * data dependencies between jobs. * * <p>Referred to by the "pegasus.parser.dax.data.dependencies" property. * * @return boolean value in the properties file, else true if not specified or an invalid value * specified. */ public boolean addDataDependencies() { return Boolean.parse(mProps.getProperty("pegasus.parser.dax.data.dependencies"), true); } /** * Returns the path to the wings properties file. * * <p>Referred to by the "pegasus.wings.properties" property. * * @return value in the properties file, else null. */ public String getWingsPropertiesFile() { return mProps.getProperty("pegasus.wings.properties"); } /** * Returns the request id. * * <p>Referred to by the "pegasus.wings.request.id" property. * * @return value in the properties file, else null. */ public String getWingsRequestID() { return mProps.getProperty("pegasus.wings.request.id"); } /** * Returns the timeout value in seconds after which to timeout in case of opening sockets to * grid ftp server. * * <p>Referred to by the "pegasus.auth.gridftp.timeout" property. * * @return the timeout value if specified, else null. * @see #DEFAULT_SITE_SELECTOR_TIMEOUT */ public String getGridFTPTimeout() { return mProps.getProperty("pegasus.auth.gridftp.timeout"); } /** * Returns the submit mode to be used to submit the jobs to the grid. * * <p>Referred to by the "pegasus.code.generator" property. * * @return the submit mode specified in the property file, else the default i.e. condor. */ public String getSubmitMode() { return mProps.getProperty("pegasus.code.generator", "condor"); } /** * Returns the mode for parsing the dax while writing out the partitioned daxes. * * <p>Referred to by the "pegasus.partition.parser.load" property. * * @return the value specified in the properties file, else the default value i.e. single. */ public String getPartitionParsingMode() { return mProps.getProperty("pegasus.partition.parser.load", "single"); } /** * Returns the scope for the data reuse module. * * <p>Referred to by the "pegasus.data.reuse.scope" property. * * @return the value specified in the properties file, else null */ public String getDataReuseScope() { return mProps.getProperty("pegasus.data.reuse.scope"); } // JOB COLLAPSING PROPERTIES /** * Returns a comma separated list for the node collapsing criteria for the execution pools. This * determines how many jobs one fat node gobbles up. * * <p>Referred to by the "pegasus.clusterer.nodes" property. * * @return the value specified in the properties file, else null. */ public String getCollapseFactors() { return mProps.getProperty("pegasus.clusterer.nodes"); } /** * Returns the user's horizontal clustering preference. This property determines how to cluster * horizontal jobs. If this property is set with a value of runtime, the jobs will be * grouped into clusters according to their runtimes as specified by <code>job.runtime * </code> property.
For all other cases the default horizontal clustering approach will be * used. * * @return the value specified in the properties file, else null. */ public String getHorizontalClusterPreference() { return mProps.getProperty("pegasus.clusterer.preference"); } /** * Returns what job aggregator is to be used to aggregate multiple compute jobs into a single * condor job. * * <p>Referred to by the "pegasus.cluster.job.aggregator" property. * * @return the value specified in the properties file, else DEFAULT_JOB_AGGREGATOR * @see #DEFAULT_JOB_AGGREGATOR */ public String getJobAggregator() { return mProps.getProperty("pegasus.clusterer.job.aggregator", DEFAULT_JOB_AGGREGATOR); } /** * Returns whether the seqexec job aggregator should log progress to a log or not. * * <p>Referred to by the "pegasus.clusterer.job.aggregator.seqexec.log" property. * * @return the value specified in the properties file, else false */ public boolean logJobAggregatorProgress() { return Boolean.parse(getProperty("pegasus.clusterer.job.aggregator.seqexec.log"), false); } /** * Returns whether the seqexec job aggregator should write to a global log or not. This comes * into play only if "pegasus.clusterer.job.aggregator.seqexec.log" is set to true. * * <p>Referred to by the "pegasus.clusterer.job.aggregator.seqexec.log.global" property. * * @return the value specified in the properties file, else true */ public boolean logJobAggregatorProgressToGlobal() { return Boolean.parse( getProperty( "pegasus.clusterer.job.aggregator.seqexec.log.global", "pegasus.clusterer.job.aggregator.seqexec.hasgloballog"), true); } /** * Returns a boolean indicating whether seqexec trips on the first job failure. * * <p>Referred to by the "pegasus.clusterer.job.aggregator.seqexec.firstjobfail" property. * * @return the value specified in the properties file, else true */ public boolean abortOnFirstJobFailure() { return Boolean.parse( mProps.getProperty("pegasus.clusterer.job.aggregator.seqexec.firstjobfail"), true); } /** * Returns a boolean indicating whether clustering should be allowed for single jobs or not * * <p>Referred to by the "pegasus.clusterer.allow.single" property. * * @return the value specified in the properties file, else false */ public boolean allowClusteringOfSingleJobs() { return Boolean.parse(mProps.getProperty("pegasus.clusterer.allow.single"), false); } /** * Returns a boolean indicating whether to enable integrity checking or not. * * @return false if set explicitly to none, else true */ public boolean doIntegrityChecking() { return this.getIntegrityDial() != INTEGRITY_DIAL.none; } /** * Returns the integrity dial enum * * <p>Referred to by the "pegasus.integrity.checking" property. * * @return the value specified in the properties file, else INTEGRITY_DIAL.full * @see INTEGRITY_DIAL * @see #PEGASUS_INTEGRITY_CHECKING_KEY */ public INTEGRITY_DIAL getIntegrityDial() { INTEGRITY_DIAL dial = INTEGRITY_DIAL.full; String value = mProps.getProperty(PEGASUS_INTEGRITY_CHECKING_KEY); if (value == null) { return dial; } // try to assign a dial value try { dial = INTEGRITY_DIAL.valueOf(value); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException( "Invalid value specified for integrity checking " + value, iae); } return dial; } // DEFERRED PLANNING PROPERTIES /** * Returns the root workflow UUID if defined in the properties, else null * * <p>Referred to by the "pegasus.workflow.root.uuid" property. 
* * @return the value in the properties file, else null */ public String getRootWorkflowUUID() { return mProps.getProperty(ROOT_WORKFLOW_UUID_PROPERTY_KEY, null); } /** * Returns the DAXCallback that is to be used while parsing the DAX. * * <p>Referred to by the "pegasus.partitioner.parser.dax.callback" property. * * @return the value specified in the properties file, else DEFAULT_DAX_CALLBACK * @see #DEFAULT_DAX_CALLBACK */ public String getPartitionerDAXCallback() { return mProps.getProperty("pegasus.partitioner.parser.dax.callback", DEFAULT_DAX_CALLBACK); } /** * Returns the key that is to be used as a label key, for labelled partitioning. * * <p>Referred to by the "pegasus.partitioner.label.key" property. * * @return the value specified in the properties file. */ public String getPartitionerLabelKey() { return mProps.getProperty("pegasus.partitioner.label.key"); } /** * Returns the bundle value for a particular transformation. * * <p>Referred to by the "pegasus.partitioner.horizontal.bundle.[txname]" property, where * [txname] is replaced by the name passed as input to this function. * * @param name the logical name of the transformation. * @return the bundle value if specified in the properties file, else null. */ public String getHorizontalPartitionerBundleValue(String name) { StringBuffer key = new StringBuffer(); key.append("pegasus.partitioner.horizontal.bundle.").append(name); return mProps.getProperty(key.toString()); } /** * Returns the collapse value for a particular transformation. * * <p>Referred to by the "pegasus.partitioner.horizontal.collapse.[txname]" property, where * [txname] is replaced by the name passed as input to this function. * * @param name the logical name of the transformation. * @return the collapse value if specified in the properties file, else null. */ public String getHorizontalPartitionerCollapseValue(String name) { StringBuffer key = new StringBuffer(); key.append("pegasus.partitioner.horizontal.collapse.").append(name); return mProps.getProperty(key.toString()); } /** * Returns the key that is to be used as a label key, for labelled clustering. * * <p>Referred to by the "pegasus.clusterer.label.key" property. * * @return the value specified in the properties file. */ public String getClustererLabelKey() { return mProps.getProperty("pegasus.clusterer.label.key"); } /** * Returns the estimator to be used. * * <p>Referred to by the "pegasus.estimator" property. * * @return value specified, else null */ public String getEstimator() { return mProps.getProperty("pegasus.estimator"); } /** * Sets the file backend to which properties may be written out. * * @param directory the directory in which the properties file backend is created * @throws java.io.IOException */ public void setPropertiesFileBackend(String directory) throws IOException { // create a temporary file in directory File dir = new File(directory); // sanity check on the directory sanityCheck(dir); this.mPropsInSubmitDir = File.createTempFile("pegasus.", ".properties", dir).getAbsolutePath(); } /** * Returns the path to the property file that has been written out in the submit directory. * * @return path to the property file, else null */ public String getPropertiesInSubmitDirectory() { /* PM-1523 don't throw exception here. Does not make sense in context of rc-converter */ /* if (mPropsInSubmitDir == null || mPropsInSubmitDir.length() == 0) { throw new RuntimeException( "Properties file does not exist in directory " + mPropsInSubmitDir); } */ return mPropsInSubmitDir; } /** * Writes out the properties to a temporary file in the directory passed. 
* * @return the absolute path to the properties file written in the directory. * @throws IOException in case of error while writing out file. */ public String writeOutProperties() throws IOException { return this.writeOutProperties(true); } /** * Writes out the properties to a temporary file in the directory passed. * * @param sanitizePath boolean indicating whether to sanitize paths for certain properties or * not. * @return the absolute path to the properties file written in the directory. * @throws IOException in case of error while writing out file. */ private String writeOutProperties(boolean sanitizePath) throws IOException { return this.writeOutProperties(new File(mPropsInSubmitDir), sanitizePath, true); } /** * Writes out the properties to a temporary file in the directory passed. * * @param file the file to which properties are written out * @param sanitizePath boolean indicating whether to sanitize paths for certain properties or * not. * @param setInternalVariable whether to set the internal variable that stores the path to the * properties file. * @return the absolute path to the properties file written in the directory. * @throws IOException in case of error while writing out file. */ private String writeOutProperties(File file, boolean sanitizePath, boolean setInternalVariable) throws IOException { if (file == null) { throw new RuntimeException("Properties file does not exist " + file); } // we only want to write out the Pegasus properties for the time being // and any profiles that were mentioned in the properties. Properties properties = new Properties(); for (Profiles.NAMESPACES n : Profiles.NAMESPACES.values()) { Properties p = this.mProps.matchingSubset(namespaceToPropertiesPrefix().get(n), true); properties.putAll(p); } // check if we need to sanitize paths for certain properties or not if (sanitizePath) { sanitizePathForProperty(properties, "pegasus.catalog.site.file"); sanitizePathForProperty(properties, "pegasus.catalog.replica.file"); sanitizePathForProperty(properties, "pegasus.catalog.transformation.file"); } // put in a sensible default for dagman maxpre for pegasus-run to // pick up if not specified beforehand StringBuffer buffer = new StringBuffer(); buffer.append(Dagman.NAMESPACE_NAME).append(".").append(Dagman.MAXPRE_KEY.toLowerCase()); String key = buffer.toString(); if (!properties.containsKey(key)) { // add default value properties.put(key, DEFAULT_DAGMAN_MAX_PRE_VALUE); } // the header of the file StringBuffer header = new StringBuffer(64); header.append("Pegasus USER PROPERTIES AT RUNTIME \n") .append("#ESCAPES IN VALUES ARE INTRODUCED"); // create an output stream to this file and write out the properties OutputStream os = new FileOutputStream(file); // PM-1593 write everything in properties as UTF-8 properties.store( new OutputStreamWriter(os, CommonProperties.DEFAULT_ENCODING_SET), header.toString()); os.close(); // also set it to the internal variable if (setInternalVariable) { mPropsInSubmitDir = file.getAbsolutePath(); return mPropsInSubmitDir; } else { return file.getAbsolutePath(); } } /** * Sanitizes the value in the properties. Ensures that the path is absolute. 
* * @param properties the properties * @param key the key whose value needs to be sanitized */ private void sanitizePathForProperty(Properties properties, String key) { if (properties.containsKey(key)) { String value = properties.getProperty(key); if (value != null) { properties.setProperty(key, new File(value).getAbsolutePath()); } } } /** * Checks the destination location for existence, if it can be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * @throws IOException in case of error while writing out files. */ protected static void sanityCheck(File dir) throws IOException { if (dir.exists()) { // location exists if (dir.isDirectory()) { // ok, isa directory if (dir.canWrite()) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException("Cannot write to existing directory " + dir.getPath()); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory."); } } else { // does not exist, try to make it if (!dir.mkdirs()) { // try to get around JVM bug. JIRA PM-91 if (dir.getPath().endsWith(".")) { // just try to create the parent directory if (!dir.getParentFile().mkdirs()) { throw new IOException("Unable to create directory " + dir.getPath()); } return; } throw new IOException("Unable to create directory destination " + dir.getPath()); } } } /** * This function is used to check whether a deprecated property is used or not. If a deprecated * property is used,it logs a warning message specifying the new property. If both properties * are not set by the user, the function returns the default property. If no default property * then null. * * @param newProperty the new property that should be used. * @param deprecatedProperty the deprecated property that needs to be replaced. * @return the appropriate value. */ private String getProperty(String newProperty, String deprecatedProperty) { return this.getProperty(newProperty, deprecatedProperty, null); } /** * This function is used to check whether a deprecated property is used or not. If a deprecated * property is used,it logs a warning message specifying the new property. If both properties * are not set by the user, the function returns the default property. If no default property * then null. * * @param newProperty the new property that should be used. * @param deprecatedProperty the deprecated property that needs to be replaced. * @param defaultValue the default value that should be returned. * @return the appropriate value. */ private String getProperty(String newProperty, String deprecatedProperty, String defaultValue) { String value = null; // try for the new property // first value = mProps.getProperty(newProperty); if (value == null) { // try the deprecated property if set value = mProps.getProperty(deprecatedProperty); // if the value is not null if (value != null) { // print the warning message logDeprecatedWarning(deprecatedProperty, newProperty); return value; } else { // else return the default value return defaultValue; } } return value; } /** * Logs a warning about the deprecated property. Logs a warning only if it has not been * displayed before. * * @param deprecatedProperty the deprecated property that needs to be replaced. * @param newProperty the new property that should be used. 
*/ private void logDeprecatedWarning(String deprecatedProperty, String newProperty) { if (!mDeprecatedProperties.contains(deprecatedProperty)) { // log only if it had already not been logged StringBuffer sb = new StringBuffer(); sb.append("The property ") .append(deprecatedProperty) .append(" has been deprecated. Use ") .append(newProperty) .append(" instead."); // mLogger.log(sb.toString(),LogManager.WARNING_MESSAGE_LEVEL ); System.err.println("[WARNING] " + sb.toString()); // push the property in to indicate it has already been // warned about mDeprecatedProperties.add(deprecatedProperty); } } /** * Returns a boolean indicating whether to use third party transfers for all types of transfers * or not. * * <p>Referred to by the "pegasus.transfer.*.thirdparty" property. * * @return the boolean value in the properties files, else false if no value specified, or non * boolean specified. */ // private boolean useThirdPartyForAll(){ // return Boolean.parse("pegasus.transfer.*.thirdparty", // false); // } /** * Gets the reference to the internal singleton object. This method is invoked with the * assumption that the singleton method has been invoked once and has been populated. Also that * it has not been disposed by the garbage collector. Can be potentially a buggy way to invoke. * * @return a handle to the Properties class. */ // public static PegasusProperties singletonInstance() { // return singletonInstance( null ); // } /** * Gets a reference to the internal singleton object. * * @param propFileName name of the properties file to picked from $PEGASUS_HOME/etc/ directory. * @return a handle to the Properties class. */ // public static PegasusProperties singletonInstance( String propFileName ) { // if ( pegProperties == null ) { // //only the default properties file // //can be picked up due to the way // //Singleton implemented in CommonProperties.??? // pegProperties = new PegasusProperties( null ); // } // return pegProperties; // } }
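/*
 * Illustrative usage (editor's sketch, not part of the original source): a minimal
 * example of how a caller might obtain a PegasusProperties handle, query a few of the
 * typed accessors documented above, and persist the effective properties into a submit
 * directory. The directory path below is hypothetical; writeOutProperties() requires
 * that setPropertiesFileBackend() has first been called with a writable directory.
 */
class PegasusPropertiesUsageSketch {
    public static void main(String[] args) throws java.io.IOException {
        // Load properties; a null conf path falls back to the default property file handling.
        PegasusProperties props = PegasusProperties.getInstance();

        // Typed accessors wrap the raw property keys documented in the Javadoc above.
        boolean integrity = props.doIntegrityChecking();   // pegasus.integrity.checking
        String aggregator = props.getJobAggregator();      // pegasus.clusterer.job.aggregator
        boolean register = props.createRegistrationJobs(); // pegasus.register

        // Point the file backend at a (hypothetical) submit directory and write the
        // effective properties out, getting back the absolute path of the written file.
        props.setPropertiesFileBackend("/tmp/example-submit-dir");
        String written = props.writeOutProperties();

        System.out.println(integrity + " " + aggregator + " " + register + " -> " + written);
    }
}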
src/edu/isi/pegasus/planner/common/PegasusProperties.java
/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.common; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.namespace.Namespace; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.MissingResourceException; import java.util.Properties; import java.util.Set; /** * A Central Properties class that keeps track of all the properties used by Pegasus. All other * classes access the methods in this class to get the value of the property. It access the * CommonProperties class to read the property file. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision$ * @see edu.isi.pegasus.common.util.CommonProperties */ public class PegasusProperties implements Cloneable { /** the name of the property to disable invoke functionality */ public static final String DISABLE_INVOKE_PROPERTY = "pegasus.gridstart.invoke.disable"; public static final String PEGASUS_KICKSTART_STAT_PROPERTY = "pegasus.gridstart.kickstart.stat"; public static final String PEGASUS_WORKER_NODE_EXECUTION_PROPERTY = "pegasus.execute.*.filesystem.local"; public static final String PEGASUS_TRANSFER_WORKER_PACKAGE_PROPERTY = "pegasus.transfer.worker.package"; public static final String PEGASUS_TRANSFER_WORKER_PACKAGE_STRICT_PROPERTY = "pegasus.transfer.worker.package.strict"; public static final String PEGASUS_TRANSFER_WORKER_PACKAGE_AUTODOWNLOAD_PROPERTY = "pegasus.transfer.worker.package.autodownload"; public static final String PEGASUS_TRANSFORMATION_CATALOG_PROPERTY = "pegasus.catalog.transformation"; public static final String PEGASUS_TRANSFORMATION_CATALOG_FILE_PROPERTY = "pegasus.catalog.transformation.file"; public static final String PEGASUS_REPLICA_CATALOG_PROPERTY = "pegasus.catalog.replica"; public static final String PEGASUS_REPLICA_CATALOG_FILE_PROPERTY = "pegasus.catalog.replica.file"; public static final String PEGASUS_SITE_CATALOG_PROPERTY = "pegasus.catalog.site"; public static final String PEGASUS_SITE_CATALOG_FILE_PROPERTY = "pegasus.catalog.site.file"; public static final String PEGASUS_LOG_METRICS_PROPERTY = "pegasus.log.metrics"; public static final String PEGASUS_LOG_METRICS_PROPERTY_FILE = "pegasus.log.metrics.file"; public static final String PEGASUS_APP_METRICS_PREFIX = "pegasus.metrics.app"; // Replica Catalog Constants public static final String DEFAULT_RC_COLLECTION = "GriphynData"; public static final String DEFAULT_RLI_URL = null; public static final String DEFAULT_RLS_QUERY_MODE = "bulk"; public static final String DEFAULT_RLS_EXIT_MODE = 
"error"; // public static final String DEFAULT_REPLICA_MODE = "rls"; public static final String DEFAULT_RLS_QUERY_ATTRIB = "false"; public static final String DEFAULT_LRC_IGNORE_URL = null; public static final String DEFAULT_RLS_TIMEOUT = "30"; public static final String DEFAULT_EXEC_DIR = ""; public static final String DEFAULT_STORAGE_DIR = ""; public static final String DEFAULT_CONDOR_BIN_DIR = ""; public static final String DEFAULT_CONDOR_CONFIG_DIR = ""; public static final String CONDOR_KICKSTART = "kickstart-condor"; // transfer constants public static final String DEFAULT_STAGING_DELIMITER = "-"; public static final String DEFAULT_TRANSFER_PROCESSES = "4"; public static final String DEFAULT_TRANSFER_STREAMS = "1"; // grid start constants public static final String DEFAULT_INVOKE_LENGTH = "4000"; // site selector constants public static final String DEFAULT_SITE_SELECTOR = "Random"; public static final String DEFAULT_SITE_SELECTOR_TIMEOUT = "300"; public static final String DEFAULT_SITE_SELECTOR_KEEP = "onerror"; /// some simulator constants that are used public static final String DEFAULT_DATA_MULTIPLICATION_FACTOR = "1"; public static final String DEFAULT_COMP_MULTIPLICATION_FACTOR = "1"; public static final String DEFAULT_COMP_ERROR_PERCENTAGE = "0"; public static final String DEFAULT_COMP_VARIANCE_PERCENTAGE = "0"; // collapsing constants public static final String DEFAULT_JOB_AGGREGATOR = "SeqExec"; // some tranformation catalog constants public static final String DEFAULT_TC_MAPPER_MODE = "All"; public static final String DEFAULT_TX_SELECTOR_MODE = "Random"; // logging constants public static final String DEFAULT_LOGGING_FILE = "stdout"; /** Default properties that applies priorities to all kinds of transfer jobs. */ public static final String ALL_TRANSFER_PRIORITY_PROPERTY_KEY = "pegasus.transfer.*.priority"; /** The property key designated the root workflow uuid. */ public static final String ROOT_WORKFLOW_UUID_PROPERTY_KEY = "pegasus.workflow.root.uuid"; /** The default value to be assigned for dagman.maxpre . */ public static final String DEFAULT_DAGMAN_MAX_PRE_VALUE = "1"; /** An enum defining The dial for cleanup algorithm */ public enum CLEANUP_SCOPE { fullahead, deferred }; /** An enum defining the dial for integrity checking */ public enum INTEGRITY_DIAL { none, nosymlink, full }; /** The default DAXCallback that is loaded, if none is specified by the user. */ private static final String DEFAULT_DAX_CALLBACK = "DAX2Graph"; /** The value of the PEGASUS_HOME environment variable. */ private String mPegasusHome; /** The object holding all the properties pertaining to the VDS system. */ private CommonProperties mProps; /** The default transfer priority that needs to be applied to the transfer jobs. */ private String mDefaultTransferPriority; /** The set containing the deprecated properties specified by the user. */ private Set mDeprecatedProperties; /** The pointer to the properties file that is written out in the submit directory. 
*/ private String mPropsInSubmitDir; /** Profiles that are specified in the properties */ private Profiles mProfiles; private static Map<Profiles.NAMESPACES, String> mNamepsaceToPropertiesPrefix; public Map<Profiles.NAMESPACES, String> namespaceToPropertiesPrefix() { if (mNamepsaceToPropertiesPrefix == null) { mNamepsaceToPropertiesPrefix = new HashMap<Profiles.NAMESPACES, String>(); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.condor, "condor"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.dagman, "dagman"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.globus, "globus"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.env, "env"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.hints, "hints"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.pegasus, "pegasus"); mNamepsaceToPropertiesPrefix.put(Profiles.NAMESPACES.selector, "selector"); } return mNamepsaceToPropertiesPrefix; } /** * Returns an instance to this properties object. * * @return a handle to the Properties class. */ public static PegasusProperties getInstance() { return getInstance(null); } /** * Returns an instance to this properties object. * * @param confProperties the path to conf properties, that supersede the loading of properties * from $PEGASUS_HOME/.pegasusrc * @return a handle to the Properties class. */ public static PegasusProperties getInstance(String confProperties) { return nonSingletonInstance(confProperties); } /** * To get a reference to the the object. The properties file that is loaded is from the path * specified in the argument. This is *not implemented* as singleton. However the invocation of * this does modify the internally held singleton object. * * @param confProperties the path to conf properties, that supersede the loading of properties * from $PEGASUS_HOME/.pegasusrc * @return a handle to the Properties class. */ protected static PegasusProperties nonSingletonInstance(String confProperties) { return new PegasusProperties(confProperties); } /** * To get a reference to the the object. The properties file that is loaded is from the path * specified in the argument. * * <p>This is *not implemented* as singleton. However the invocation of this does modify the * internally held singleton object. * * @return a handle to the Properties class. */ public static PegasusProperties nonSingletonInstance() { // return nonSingletonInstance( CommonProperties.PROPERTY_FILENAME ); return nonSingletonInstance(null); } /** * The constructor that constructs the default paths to the various configuration files, and * populates the singleton instance as required. If the properties file passed is null, then the * singleton instance is invoked, else the non singleton instance is invoked. * * @param confProperties the path to conf properties, that supersede the loading of properties * from $PEGASUS_HOME/.pegasusrc */ private PegasusProperties(String confProperties) { // mLogger = LogManager.getInstance(); mDeprecatedProperties = new HashSet(5); initializePropertyFile(confProperties); mDefaultTransferPriority = getDefaultTransferPriority(); } /** * Retrieves profiles from the properties * * @return profiles object. 
*/ public Profiles retrieveProfilesFromProperties() { // retrieve up all the profiles that are specified in // the properties if (mProfiles == null) { mProfiles = retrieveProfilesFromProperties(mProps); // System.out.println( mProfiles ); } return mProfiles; } /** * Retrieves profiles from the properties * * @param properties the common properties so far * @return profiles object. */ protected Profiles retrieveProfilesFromProperties(CommonProperties properties) { Profiles profiles = new Profiles(); // retrieve some matching properties first // traverse through all the enum keys for (Profiles.NAMESPACES n : Profiles.NAMESPACES.values()) { Properties p = properties.matchingSubset(namespaceToPropertiesPrefix().get(n), false); for (Map.Entry<Object, Object> entry : p.entrySet()) { profiles.addProfile(n, (String) entry.getKey(), (String) entry.getValue()); } } return profiles; } /** * Returns the clone of the object. * * @return the clone */ public Object clone() { PegasusProperties props; try { // this will do a shallow clone for all member variables // that is fine for the string variables props = (PegasusProperties) super.clone(); // clone the CommonProperties props.mProfiles = (this.mProfiles == null) ? null : (Profiles) this.mProfiles.clone(); props.mProps = (this.mProps == null) ? null : (CommonProperties) this.mProps.clone(); } catch (CloneNotSupportedException e) { // somewhere in the hierarch chain clone is not implemented throw new RuntimeException( "Clone not implemented in the base class of " + this.getClass().getName(), e); } return props; } /** * Accessor to the bin directory of the Pegasus install * * @return the "etc" directory of the VDS runtime system. */ public File getBinDir() { return mProps.getBinDir(); } /** * Accessor to the schema directory of the Pegasus install * * @return the "etc" directory of the VDS runtime system. */ public File getSchemaDir() { return mProps.getSchemaDir(); } /** * Accessor to the bin directory of the Pegasus install * * @return the "etc" directory of the VDS runtime system. */ public File getSharedDir() { return mProps.getSharedStateDir(); } /** * Returns all the profiles relevant to a particular namespace * * @param ns the namespace corresponding to which you need the profiles */ public Namespace getProfiles(Profiles.NAMESPACES ns) { return this.retrieveProfilesFromProperties().get(ns); } /** * Returns the default path to the condor kickstart. Currently the path defaults to * $PEGASUS_HOME/bin/kickstart-condor. * * @return default path to kickstart condor. */ public String getDefaultPathToCondorKickstart() { StringBuffer sb = new StringBuffer(50); sb.append(mPegasusHome); sb.append(File.separator); sb.append("bin"); sb.append(File.separator); sb.append(CONDOR_KICKSTART); return sb.toString(); } /** * Gets the handle to the properties file. The singleton instance is invoked if the properties * file is null (partly due to the way CommonProperties is implemented ), else the non singleton * is invoked. * * @param confProperties the path to conf properties, that supersede the loading of properties * from $PEGASUS_HOME/.pegasusrc */ private void initializePropertyFile(String confProperties) { try { /* mProps = ( confProperties == null ) ? //invoke the singleton instance CommonProperties.instance() : //invoke the non singleton instance CommonProperties.nonSingletonInstance( confProperties ); */ // we always load non singleton instance? 
// Karan April 27, 2011 mProps = CommonProperties.nonSingletonInstance(confProperties); } catch (IOException e) { System.err.println("unable to read property file: " + e.getMessage()); System.exit(1); } catch (MissingResourceException e) { System.err.println("A required property is missing: " + e.getMessage()); System.exit(1); } } /** * It allows you to get any property from the property file without going through the * corresponding accesor function in this class. For coding and clarity purposes, the function * should be used judiciously, and the accessor function should be used as far as possible. * * @param key the property whose value is desired. * @return String */ public String getProperty(String key) { return mProps.getProperty(key); } /** * Returns the CommonProperties that this object encapsulates. Use only when absolutely * necessary. Use accessor methods whereever possible. * * @return CommonProperties */ public CommonProperties getVDSProperties() { return this.mProps; } /** * Accessor: Overwrite any properties from within the program. * * @param key is the key to look up * @param value is the new property value to place in the system. * @return the old value, or null if it didn't exist before. */ public Object setProperty(String key, String value) { return mProps.setProperty(key, value); } /** * Extracts a specific property key subset from the known properties. The prefix may be removed * from the keys in the resulting dictionary, or it may be kept. In the latter case, exact * matches on the prefix will also be copied into the resulting dictionary. * * @param prefix is the key prefix to filter the properties by. * @param keepPrefix if true, the key prefix is kept in the resulting dictionary. As * side-effect, a key that matches the prefix exactly will also be copied. If false, the * resulting dictionary's keys are shortened by the prefix. An exact prefix match will not * be copied, as it would result in an empty string key. * @return a property dictionary matching the filter key. May be an empty dictionary, if no * prefix matches were found. * @see #getProperty( String ) is used to assemble matches */ public Properties matchingSubset(String prefix, boolean keepPrefix) { return mProps.matchingSubset(prefix, keepPrefix); } /** * Remaps property keys matching a particular prefix to the new prefix, and returns the remapped * properties in a new Properties object * * @param prefix * @param remapToPrefix * @return */ public Properties remap(String prefix, String remapToPrefix) { Properties output = this.matchingSubset(prefix, true); Properties result = new Properties(); if (!output.isEmpty()) { // remap the properties for (String outputProperty : output.stringPropertyNames()) { String key = outputProperty.replace(prefix, remapToPrefix); String value = output.getProperty(outputProperty); result.setProperty(key, value); } } return result; } /** * Returns the properties matching a particular prefix as a list of sorted name value pairs, * where name is the full name of the matching property (including the prefix) and value is it's * value in the properties file. * * @param prefix the prefix for the property names. * @param system boolean indicating whether to match only System properties or all including the * ones in the property file. * @return list of <code>NameValue</code> objects corresponding to the matched properties sorted * by keys. null if no matching property is found. 
*/ public List getMatchingProperties(String prefix, boolean system) { // sanity check if (prefix == null) { return null; } Properties p = (system) ? System.getProperties() : matchingSubset(prefix, true); java.util.Enumeration e = p.propertyNames(); List l = (e.hasMoreElements()) ? new java.util.ArrayList() : null; while (e.hasMoreElements()) { String key = (String) e.nextElement(); NameValue nv = new NameValue(key, p.getProperty(key)); l.add(nv); } Collections.sort(l); return (l.isEmpty()) ? null : l; } /** * Accessor to $PEGASUS_HOME/etc. The files in this directory have a low change frequency, are * effectively read-only, they reside on a per-machine basis, and they are valid usually for a * single user. * * @return the "etc" directory of the VDS runtime system. */ public File getSysConfDir() { return mProps.getSysConfDir(); } /** * Removes a property from the soft state. * * @param key the key * @return the corresponding value if key exits, else null */ public String removeProperty(String key) { return mProps.removeProperty(key); } // PROPERTIES RELATED TO SCHEMAS /** * Returns the location of the schema for the DAX. * * <p>Referred to by the "pegasus.schema.dax" property. * * @return location to the DAX schema. */ public String getDAXSchemaLocation() { return this.getDAXSchemaLocation(null); } /** * Returns the location of the schema for the DAX. * * <p>Referred to by the "pegasus.schema.dax" property. * * @param defaultLocation the default location to the schema. * @return location to the DAX schema specified in the properties file, else the default * location if no value specified. */ public String getDAXSchemaLocation(String defaultLocation) { return mProps.getProperty("pegasus.schema.dax", defaultLocation); } /** * Returns the location of the schema for the PDAX. * * <p>Referred to by the "pegasus.schema.pdax" property * * @param defaultLocation the default location to the schema. * @return location to the PDAX schema specified in the properties file, else the default * location if no value specified. */ public String getPDAXSchemaLocation(String defaultLocation) { return mProps.getProperty("pegasus.schema.pdax", defaultLocation); } // DIRECTORY CREATION PROPERTIES /** * Returns the name of the class that the user wants, to insert the create directory jobs in the * graph in case of creating random directories. * * <p>Referred to by the "pegasus.dir.create.strategy" property. * * @return the create dir classname if specified in the properties file, else Minimal. */ public String getCreateDirClass() { return getProperty("pegasus.dir.create.strategy", "pegasus.dir.create", "Minimal"); } /** * Returns the name of the class that the user wants, to render the directory creation jobs. It * dictates what mechanism is used to create the directory for a workflow. * * <p>Referred to by the "pegasus.dir.create.impl" property. * * @return the create dir classname if specified in the properties file, else * DefaultImplementation. */ public String getCreateDirImplementation() { return mProps.getProperty("pegasus.dir.create.impl", "DefaultImplementation"); } /** * It specifies whether to use the extended timestamp format for generation of timestamps that * are used to create the random directory name, and for the classads generation. * * <p>Referred to by the "pegasus.dir.timestamp.extended" property. * * @return the value specified in the properties file if valid boolean, else false. 
*/ public boolean useExtendedTimeStamp() { return Boolean.parse(mProps.getProperty("pegasus.dir.timestamp.extended"), false); } /** * Returns a boolean indicating whether to use timestamp for directory name creation or not. * * <p>Referred to by "pegasus.dir.useTimestamp" property. * * @return the boolean value specified in the properties files, else false. */ public boolean useTimestampForDirectoryStructure() { return Boolean.parse(mProps.getProperty("pegasus.dir.useTimestamp"), false); } /** * Returns the execution directory suffix or absolute specified that is appended/replaced to the * exec-mount-point specified in the pool catalog for the various pools. * * <p>Referred to by the "pegasus.dir.exec" property * * @return the value specified in the properties file, else the default suffix. * @see #DEFAULT_EXEC_DIR */ public String getExecDirectory() { return mProps.getProperty("pegasus.dir.exec", DEFAULT_EXEC_DIR); } /** * Returns the the path to the logs directory on the submit host. This is the directory where * the condor logs for the workflows are created. The logs directory should be on the local * filesystem else condor may complain * * <p>Referred to by the "pegasus.dir.submit.logs" property * * @return the value in the properties file, else null */ public String getSubmitLogsDirectory() { return mProps.getProperty("pegasus.dir.submit.logs"); } /** * Returns a boolean indicating whether the submit directory for the sub workflows should * include the label of the sub workflow or not. * * <p>Referred to by the "pegasus.dir.submit.subwf.labelbased" property * * @return the value in the properties file, else false */ public boolean labelBasedSubmitDirectoryForSubWorkflows() { return Boolean.parse(mProps.getProperty("pegasus.dir.submit.subwf.labelbased"), false); } /** * Returns the storage directory suffix or absolute specified that is appended/replaced to the * storage-mount-point specified in the pool catalog for the various pools. * * <p>Referred to by the "pegasus.dir.storage" property. * * @return the value specified in the properties file, else the default suffix. * @see #DEFAULT_STORAGE_DIR */ public String getStorageDirectory() { return mProps.getProperty("pegasus.dir.storage", DEFAULT_STORAGE_DIR); } /** * Returns a boolean indicating whether to have a deep storage directory structure or not while * staging out data to the output site. * * <p>Referred to by the "pegasus.dir.storage.deep" property. * * @return the boolean value specified in the properties files, else false. */ public boolean useDeepStorageDirectoryStructure() { return Boolean.parse(mProps.getProperty("pegasus.dir.storage.deep"), false); } // PROPERTIES RELATED TO CLEANUP /** * Returns the name of the Strategy class that the user wants, to insert the cleanup jobs in the * graph. * * <p>Referred to by the "pegasus.file.cleanup.strategy" property. * * @return the create dir classname if specified in the properties file, else InPlace. */ public String getCleanupStrategy() { return mProps.getProperty("pegasus.file.cleanup.strategy", "InPlace"); } /** * Returns the name of the class that the user wants, to render the cleanup jobs. It dictates * what mechanism is used to remove the files on a remote system. * * <p>Referred to by the "pegasus.file.cleanup.impl" property. * * @return the cleanup implementation classname if specified in the properties file, else * Cleanup. 
*/ public String getCleanupImplementation() { return mProps.getProperty("pegasus.file.cleanup.impl", "Cleanup"); } /** * Returns the maximum number of clean up jobs created per level of the workflow in case of * InPlace cleanup. * * <p>Referred to by the "pegasus.file.cleanup.clusters.num" property * * @return the value in the property file , else null */ public String getMaximumCleanupJobsPerLevel() { return mProps.getProperty("pegasus.file.cleanup.clusters.num"); } /** * Returns the fraction of cleanup jobs clustered into a single clustered cleanup job. * * <p>Referred to by the "pegasus.file.cleanup.clusters.size" property * * @return the value in the property file , else null */ public String getClusterSizeCleanupJobsPerLevel() { return mProps.getProperty("pegasus.file.cleanup.clusters.size"); } /** * Returns the maximum available space per site. * * <p>Referred to by the "pegasus.file.cleanup.constraint.maxspace" property * * @return the value in the property file , else null */ public String getCleanupConstraintMaxSpace() { return mProps.getProperty("pegasus.file.cleanup.constraint.maxspace"); } /** * Returns the scope for file cleanup. It is used to trigger cleanup in case of deferred * planning. The vaild property values accepted are - fullahead - deferred * * <p>Referred to by the property "pegasus.file.cleanup.scope" * * @return the value in property file if specified, else fullahead */ public CLEANUP_SCOPE getCleanupScope() { CLEANUP_SCOPE scope = CLEANUP_SCOPE.fullahead; String value = mProps.getProperty("pegasus.file.cleanup.scope"); if (value == null) { return scope; } // try to assign a cleanup value try { scope = CLEANUP_SCOPE.valueOf(value); } catch (IllegalArgumentException iae) { // ignore do nothing. } return scope; } // PROPERTIES RELATED TO THE TRANSFORMATION CATALOG /** * Returns the mode to be used for accessing the Transformation Catalog. * * <p>Referred to by the "pegasus.catalog.transformation" property. * * @return the value specified in properties file */ public String getTCMode() { return mProps.getProperty(PegasusProperties.PEGASUS_TRANSFORMATION_CATALOG_PROPERTY); } /** * Returns the location of the transformation catalog. * * <p>Referred to by "pegasus.catalog.transformation.file" property. * * @return the value specified in the properties file null */ public String getTCPath() { return mProps.getProperty(PegasusProperties.PEGASUS_TRANSFORMATION_CATALOG_FILE_PROPERTY); } /** * Returns the mode for loading the transformation mapper that sits in front of the * transformation catalog. * * <p>Referred to by the "pegasus.catalog.transformation.mapper" property. * * @return the value specified in the properties file, else default tc mapper mode. * @see #DEFAULT_TC_MAPPER_MODE */ public String getTCMapperMode() { return mProps.getProperty("pegasus.catalog.transformation.mapper", DEFAULT_TC_MAPPER_MODE); } // REPLICA CATALOG PROPERTIES /** * Returns the replica mode. It identifies the ReplicaMechanism being used by Pegasus to * determine logical file locations. * * <p>Referred to by the "pegasus.catalog.replica" property. * * @return the replica mode, that is used to load the appropriate implementing class if property * is specified, else null */ public String getReplicaMode() { return mProps.getProperty(PEGASUS_REPLICA_CATALOG_PROPERTY); } /** * Returns the properties required for connecting to replica catalog to be used for registering * outputs. If not specified, then input replica catalog is used. 
* * <p>Referred to by the "pegasus.catalog.replica.output" property. * * @return the replica mode, that is used to load the appropriate implementing class if property * is specified, else null */ public String getOutputReplicaProperites() { return mProps.getProperty(PEGASUS_REPLICA_CATALOG_PROPERTY); } /** * Returns the url to the RLI of the RLS. * * <p>Referred to by the "pegasus.rls.url" property. * * @return the value specified in properties file, else DEFAULT_RLI_URL. * @see #DEFAULT_RLI_URL */ public String getRLIURL() { return mProps.getProperty("pegasus.catalog.replica.url", DEFAULT_RLI_URL); } /** * It returns the timeout value in seconds after which to timeout in case of no activity from * the RLS. * * <p>Referred to by the "pegasus.rc.rls.timeout" property. * * @return the timeout value if specified else, DEFAULT_RLS_TIMEOUT. * @see #DEFAULT_RLS_TIMEOUT */ public int getRLSTimeout() { String prop = mProps.getProperty("pegasus.catalog.replica.rls.timeout", DEFAULT_RLS_TIMEOUT); int val; try { val = Integer.parseInt(prop); } catch (Exception e) { return Integer.parseInt(DEFAULT_RLS_TIMEOUT); } return val; } // PROPERTIES RELATED TO SITE CATALOG /** * Returns the mode to be used for accessing the pool information. * * <p>Referred to by the "pegasus.catalog.site" property. * * @return the site catalog implementor, that is used to load the appropriate implementing class * if the property is specified, else NULL */ public String getSiteCatalogImplementor() { return mProps.getProperty(PegasusProperties.PEGASUS_SITE_CATALOG_PROPERTY); } /** * Returns the location of the schema for the DAX. * * <p>Referred to by the "pegasus.schema.sc" property. * * @return the location of pool schema if specified in properties file, else null. */ public String getPoolSchemaLocation() { return this.getPoolSchemaLocation(null); } /** * Returns the location of the schema for the site catalog file. * * <p>Referred to by the "pegasus.schema.sc" property * * @param defaultLocation the default location where the schema should be if no other location * is specified. * @return the location specified by the property, else defaultLocation. */ public String getPoolSchemaLocation(String defaultLocation) { return mProps.getProperty("pegasus.schema.sc", defaultLocation); } // PROVENANCE CATALOG PROPERTIES /** * Returns the provenance store to use to log the refiner actions. * * <p>Referred to by the "pegasus.catalog.provenance.refinement" property. * * @return the value set in the properties, else null if not set. */ public String getRefinementProvenanceStore() { return mProps.getProperty("pegasus.catalog.provenance.refinement"); } // TRANSFER MECHANISM PROPERTIES /** * Returns the transfer implementation that is to be used for constructing the transfer jobs. * * <p>Referred to by the "pegasus.transfer.*.impl" property. * * @return the transfer implementation */ public String getTransferImplementation() { return getTransferImplementation("pegasus.transfer.*.impl"); } /** * Returns the sls transfer implementation that is to be used for constructing the transfer * jobs. * * <p>Referred to by the "pegasus.transfer.lite.*.impl" property. * * @return the transfer implementation */ /* PM-810 done away. public String getSLSTransferImplementation(){ return getTransferImplementation( "pegasus.transfer.lite.*.impl" ); } */ /** * Returns the transfer implementation. * * @param property property name. 
* @return the transfer implementation, else the one specified by "pegasus.transfer.*.impl", */ public String getTransferImplementation(String property) { return mProps.getProperty(property, getDefaultTransferImplementation()); } /** * Returns a boolean indicating whether to stage sls files via Pegasus First Level Staging or * let Condor do it. * * <p>Referred to by the property "pegasus.transfer.stage.lite.file" * * @return boolean value mentioned in the properties or else the default value which is true. */ public boolean stageSLSFilesViaFirstLevelStaging() { return Boolean.parse(mProps.getProperty("pegasus.transfer.stage.lite.file"), false); } /** * Returns the default list of third party sites. * * <p>Referred to by the "pegasus.transfer.*.thirdparty.sites" property. * * @return the value specified in the properties file, else null. */ private String getDefaultThirdPartySites() { return mProps.getProperty("pegasus.transfer.*.thirdparty.sites"); } /** * Returns the default transfer implementation to be picked up for constructing transfer jobs. * * <p>Referred to by the "pegasus.transfer.*.impl" property. * * @return the value specified in the properties file, else null. */ private String getDefaultTransferImplementation() { return mProps.getProperty("pegasus.transfer.*.impl"); } /** * Returns a boolean indicating whether to bypass first level staging of inputs. Useful in case * of PegasusLite setup * * <p>Referred to by the "pegasus.transfer.bypass.input.staging" property. * * @return boolean value specified , else false */ public boolean bypassFirstLevelStagingForInputs() { return Boolean.parse(mProps.getProperty("pegasus.transfer.bypass.input.staging"), false); } /** * Returns the default priority for the transfer jobs if specified in the properties file. * * @return the value specified in the properties file, else null if non integer value or no * value specified. */ private String getDefaultTransferPriority() { String prop = mProps.getProperty(this.ALL_TRANSFER_PRIORITY_PROPERTY_KEY); int val = -1; try { val = Integer.parseInt(prop); } catch (Exception e) { return null; } return Integer.toString(val); } /** * Returns the base source URL where pointing to the directory where the worker package * executables for pegasus releases are kept. * * <p>Referred to by the "pegasus.transfer.setup.source.base.url * * @return the value in the property file, else null */ public String getBaseSourceURLForSetupTransfers() { return mProps.getProperty("pegasus.transfer.setup.source.base.url"); } /** * Returns the transfer refiner that is to be used for adding in the transfer jobs in the * workflow * * <p>Referred to by the "pegasus.transfer.refiner" property. * * @return the transfer refiner, else null */ public String getTransferRefiner() { return mProps.getProperty("pegasus.transfer.refiner"); } /** * Returns whether to introduce quotes around url's before handing to g-u-c and condor. * * <p>Referred to by "pegasus.transfer.single.quote" property. * * @return boolean value specified in the properties file, else true in case of non boolean * value being specified or property not being set. */ public boolean quoteTransferURL() { return Boolean.parse(mProps.getProperty("pegasus.transfer.single.quote"), true); } /** * It returns the number of processes of g-u-c that the transfer script needs to spawn to do the * transfers. This is applicable only in the case where the transfer executable has the * capability of spawning processes. 
It should not be confused with the number of streams that * each process opens. By default it is set to 4. In case a non integer value is specified in * the properties file it returns the default value. * * <p>Referred to by "pegasus.transfer.throttle.processes" property. * * @return the number of processes specified in properties file, else DEFAULT_TRANSFER_PROCESSES * @see #DEFAULT_TRANSFER_PROCESSES */ public String getNumOfTransferProcesses() { String prop = mProps.getProperty( "pegasus.transfer.throttle.processes", DEFAULT_TRANSFER_PROCESSES); int val = -1; try { val = Integer.parseInt(prop); } catch (Exception e) { return DEFAULT_TRANSFER_PROCESSES; } return Integer.toString(val); } /** * It returns the number of streams that each transfer process uses to do the ftp transfer. By * default it is set to 1.In case a non integer value is specified in the properties file it * returns the default value. * * <p>Referred to by "pegasus.transfer.throttle.streams" property. * * @return the number of streams specified in the properties file, else * DEFAULT_TRANSFER_STREAMS. * @see #DEFAULT_TRANSFER_STREAMS */ public String getNumOfTransferStreams() { String prop = mProps.getProperty("pegasus.transfer.throttle.streams", DEFAULT_TRANSFER_STREAMS); int val = -1; try { val = Integer.parseInt(prop); } catch (Exception e) { return DEFAULT_TRANSFER_STREAMS; } return Integer.toString(val); } /** * It specifies whether the underlying transfer mechanism being used should use the force option * if available to transfer the files. * * <p>Referred to by "pegasus.transfer.force" property. * * @return boolean value specified in the properties file,else false in case of non boolean * value being specified or property not being set. */ public boolean useForceInTransfer() { return Boolean.parse(mProps.getProperty("pegasus.transfer.force"), false); } /** * It returns whether the use of symbolic links in case where the source and destination files * happen to be on the same file system. * * <p>Referred to by "pegasus.transfer.links" property. * * @return boolean value specified in the properties file, else false in case of non boolean * value being specified or property not being set. */ public boolean getUseOfSymbolicLinks() { String value = mProps.getProperty("pegasus.transfer.links"); return Boolean.parse(value, false); } /** * Returns the comma separated list of third party sites, specified in the properties. * * @param property property name. * @return the comma separated list of sites. */ public String getThirdPartySites(String property) { String value = mProps.getProperty(property); return value; } /** * Returns the comma separated list of third party sites for which the third party transfers are * executed on the remote sites. * * @param property property name. * @return the comma separated list of sites. */ public String getThirdPartySitesRemote(String property) { return mProps.getProperty(property); } /** * Returns the delimiter to be used for constructing the staged executable name, during transfer * of executables to remote sites. * * <p>Referred to by the "pegasus.transfer.staging.delimiter" property. * * @return the value specified in the properties file, else DEFAULT_STAGING_DELIMITER * @see #DEFAULT_STAGING_DELIMITER */ public String getStagingDelimiter() { return mProps.getProperty("pegasus.transfer.staging.delimiter", DEFAULT_STAGING_DELIMITER); } /** * Returns the list of sites for which the chmod job creation has to be disabled for executable * staging. 
* * <p>Referred to by the "pegasus.transfer.disable.chmod" property. * * @return a comma separated list of site names. */ public String getChmodDisabledSites() { return mProps.getProperty("pegasus.transfer.disable.chmod.sites"); } /** * It specifies if the worker package needs to be staged to the remote site or not. * * <p>Referred to by "pegasus.transfer.worker.package" property. * * @return boolean value specified in the properties file,else false in case of non boolean * value being specified or property not being set. */ public boolean transferWorkerPackage() { return Boolean.parse(mProps.getProperty(PEGASUS_TRANSFER_WORKER_PACKAGE_PROPERTY), false); } /** * A Boolean property to indicate whether to enforce strict checks against provided worker * package for jobs in PegasusLite mode. if a job comes with worker package and it does not * match fully with worker node architecture , it will revert to Pegasus download website. * Default value is true. * * <p>Referred to by "pegasus.transfer.worker.package.strict" property. * * @return boolean value specified in the properties file,else true in case of non boolean value * being specified or property not being set. */ public boolean enforceStrictChecksForWorkerPackage() { return Boolean.parse( mProps.getProperty(PEGASUS_TRANSFER_WORKER_PACKAGE_STRICT_PROPERTY), true); } /** * A Boolean property to indicate whether a pegasus lite job is allowed to download from Pegasus * website. * * <p>Referred to by "pegasus.transfer.worker.package.autodownload" property. * * @return boolean value specified in the properties file,else true in case of non boolean value * being specified or property not being set. */ public boolean allowDownloadOfWorkerPackageFromPegasusWebsite() { return Boolean.parse( mProps.getProperty(PEGASUS_TRANSFER_WORKER_PACKAGE_AUTODOWNLOAD_PROPERTY), true); } /** * Returns the arguments with which the transfer executable needs to be invoked. * * <p>Referred to by "pegasus.transfer.arguments" property. * * @return the arguments specified in the properties file, else null if property is not * specified. */ public String getTransferArguments() { return mProps.getProperty("pegasus.transfer.arguments"); } /** * Returns the extra arguments with which the transfer executable used in PegasusLite needs to * be invoked. * * <p>Referred to by "pegasus.transfer.lite.arguments" property. * * @return the arguments specified in the properties file, else null if property is not * specified. */ public String getSLSTransferArguments() { return mProps.getProperty("pegasus.transfer.lite.arguments"); } /** * Returns the priority to be set for the stage in transfer job. * * <p>Referred to by "pegasus.transfer.stagein.priority" property if set, else by * "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the properties, else null. */ public String getTransferStageInPriority() { return getTransferPriority("pegasus.transfer.stagein.priority"); } /** * Returns the priority to be set for the stage out transfer job. * * <p>Referred to by "pegasus.transfer.stageout.priority" property if set, else by * "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the properties, else null. */ public String getTransferStageOutPriority() { return getTransferPriority("pegasus.transfer.stageout.priority"); } /** * Returns the priority to be set for the interpool transfer job. 
* * <p>Referred to by "pegasus.transfer.inter.priority" property if set, else by * "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the properties, else null. */ public String getTransferInterPriority() { return getTransferPriority("pegasus.transfer.inter.priority"); } /** * Returns the transfer priority. * * @param property property name. * @return the priority as String if a valid integer specified in the properties as value to * property, else null. */ private String getTransferPriority(String property) { String value = mProps.getProperty(property, mDefaultTransferPriority); int val = -1; try { val = Integer.parseInt(value); } catch (Exception e) { } // if value in properties file is corrupted // again use the default transfer priority return (val < 0) ? mDefaultTransferPriority : Integer.toString(val); } // REPLICA SELECTOR FUNCTIONS /** * Returns the mode for loading the transformation selector that selects amongst the various * candidate transformation catalog entry objects. * * <p>Referred to by the "pegasus.selector.transformation" property. * * @return the value specified in the properties file, else default transformation selector. * @see #DEFAULT_TC_MAPPER_MODE */ public String getTXSelectorMode() { return mProps.getProperty("pegasus.selector.transformation", DEFAULT_TX_SELECTOR_MODE); } /** * Returns the name of the selector to be used for selection amongst the various replicas of a * single lfn. * * <p>Referred to by the "pegasus.selector.replica" property. * * @return the name of the selector if the property is specified, else null */ public String getReplicaSelector() { return mProps.getProperty("pegasus.selector.replica"); } /** * Returns a comma separated list of sites, that are restricted in terms of data movement from * the site. * * <p>Referred to by the "pegasus.rc.restricted.sites" property. * * @return comma separated list of sites. */ // public String getRestrictedSites(){ // return mProps.getProperty("pegasus.rc.restricted.sites",""); // } /** * Returns a comma separated list of sites, from which to prefer data transfers for all sites. * * <p>Referred to by the "pegasus.selector.replica.*.prefer.stagein.sites" property. * * @return comma separated list of sites. */ public String getAllPreferredSites() { return mProps.getProperty("pegasus.selector.replica.*.prefer.stagein.sites", ""); } /** * Returns a comma separated list of sites, from which to ignore data transfers for all sites. * Replaces the old pegasus.rc.restricted.sites property. * * <p>Referred to by the "pegasus.selector.ignore.*.prefer.stagein.sites" property. * * @return comma separated list of sites. */ public String getAllIgnoredSites() { return mProps.getProperty("pegasus.selector.replica.*.ignore.stagein.sites", ""); } // SITE SELECTOR PROPERTIES /** * Returns the class name of the site selector, that needs to be invoked to do the site * selection. * * <p>Referred to by the "pegasus.selector.site" property. * * @return the classname corresponding to the site selector that needs to be invoked if * specified in the properties file, else the default selector specified by * DEFAULT_SITE_SELECTOR. * @see #DEFAULT_SITE_SELECTOR */ public String getSiteSelectorMode() { return mProps.getProperty("pegasus.selector.site", DEFAULT_SITE_SELECTOR); } /** * Returns the path to the external site selector that needs to be called out to make the * decision of site selection. * * <p>Referred to by the "pegasus.selector.site.path" property. 
* * @return the path to the external site selector if specified in the properties file, else * null. */ public String getSiteSelectorPath() { return mProps.getProperty("pegasus.selector.site.path"); } /** * Returns the timeout value in seconds after which to time out in case of no activity from * the external site selector. * * <p>Referred to by the "pegasus.selector.site.timeout" property. * * @return the timeout value if specified, else DEFAULT_SITE_SELECTOR_TIMEOUT. * @see #DEFAULT_SITE_SELECTOR_TIMEOUT */ public int getSiteSelectorTimeout() { String prop = mProps.getProperty("pegasus.selector.site.timeout", DEFAULT_SITE_SELECTOR_TIMEOUT); int val; try { val = Integer.parseInt(prop); } catch (Exception e) { return Integer.parseInt(DEFAULT_SITE_SELECTOR_TIMEOUT); } return val; } /** * Returns a value designating whether we need to keep the temporary files that are passed to * the external site selectors. The check for the valid tristate value should be done at the * calling function end. This just passes on the value the user specified in the properties file. * * <p>Referred to by the "pegasus.selector.site.keep.tmp" property. * * @return the value of the property if specified, else DEFAULT_SITE_SELECTOR_KEEP * @see #DEFAULT_SITE_SELECTOR_KEEP */ public String getSiteSelectorKeep() { return mProps.getProperty("pegasus.selector.site.keep.tmp", DEFAULT_SITE_SELECTOR_KEEP); } // PROPERTIES RELATED TO KICKSTART AND EXITCODE /** * Returns the GRIDSTART that is to be used to launch the jobs on the grid. * * <p>Referred to by the "pegasus.gridstart" property. * * @return the value specified in the property file, else null */ public String getGridStart() { return mProps.getProperty("pegasus.gridstart"); } /** * Returns a boolean indicating whether kickstart should set the x bit on staged executables before * launching them. * * <p>Referred to by the "pegasus.gridstart.kickstart.set.xbit" property. * * @return the value specified in the property file, else false */ public boolean setXBitWithKickstart() { return Boolean.parse(mProps.getProperty("pegasus.gridstart.kickstart.set.xbit"), false); } /** * Returns the value indicating whether to turn the stat option for kickstart on or not. By * default it is turned on. * * <p>Referred to by the "pegasus.gridstart.kickstart.stat" property. * * @return value specified in the property file, else null. */ public String doStatWithKickstart() { return mProps.getProperty(PEGASUS_KICKSTART_STAT_PROPERTY); } /** * Returns a boolean indicating whether to generate the LOF files for the jobs or not. This is * used to generate LOF files, but not trigger the stat option. * * <p>Referred to by the "pegasus.gridstart.generate.lof" property. * * @return the boolean value specified in the property file, else false if not specified or non * boolean specified. */ public boolean generateLOFFiles() { return Boolean.parse(mProps.getProperty("pegasus.gridstart.generate.lof"), false); } /** * Returns a boolean indicating whether to use invoke in kickstart always or not. * * <p>Referred to by the "pegasus.gridstart.invoke.always" property. * * @return the boolean value specified in the property file, else false if not specified or non * boolean specified. */ public boolean useInvokeInGridStart() { return Boolean.parse(mProps.getProperty("pegasus.gridstart.invoke.always"), false); } /** * Returns a boolean indicating whether to disable use of invoke or not. * * <p>Referred to by the "pegasus.gridstart.invoke.disable" property. 
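     *
     * <p>A hypothetical snippet showing the related invoke settings side by side; the values
     * are placeholders:
     *
     * <pre>
     *   pegasus.gridstart.invoke.always  = false
     *   pegasus.gridstart.invoke.disable = true
     * </pre>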
* * @return the boolean value specified in the property file, else false if not specified or non * boolean specified. */ public boolean disableInvokeInGridStart() { return Boolean.parse(mProps.getProperty(PegasusProperties.DISABLE_INVOKE_PROPERTY), false); } /** * Returns the trigger value for invoking an application through kickstart using kickstart. If * the arguments value being constructed in the condor submit file is more than this value, then * invoke is used to pass the arguments to the remote end. Helps in bypassing the Condor 4K * limit. * * <p>Referred to by "pegasus.gridstart.invoke.length" property. * * @return the long value specified in the properties files, else DEFAULT_INVOKE_LENGTH * @see #DEFAULT_INVOKE_LENGTH */ public long getGridStartInvokeLength() { long value = Long.parseLong(this.DEFAULT_INVOKE_LENGTH); String st = mProps.getProperty("pegasus.gridstart.invoke.length", this.DEFAULT_INVOKE_LENGTH); try { value = Long.parseLong(st); } catch (Exception e) { // ignore malformed values from // the property file } return value; } /** * Returns a boolean indicating whehter to pass extra options to kickstart or not. The extra * options have appeared only in VDS version 1.4.2 (like -L and -T). * * <p>Referred to by "pegasus.gridstart.label" property. * * @return the boolean value specified in the property file, else true if not specified or non * boolean specified. */ public boolean generateKickstartExtraOptions() { return Boolean.parse(mProps.getProperty("pegasus.gridstart.label"), true); } /** * Returns the mode adding the postscripts for the jobs. At present takes in only two values all * or none default being none. * * <p>Referred to by the "pegasus.exitcode.scope" property. * * @return the mode specified by the property, else DEFAULT_POSTSCRIPT_MODE * @see #DEFAULT_POSTSCRIPT_MODE */ /* public String getPOSTScriptScope() { return mProps.getProperty( "pegasus.exitcode.dial", DEFAULT_POSTSCRIPT_MODE ); } */ /** * Returns the postscript to use with the jobs in the workflow. They maybe overriden by values * specified in the profiles. * * <p>Referred to by the "pegasus.exitcode.impl" property. * * @return the postscript to use for the workflow, else null if not specified in the properties. */ /* public String getPOSTScript(){ return mProps.getProperty( "pegasus.exitcode.impl" ); } */ /** * Returns the path to the exitcode executable to be used. * * <p>Referred to by the "pegasus.exitcode.path.[value]" property, where [value] is replaced by * the value passed an input to this function. * * @param value the short name of the postscript whose path we want. * @return the path to the postscript if specified in properties file. */ /* public String getPOSTScriptPath( String value ){ value = ( value == null ) ? "*" : value; StringBuffer key = new StringBuffer(); key.append( "pegasus.exitcode.path." ).append( value ); return mProps.getProperty( key.toString() ); } */ /** * Returns the argument string containing the arguments by which exitcode is invoked. * * <p>Referred to by the "pegasus.exitcode.arguments" property. * * @return String containing the arguments,else empty string. */ /* public String getPOSTScriptArguments() { return mProps.getProperty( "pegasus.exitcode.arguments", ""); } */ /** * Returns a boolean indicating whether to turn debug on or not for exitcode. By default false * is returned. * * <p>Referred to by the "pegasus.exitcode.debug" property. * * @return boolean value. 
*/ public boolean setPostSCRIPTDebugON() { return Boolean.parse(mProps.getProperty("pegasus.exitcode.debug"), false); } /** * Returns the argument string containing the arguments by which prescript is invoked. * * <p>Referred to by the "pegasus.prescript.arguments" property. * * @return String containing the arguments. null if not specified. */ /* public String getPrescriptArguments() { return mProps.getProperty( "pegasus.prescript.arguments","" ); } */ // PROPERTIES RELATED TO REMOTE SCHEDULERS /** * Returns the project names that need to be appended to the RSL String while creating the * submit files. Referred to by pegasus.remote.projects property. If present, Pegasus ends up * inserting an RSL string (project = value) in the submit file. * * @return a comma separated list of key value pairs if property specified, else null. */ // public String getRemoteSchedulerProjects() { // return mProps.getProperty( "pegasus.remote.scheduler.projects" ); // } /** * Returns the queue names that need to be appended to the RSL String while creating the submit * files. Referred to by the pegasus.remote.queues property. If present, Pegasus ends up * inserting an RSL string (project = value) in the submit file. * * @return a comma separated list of key value pairs if property specified, else null. */ // public String getRemoteSchedulerQueues() { // return mProps.getProperty( "pegasus.remote.scheduler.queues" ); // } /** * Returns the maxwalltimes for the various pools that need to be appended to the RSL String * while creating the submit files. Referred to by the pegasus.scheduler.remote.queues property. * If present, Pegasus ends up inserting an RSL string (project = value) in the submit file. * * @return a comma separated list of key value pairs if property specified, else null. */ // public String getRemoteSchedulerMaxWallTimes() { // return mProps.getProperty( "pegasus.remote.scheduler.min.maxwalltime" ); // } /** * Returns the minimum walltimes that need to be enforced. * * <p>Referred to by "pegasus.scheduler.remote.min.[key]" property. * * @param key the appropriate globus RSL key. Generally are maxtime|maxwalltime|maxcputime * @return the integer value as specified, -1 in case of no value being specified. */ // public int getMinimumRemoteSchedulerTime( String key ){ // StringBuffer property = new StringBuffer(); // property.append( "pegasus.remote.scheduler.min." ).append( key ); // // int val = -1; // // try { // val = Integer.parseInt( mProps.getProperty( property.toString() ) ); // } catch ( Exception e ) { // } // return val; // } // PROPERTIES RELATED TO CONDOR /** * Completely disable placing a symlink for Condor common log (indiscriminately). * * <p>Starting 4.2.1 this defaults to "false" . * * <p>Referred to by the "pegasus.condor.logs.symlink" property. * * @return value specified by the property. Defaults to false. */ public boolean symlinkCommonLog() { return Boolean.parse(mProps.getProperty("pegasus.condor.logs.symlink"), false); } /** * Whether Pegasus should associate condor concurrency limits or not * * <p>Referred to by the "pegasus.condor.concurrency.limits" property. * * @return value specified by the property. Defaults to false. */ public boolean associateCondorConcurrencyLimits() { return Boolean.parse(mProps.getProperty("pegasus.condor.concurrency.limits"), false); } /** * Returns a boolean indicating whether we want to Condor Quote the arguments of the job or not. * * <p>Referred to by the "pegasus.condor.arguments.quote" property. 
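     *
     * <p>Quoting defaults to true below, so a snippet such as the following (illustrative
     * only) would only be needed to switch it off:
     *
     * <pre>
     *   pegasus.condor.arguments.quote = false
     * </pre>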
* * @return boolean */ public boolean useCondorQuotingForArguments() { return Boolean.parse(mProps.getProperty("pegasus.condor.arguments.quote"), true); } /** * Returns the number of times Condor should retry running a job in case of failure. The retry * ends up reinvoking the prescript, that can change the site selection decision in case of * failure. * * <p>Referred to by the "pegasus.dagman.retry" property. * * @return an int denoting the number of times to retry. null if not specified or invalid entry. */ /* public String getCondorRetryValue() { String prop = mProps.getProperty( "pegasus.dagman.retry" ); int val = -1; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { return null; } return Integer.toString( val ); } */ /** * Tells whether to stream condor output or not. By default it is true , meaning condor streams * the output from the remote hosts back to the submit hosts, instead of staging it. This helps * in saving filedescriptors at the jobmanager end. * * <p>If it is set to false, output is not streamed back. The line "stream_output = false" * should be added in the submit files for kickstart jobs. * * <p>Referred to by the "pegasus.condor.output.stream" property. * * @return the boolean value specified by the property, else false in case of invalid value or * property not being specified. */ /* public boolean streamCondorOutput() { return Boolean.parse(mProps.getProperty( "pegasus.condor.output.stream"), false ); } */ /** * Tells whether to stream condor error or not. By default it is true , meaning condor streams * the error from the remote hosts back to the submit hosts instead of staging it in. This helps * in saving filedescriptors at the jobmanager end. * * <p>Referred to by the "pegasus.condor.error.stream" property. * * <p>If it is set to false, output is not streamed back. The line "stream_output = false" * should be added in the submit files for kickstart jobs. * * @return the boolean value specified by the property, else false in case of invalid value or * property not being specified. */ /* public boolean streamCondorError() { return Boolean.parse(mProps.getProperty( "pegasus.condor.error.stream"), false ); } */ // PROPERTIES RELATED TO STORK /** * Returns the credential name to be used for the stork transfer jobs. * * <p>Referred to by the "pegasus.transfer.stork.cred" property. * * @return the credential name if specified by the property, else null. */ public String getCredName() { return mProps.getProperty("pegasus.transfer.stork.cred"); } // SOME LOGGING PROPERTIES /** * Returns the log manager to use. * * <p>Referred to by the "pegasus.log.manager" property. * * @return the value in the properties file, else Default */ public String getLogManager() { return mProps.getProperty("pegasus.log.manager", "Default"); } /** * Returns the log formatter to use. * * <p>Referred to by the "pegasus.log.formatter" property. * * @return the value in the properties file, else Simple */ public String getLogFormatter() { return mProps.getProperty("pegasus.log.formatter", "Simple"); } /** * Returns the http url for log4j properties for windward project. * * <p>Referred to by the "log4j.configuration" property. * * @return the value in the properties file, else null */ public String getHttpLog4jURL() { // return mProps.getProperty( "pegasus.log.windward.log4j.http.url" ); return mProps.getProperty("log4j.configuration"); } /** * Returns the file to which all the logging needs to be directed to. * * <p>Referred to by the "pegasus.log.*" property. 
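     *
     * <p>A hypothetical example; the path is a placeholder:
     *
     * <pre>
     *   pegasus.log.* = /path/to/run/dir/planner.log
     * </pre>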
* * @return the value of the property that is specified, else null */ public String getLoggingFile() { return mProps.getProperty("pegasus.log.*"); } /** * Returns the location of the local log file where you want the messages to be logged. Not used * for the moment. * * <p>Referred to by the "pegasus.log4j.log" property. * * @return the value specified in the property file,else null. */ public String getLog4JLogFile() { return mProps.getProperty("pegasus.log4j.log"); } /** * Returns a boolean indicating whether to write out the planner metrics or not. * * <p>Referred to by the "pegasus.log.metrics" property. * * @return boolean in the properties, else true */ public boolean writeOutMetrics() { return Boolean.parse( mProps.getProperty(PegasusProperties.PEGASUS_LOG_METRICS_PROPERTY), true) && (this.getMetricsLogFile() != null); } /** * Returns the path to the file that is used to be logging metrics * * <p>Referred to by the "pegasus.log.metrics.file" property. * * @return path to the metrics file if specified, else rundir/pegasus.metrics */ public String getMetricsLogFile() { String file = mProps.getProperty(PegasusProperties.PEGASUS_LOG_METRICS_PROPERTY_FILE); return file; } /** * Returns a boolean indicating whether to log JVM memory usage or not. * * <p>Referred to by the "pegasus.log.memory.usage" property. * * @return boolean value specified in properties else false. */ public boolean logMemoryUsage() { return Boolean.parse(mProps.getProperty("pegasus.log.memory.usage"), false); } // SOME MISCELLANEOUS PROPERTIES /** * Returns a boolean indicating whether we assign job priorities or not to the jobs * * <p>Referred to by the "pegasus.job.priority.assign" property. * * @return boolean value specified in properties else true. */ public boolean assignDefaultJobPriorities() { return Boolean.parse(mProps.getProperty("pegasus.job.priority.assign"), true); } /** * Returns a boolean indicating whether we create registration jobs or not. * * <p>Referred to by the "pegasus.register" property. * * @return boolean value specified in properties else true. */ public boolean createRegistrationJobs() { return Boolean.parse(mProps.getProperty("pegasus.register"), true); } /** * Returns a boolean indicating whether to register a deep LFN or not. * * <p>Referred to by the "pegasus.register.deep" property. * * @return boolean value specified in properties else true. */ public boolean registerDeepLFN() { return Boolean.parse(mProps.getProperty("pegasus.register.deep"), true); } /** * Returns a boolean indicating whether to have jobs executing on worker node tmp or not. * * <p>Referred to by the "pegasus.execute.*.filesystem.local" property. * * @return boolean value in the properties file, else false if not specified or an invalid value * specified. */ public boolean executeOnWorkerNode() { return Boolean.parse( mProps.getProperty(PegasusProperties.PEGASUS_WORKER_NODE_EXECUTION_PROPERTY), false); } /** * Returns a boolean indicating whether to treat the entries in the cache files as a replica * catalog or not. * * @return boolean */ public boolean treatCacheAsRC() { return Boolean.parse(mProps.getProperty("pegasus.catalog.replica.cache.asrc"), false); } /** * Returns a boolean indicating whether to treat the file locations in the DAX as a replica * catalog or not. * * <p>Referred to by the "pegasus.catalog.replica.dax.asrc" property. * * @return boolean value in the properties file, else false if not specified or an invalid value * specified. 
*/ public boolean treatDAXLocationsAsRC() { return Boolean.parse(mProps.getProperty("pegasus.catalog.replica.dax.asrc"), false); } /** * Returns a boolean indicating whether to preserve line breaks. * * <p>Referred to by the "pegasus.parser.dax.preserve.linebreaks" property. * * @return boolean value in the properties file, else false if not specified or an invalid value * specified. */ public boolean preserveParserLineBreaks() { return Boolean.parse(mProps.getProperty("pegasus.parser.dax.preserve.linebreaks"), false); } /** * Returns a boolean indicating whether to automatically add edges as a result of underlying * data dependencies between jobs. * * <p>Referred to by the "pegasus.parser.dax.data.dependencies" property. * * @return boolean value in the properties file, else true if not specified or an invalid value * specified. */ public boolean addDataDependencies() { return Boolean.parse(mProps.getProperty("pegasus.parser.dax.data.dependencies"), true); } /** * Returns the path to the wings properties file. * * <p>Referred to by the "pegasus.wings.properties" property. * * @return value in the properties file, else null. */ public String getWingsPropertiesFile() { return mProps.getProperty("pegasus.wings.properties"); } /** * Returns the request id. * * <p>Referred to by the "pegasus.wings.request.id" property. * * @return value in the properties file, else null. */ public String getWingsRequestID() { return mProps.getProperty("pegasus.wings.request.id"); } /** * Returns the timeout value in seconds after which to time out when opening sockets to the * grid ftp server. * * <p>Referred to by the "pegasus.auth.gridftp.timeout" property. * * @return the timeout value if specified, else null. */ public String getGridFTPTimeout() { return mProps.getProperty("pegasus.auth.gridftp.timeout"); } /** * Returns which submit mode is to be used to submit the jobs on to the grid. * * <p>Referred to by the "pegasus.code.generator" property. * * @return the submit mode specified in the property file, else the default i.e. condor. */ public String getSubmitMode() { return mProps.getProperty("pegasus.code.generator", "condor"); } /** * Returns the mode for parsing the dax while writing out the partitioned daxes. * * <p>Referred to by the "pegasus.partition.parser.load" property. * * @return the value specified in the properties file, else the default value i.e. single. */ public String getPartitionParsingMode() { return mProps.getProperty("pegasus.partition.parser.load", "single"); } /** * Returns the scope for the data reuse module. * * <p>Referred to by the "pegasus.data.reuse.scope" property. * * @return the value specified in the properties file, else null */ public String getDataReuseScope() { return mProps.getProperty("pegasus.data.reuse.scope"); } // JOB COLLAPSING PROPERTIES /** * Returns a comma separated list for the node collapsing criteria for the execution pools. This * determines how many jobs one fat node gobbles up. * * <p>Referred to by the "pegasus.clusterer.nodes" property. * * @return the value specified in the properties file, else null. */ public String getCollapseFactors() { return mProps.getProperty("pegasus.clusterer.nodes"); } /** * Returns the user's horizontal clustering preference. This property determines how to cluster * horizontal jobs. If this property is set with a value of runtime, the jobs will be * grouped into clusters according to their runtimes as specified by the <code>job.runtime</code> property. 
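     *
     * <p>For instance (an illustrative sketch, not a default):
     *
     * <pre>
     *   pegasus.clusterer.preference = runtime
     * </pre>
     *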
For all other cases the default horizontal clustering approach will be * used. * * @return the value specified in the properties file, else null. */ public String getHorizontalClusterPreference() { return mProps.getProperty("pegasus.clusterer.preference"); } /** * Returns what job aggregator is to be used to aggregate multiple compute jobs into a single * condor job. * * <p>Referred to by the "pegasus.cluster.job.aggregator" property. * * @return the value specified in the properties file, else DEFAULT_JOB_AGGREGATOR * @see #DEFAULT_JOB_AGGREGATOR */ public String getJobAggregator() { return mProps.getProperty("pegasus.clusterer.job.aggregator", DEFAULT_JOB_AGGREGATOR); } /** * Returns whether the seqexec job aggregator should log progress to a log or not. * * <p>Referred to by the "pegasus.clusterer.job.aggregator.seqexec.log" property. * * @return the value specified in the properties file, else false */ public boolean logJobAggregatorProgress() { return Boolean.parse(getProperty("pegasus.clusterer.job.aggregator.seqexec.log"), false); } /** * Returns whether the seqexec job aggregator should write to a global log or not. This comes * into play only if "pegasus.clusterer.job.aggregator.seqexec.log" is set to true. * * <p>Referred to by the "pegasus.clusterer.job.aggregator.seqexec.log.global" property. * * @return the value specified in the properties file, else true */ public boolean logJobAggregatorProgressToGlobal() { return Boolean.parse( getProperty( "pegasus.clusterer.job.aggregator.seqexec.log.global", "pegasus.clusterer.job.aggregator.seqexec.hasgloballog"), true); } /** * Returns a boolean indicating whether seqexec trips on the first job failure. * * <p>Referred to by the "pegasus.clusterer.job.aggregator.seqexec.firstjobfail" property. * * @return the value specified in the properties file, else true */ public boolean abortOnFirstJobFailure() { return Boolean.parse( mProps.getProperty("pegasus.clusterer.job.aggregator.seqexec.firstjobfail"), true); } /** * Returns a boolean indicating whether clustering should be allowed for single jobs or not * * <p>Referred to by the "pegasus.clusterer.allow.single" property. * * @return the value specified in the properties file, else false */ public boolean allowClusteringOfSingleJobs() { return Boolean.parse(mProps.getProperty("pegasus.clusterer.allow.single"), false); } /** * Returns a boolean indicating whether to enable integrity checking or not. * * @return false if set explicitly to none, else true */ public boolean doIntegrityChecking() { return this.getIntegrityDial() != INTEGRITY_DIAL.none; } /** * Returns the integrity dial enum * * <p>Referred to by the "pegasus.integrity.checking" property. * * @return the value specified in the properties file, else INTEGRITY_DIAL.full * @see INTEGRITY_DIAL */ public INTEGRITY_DIAL getIntegrityDial() { INTEGRITY_DIAL dial = INTEGRITY_DIAL.full; String value = mProps.getProperty("pegasus.integrity.checking"); if (value == null) { return dial; } // try to assign a dial value try { dial = INTEGRITY_DIAL.valueOf(value); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException( "Invalid value specified for integrity checking " + value, iae); } return dial; } // DEFERRED PLANNING PROPERTIES /** * Returns the root workflow UUID if defined in the properties, else null * * <p>Referred to by the "pegasus.workflow.root.uuid" property. 
* * @return the value in the properties file, else null */ public String getRootWorkflowUUID() { return mProps.getProperty(ROOT_WORKFLOW_UUID_PROPERTY_KEY, null); } /** * Returns the DAXCallback that is to be used while parsing the DAX. * * <p>Referred to by the "pegasus.partitioner.parser.dax.callback" property. * * @return the value specified in the properties file, else DEFAULT_DAX_CALLBACK * @see #DEFAULT_DAX_CALLBACK */ public String getPartitionerDAXCallback() { return mProps.getProperty("pegasus.partitioner.parser.dax.callback", DEFAULT_DAX_CALLBACK); } /** * Returns the key that is to be used as a label key, for labelled partitioning. * * <p>Referred to by the "pegasus.partitioner.label.key" property. * * @return the value specified in the properties file. */ public String getPartitionerLabelKey() { return mProps.getProperty("pegasus.partitioner.label.key"); } /** * Returns the bundle value for a particular transformation. * * <p>Referred to by the "pegasus.partitioner.horizontal.bundle.[txname]" property, where * [txname] is replaced by the name passed as an input to this function. * * @param name the logical name of the transformation. * @return the bundle value if specified in the properties file, else null. */ public String getHorizontalPartitionerBundleValue(String name) { StringBuffer key = new StringBuffer(); key.append("pegasus.partitioner.horizontal.bundle.").append(name); return mProps.getProperty(key.toString()); } /** * Returns the collapse value for a particular transformation. * * <p>Referred to by the "pegasus.partitioner.horizontal.collapse.[txname]" property, where * [txname] is replaced by the name passed as an input to this function. * * @param name the logical name of the transformation. * @return the collapse value if specified in the properties file, else null. */ public String getHorizontalPartitionerCollapseValue(String name) { StringBuffer key = new StringBuffer(); key.append("pegasus.partitioner.horizontal.collapse.").append(name); return mProps.getProperty(key.toString()); } /** * Returns the key that is to be used as a label key, for labelled clustering. * * <p>Referred to by the "pegasus.clusterer.label.key" property. * * @return the value specified in the properties file. */ public String getClustererLabelKey() { return mProps.getProperty("pegasus.clusterer.label.key"); } /** * Returns the estimator to be used. * * <p>Referred to by the "pegasus.estimator" property. * * @return value specified, else null */ public String getEstimator() { return mProps.getProperty("pegasus.estimator"); } /** * Sets the file backend to which properties may be written out. * * @param directory the directory in which the backing properties file is created. * @throws java.io.IOException */ public void setPropertiesFileBackend(String directory) throws IOException { // create a temporary file in directory File dir = new File(directory); // sanity check on the directory sanityCheck(dir); this.mPropsInSubmitDir = File.createTempFile("pegasus.", ".properties", dir).getAbsolutePath(); } /** * Returns the path to the property file that has been written out in the submit directory. * * @return path to the property file, else null */ public String getPropertiesInSubmitDirectory() { /* PM-1523 don't throw exception here. Does not make sense in context of rc-converter */ /* if (mPropsInSubmitDir == null || mPropsInSubmitDir.length() == 0) { throw new RuntimeException( "Properties file does not exist in directory " + mPropsInSubmitDir); } */ return mPropsInSubmitDir; } /** * Writes out the properties to a temporary file in the directory passed. 
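     *
     * <p>A minimal usage sketch; the submit directory path and the {@code props} handle are
     * hypothetical, and the caller is assumed to handle the IOException:
     *
     * <pre>{@code
     * props.setPropertiesFileBackend("/path/to/submit/dir");
     * String location = props.writeOutProperties();
     * }</pre>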
* * @return the absolute path to the properties file written in the directory. * @throws IOException in case of error while writing out file. */ public String writeOutProperties() throws IOException { return this.writeOutProperties(true); } /** * Writes out the properties to a temporary file in the directory passed. * * @param sanitizePath boolean indicating whether to sanitize paths for certain properties or * not. * @return the absolute path to the properties file written in the directory. * @throws IOException in case of error while writing out file. */ private String writeOutProperties(boolean sanitizePath) throws IOException { return this.writeOutProperties(new File(mPropsInSubmitDir), sanitizePath, true); } /** * Writes out the properties to a temporary file in the directory passed. * * @param file the file to which properties are written out to * @param sanitizePath boolean indicating whether to sanitize paths for certain properties or * not. * @param setInternalVariable whether to set the internal variable that stores the path to the * properties file. * @return the absolute path to the properties file written in the directory. * @throws IOException in case of error while writing out file. */ private String writeOutProperties(File file, boolean sanitizePath, boolean setInternalVariable) throws IOException { if (file == null) { throw new RuntimeException("Properties file does not exist " + file); } // we only want to write out the Pegasus properties for time being // and any profiles that were mentioned in the properties. Properties properties = new Properties(); for (Profiles.NAMESPACES n : Profiles.NAMESPACES.values()) { Properties p = this.mProps.matchingSubset(namespaceToPropertiesPrefix().get(n), true); properties.putAll(p); } // check if we need to sanitize paths for certain properties or not if (sanitizePath) { sanitizePathForProperty(properties, "pegasus.catalog.site.file"); sanitizePathForProperty(properties, "pegasus.catalog.replica.file"); sanitizePathForProperty(properties, "pegasus.catalog.transformation.file"); } // put in a sensible default for dagman maxpre for pegasus-run to // pick up if not specified beforehand StringBuffer buffer = new StringBuffer(); buffer.append(Dagman.NAMESPACE_NAME).append(".").append(Dagman.MAXPRE_KEY.toLowerCase()); String key = buffer.toString(); if (!properties.containsKey(key)) { // add defautl value properties.put(key, DEFAULT_DAGMAN_MAX_PRE_VALUE); } // the header of the file StringBuffer header = new StringBuffer(64); header.append("Pegasus USER PROPERTIES AT RUNTIME \n") .append("#ESCAPES IN VALUES ARE INTRODUCED"); // create an output stream to this file and write out the properties OutputStream os = new FileOutputStream(file); // PM-1593 write everything in properties as UTF-8 properties.store( new OutputStreamWriter(os, CommonProperties.DEFAULT_ENCODING_SET), header.toString()); os.close(); // also set it to the internal variable if (setInternalVariable) { mPropsInSubmitDir = file.getAbsolutePath(); return mPropsInSubmitDir; } else { return file.getAbsolutePath(); } } /** * Santizes the value in the properties . Ensures that the path is absolute. 
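     *
     * <p>For example (hypothetical file name), a relative value such as {@code
     * pegasus.catalog.replica.file = ./replicas.yml} would be rewritten to its absolute form,
     * e.g. {@code /path/to/current/dir/replicas.yml}.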
* * @param properties the properties * @param key the key whose value needs to be sanitized */ private void sanitizePathForProperty(Properties properties, String key) { if (properties.containsKey(key)) { String value = properties.getProperty(key); if (value != null) { properties.setProperty(key, new File(value).getAbsolutePath()); } } } /** * Checks the destination location for existence, if it can be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * @throws IOException in case of error while writing out files. */ protected static void sanityCheck(File dir) throws IOException { if (dir.exists()) { // location exists if (dir.isDirectory()) { // ok, isa directory if (dir.canWrite()) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException("Cannot write to existing directory " + dir.getPath()); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory."); } } else { // does not exist, try to make it if (!dir.mkdirs()) { // try to get around JVM bug. JIRA PM-91 if (dir.getPath().endsWith(".")) { // just try to create the parent directory if (!dir.getParentFile().mkdirs()) { throw new IOException("Unable to create directory " + dir.getPath()); } return; } throw new IOException("Unable to create directory destination " + dir.getPath()); } } } /** * This function is used to check whether a deprecated property is used or not. If a deprecated * property is used,it logs a warning message specifying the new property. If both properties * are not set by the user, the function returns the default property. If no default property * then null. * * @param newProperty the new property that should be used. * @param deprecatedProperty the deprecated property that needs to be replaced. * @return the appropriate value. */ private String getProperty(String newProperty, String deprecatedProperty) { return this.getProperty(newProperty, deprecatedProperty, null); } /** * This function is used to check whether a deprecated property is used or not. If a deprecated * property is used,it logs a warning message specifying the new property. If both properties * are not set by the user, the function returns the default property. If no default property * then null. * * @param newProperty the new property that should be used. * @param deprecatedProperty the deprecated property that needs to be replaced. * @param defaultValue the default value that should be returned. * @return the appropriate value. */ private String getProperty(String newProperty, String deprecatedProperty, String defaultValue) { String value = null; // try for the new property // first value = mProps.getProperty(newProperty); if (value == null) { // try the deprecated property if set value = mProps.getProperty(deprecatedProperty); // if the value is not null if (value != null) { // print the warning message logDeprecatedWarning(deprecatedProperty, newProperty); return value; } else { // else return the default value return defaultValue; } } return value; } /** * Logs a warning about the deprecated property. Logs a warning only if it has not been * displayed before. * * @param deprecatedProperty the deprecated property that needs to be replaced. * @param newProperty the new property that should be used. 
*/ private void logDeprecatedWarning(String deprecatedProperty, String newProperty) { if (!mDeprecatedProperties.contains(deprecatedProperty)) { // log only if it had already not been logged StringBuffer sb = new StringBuffer(); sb.append("The property ") .append(deprecatedProperty) .append(" has been deprecated. Use ") .append(newProperty) .append(" instead."); // mLogger.log(sb.toString(),LogManager.WARNING_MESSAGE_LEVEL ); System.err.println("[WARNING] " + sb.toString()); // push the property in to indicate it has already been // warned about mDeprecatedProperties.add(deprecatedProperty); } } /** * Returns a boolean indicating whether to use third party transfers for all types of transfers * or not. * * <p>Referred to by the "pegasus.transfer.*.thirdparty" property. * * @return the boolean value in the properties files, else false if no value specified, or non * boolean specified. */ // private boolean useThirdPartyForAll(){ // return Boolean.parse("pegasus.transfer.*.thirdparty", // false); // } /** * Gets the reference to the internal singleton object. This method is invoked with the * assumption that the singleton method has been invoked once and has been populated. Also that * it has not been disposed by the garbage collector. Can be potentially a buggy way to invoke. * * @return a handle to the Properties class. */ // public static PegasusProperties singletonInstance() { // return singletonInstance( null ); // } /** * Gets a reference to the internal singleton object. * * @param propFileName name of the properties file to picked from $PEGASUS_HOME/etc/ directory. * @return a handle to the Properties class. */ // public static PegasusProperties singletonInstance( String propFileName ) { // if ( pegProperties == null ) { // //only the default properties file // //can be picked up due to the way // //Singleton implemented in CommonProperties.??? // pegProperties = new PegasusProperties( null ); // } // return pegProperties; // } }
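/*
 * Minimal usage sketch. How the handle is obtained is left open; the factory call below is
 * only an assumption, and the property lookups shown merely echo the accessors above.
 *
 *   PegasusProperties props = PegasusProperties.nonSingletonInstance();
 *   if (props.createRegistrationJobs()) {
 *       // "pegasus.register" resolved to true (the default)
 *   }
 *   String transferArgs = props.getTransferArguments(); // "pegasus.transfer.arguments"
 */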
PM-1429 added property key constants added new property pegasus.mode and also an enum for valid mode values
src/edu/isi/pegasus/planner/common/PegasusProperties.java